text (string, lengths 2–100k) | meta (dict) |
---|---|
<?xml version="1.0" encoding="UTF-8"?>
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="http://schema.phpunit.de/5.2/phpunit.xsd"
backupGlobals="false"
colors="true"
bootstrap="vendor/autoload.php"
failOnRisky="true"
failOnWarning="true"
>
<php>
<ini name="error_reporting" value="-1" />
</php>
<testsuites>
<testsuite name="Symfony EventDispatcher Component Test Suite">
<directory>./Tests/</directory>
</testsuite>
</testsuites>
<filter>
<whitelist>
<directory>./</directory>
<exclude>
<directory>./Resources</directory>
<directory>./Tests</directory>
<directory>./vendor</directory>
</exclude>
</whitelist>
</filter>
</phpunit>
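<!-- Usage sketch (not part of the original file): typically saved as phpunit.xml.dist in the
     component root, this configuration is picked up automatically when running
     `vendor/bin/phpunit` from that directory; the <whitelist> element restricts
     code-coverage collection to the component sources while excluding the
     Resources, Tests and vendor directories listed above. -->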
| {
"pile_set_name": "Github"
} |
package milkman.plugin.privatebin;
import org.junit.jupiter.api.Test;
import milkman.plugin.privatebin.PrivateBinApi.PrivateBinDataV1;
class SjclDeEncryptorTest {
@Test
void testEncryption() throws Exception {
SjclDeEncryptor sut = new SjclDeEncryptor();
PrivateBinDataV1 data = sut.encrypt("test");
System.out.println(data);
}
@Test
void testDecryption() throws Exception {
String secret64 = "b5E1pTdK/VYvHSZcmCd0gZxJrkJnlVZ6l6wQxgGDgm4=";
PrivateBinDataV1 data = new PrivateBinDataV1("YNY2RpljXcT3f4HpSqUfLg==", 1, 10_000, 256, 128, "gcm", "", "aes", "V1oFfs4uKYw=", "SFZEZC4rkcZ0fuZ9m25UvFHu/K8=", null);
SjclDeEncryptor sut = new SjclDeEncryptor();
String decrypted = sut.decrypt(data, secret64);
System.out.println(decrypted);
}
}
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0
#define _GNU_SOURCE
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <sys/msg.h>
#include <fcntl.h>
#include <unistd.h>	/* write(), close() */
#include "../kselftest.h"
#define MAX_MSG_SIZE 32
struct msg1 {
int msize;
long mtype;
char mtext[MAX_MSG_SIZE];
};
#define TEST_STRING "Test sysv5 msg"
#define MSG_TYPE 1
#define ANOTHER_TEST_STRING "Yet another test sysv5 msg"
#define ANOTHER_MSG_TYPE 26538
struct msgque_data {
key_t key;
int msq_id;
int qbytes;
int qnum;
int mode;
struct msg1 *messages;
};
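/*
 * Descriptive note (added comment): restore_queue() relies on the
 * /proc/sys/kernel/msg_next_id interface -- writing a queue id there makes the
 * kernel assign exactly that id to the next queue created by msgget(), so a
 * dumped queue can be recreated under its original id before its messages are
 * replayed with msgsnd().
 */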
int restore_queue(struct msgque_data *msgque)
{
int fd, ret, id, i;
char buf[32];
fd = open("/proc/sys/kernel/msg_next_id", O_WRONLY);
if (fd == -1) {
printf("Failed to open /proc/sys/kernel/msg_next_id\n");
return -errno;
}
sprintf(buf, "%d", msgque->msq_id);
ret = write(fd, buf, strlen(buf));
if (ret != strlen(buf)) {
printf("Failed to write to /proc/sys/kernel/msg_next_id\n");
return -errno;
}
id = msgget(msgque->key, msgque->mode | IPC_CREAT | IPC_EXCL);
if (id == -1) {
printf("Failed to create queue\n");
return -errno;
}
if (id != msgque->msq_id) {
printf("Restored queue has wrong id (%d instead of %d)\n",
id, msgque->msq_id);
ret = -EFAULT;
goto destroy;
}
for (i = 0; i < msgque->qnum; i++) {
if (msgsnd(msgque->msq_id, &msgque->messages[i].mtype,
msgque->messages[i].msize, IPC_NOWAIT) != 0) {
printf("msgsnd failed (%m)\n");
ret = -errno;
goto destroy;
};
}
return 0;
destroy:
if (msgctl(id, IPC_RMID, NULL))
printf("Failed to destroy queue: %d\n", -errno);
return ret;
}
int check_and_destroy_queue(struct msgque_data *msgque)
{
struct msg1 message;
int cnt = 0, ret;
while (1) {
ret = msgrcv(msgque->msq_id, &message.mtype, MAX_MSG_SIZE,
0, IPC_NOWAIT);
if (ret < 0) {
if (errno == ENOMSG)
break;
printf("Failed to read IPC message: %m\n");
ret = -errno;
goto err;
}
if (ret != msgque->messages[cnt].msize) {
printf("Wrong message size: %d (expected %d)\n", ret,
msgque->messages[cnt].msize);
ret = -EINVAL;
goto err;
}
if (message.mtype != msgque->messages[cnt].mtype) {
printf("Wrong message type\n");
ret = -EINVAL;
goto err;
}
if (memcmp(message.mtext, msgque->messages[cnt].mtext, ret)) {
printf("Wrong message content\n");
ret = -EINVAL;
goto err;
}
cnt++;
}
if (cnt != msgque->qnum) {
printf("Wrong message number\n");
ret = -EINVAL;
goto err;
}
ret = 0;
err:
if (msgctl(msgque->msq_id, IPC_RMID, NULL)) {
printf("Failed to destroy queue: %d\n", -errno);
return -errno;
}
return ret;
}
int dump_queue(struct msgque_data *msgque)
{
struct msqid_ds ds;
int kern_id;
int i, ret;
for (kern_id = 0; kern_id < 256; kern_id++) {
ret = msgctl(kern_id, MSG_STAT, &ds);
if (ret < 0) {
if (errno == EINVAL)
continue;
printf("Failed to get stats for IPC queue with id %d\n",
kern_id);
return -errno;
}
if (ret == msgque->msq_id)
break;
}
msgque->messages = malloc(sizeof(struct msg1) * ds.msg_qnum);
if (msgque->messages == NULL) {
printf("Failed to get stats for IPC queue\n");
return -ENOMEM;
}
msgque->qnum = ds.msg_qnum;
msgque->mode = ds.msg_perm.mode;
msgque->qbytes = ds.msg_qbytes;
for (i = 0; i < msgque->qnum; i++) {
ret = msgrcv(msgque->msq_id, &msgque->messages[i].mtype,
MAX_MSG_SIZE, i, IPC_NOWAIT | MSG_COPY);
if (ret < 0) {
printf("Failed to copy IPC message: %m (%d)\n", errno);
return -errno;
}
msgque->messages[i].msize = ret;
}
return 0;
}
int fill_msgque(struct msgque_data *msgque)
{
struct msg1 msgbuf;
msgbuf.mtype = MSG_TYPE;
memcpy(msgbuf.mtext, TEST_STRING, sizeof(TEST_STRING));
if (msgsnd(msgque->msq_id, &msgbuf.mtype, sizeof(TEST_STRING),
IPC_NOWAIT) != 0) {
printf("First message send failed (%m)\n");
return -errno;
};
msgbuf.mtype = ANOTHER_MSG_TYPE;
memcpy(msgbuf.mtext, ANOTHER_TEST_STRING, sizeof(ANOTHER_TEST_STRING));
if (msgsnd(msgque->msq_id, &msgbuf.mtype, sizeof(ANOTHER_TEST_STRING),
IPC_NOWAIT) != 0) {
printf("Second message send failed (%m)\n");
return -errno;
};
return 0;
}
int main(int argc, char **argv)
{
int msg, pid, err;
struct msgque_data msgque;
if (getuid() != 0)
return ksft_exit_skip(
"Please run the test as root - Exiting.\n");
msgque.key = ftok(argv[0], 822155650);
if (msgque.key == -1) {
printf("Can't make key: %d\n", -errno);
return ksft_exit_fail();
}
msgque.msq_id = msgget(msgque.key, IPC_CREAT | IPC_EXCL | 0666);
if (msgque.msq_id == -1) {
err = -errno;
printf("Can't create queue: %d\n", err);
goto err_out;
}
err = fill_msgque(&msgque);
if (err) {
printf("Failed to fill queue: %d\n", err);
goto err_destroy;
}
err = dump_queue(&msgque);
if (err) {
printf("Failed to dump queue: %d\n", err);
goto err_destroy;
}
err = check_and_destroy_queue(&msgque);
if (err) {
printf("Failed to check and destroy queue: %d\n", err);
goto err_out;
}
err = restore_queue(&msgque);
if (err) {
printf("Failed to restore queue: %d\n", err);
goto err_destroy;
}
err = check_and_destroy_queue(&msgque);
if (err) {
printf("Failed to test queue: %d\n", err);
goto err_out;
}
return ksft_exit_pass();
err_destroy:
if (msgctl(msgque.msq_id, IPC_RMID, NULL)) {
printf("Failed to destroy queue: %d\n", -errno);
return ksft_exit_fail();
}
err_out:
return ksft_exit_fail();
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!--*************************************************************************-->
<!-- SCICHART® Copyright SciChart Ltd. 2011-2016. All rights reserved. -->
<!-- -->
<!-- Web: http://www.scichart.com -->
<!-- Support: [email protected] -->
<!-- Sales: [email protected] -->
<!-- -->
<!-- header_list_item.xml is part of the SCICHART® Examples. Permission is hereby granted -->
<!-- to modify, create derivative works, distribute and publish any part of this source -->
<!-- code whether for commercial, private or personal use. -->
<!-- -->
<!-- The SCICHART® examples are distributed in the hope that they will be useful, but -->
<!-- without any warranty. It is provided "AS IS" without warranty of any kind, either -->
<!-- expressed or implied. -->
<!--*************************************************************************-->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/textSeparator"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:background="@color/menu_list_header_background"
android:paddingBottom="13dp"
android:paddingStart="10dp"
android:paddingTop="13dp"
android:paddingEnd="10dp"
android:textAllCaps="true"
android:textColor="@android:color/white"
android:visibility="visible" />
</LinearLayout>
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2003-2006 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package com.sun.jmx.remote.util;
import java.lang.ref.SoftReference;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.WeakHashMap;
import com.sun.jmx.mbeanserver.Util;
/**
* <p>Like WeakHashMap, except that the keys of the <em>n</em> most
* recently-accessed entries are kept as {@link SoftReference soft
* references}. Accessing an element means creating it, or retrieving
* it with {@link #get(Object) get}. Because these entries are kept
* with soft references, they will tend to remain even if their keys
* are not referenced elsewhere. But if memory is short, they will
* be removed.</p>
*/
public class CacheMap<K, V> extends WeakHashMap<K, V> {
/**
* <p>Create a <code>CacheMap</code> that can keep up to
* <code>nSoftReferences</code> as soft references.</p>
*
* @param nSoftReferences Maximum number of keys to keep as soft
* references. Access times for {@link #get(Object) get} and
* {@link #put(Object, Object) put} have a component that scales
* linearly with <code>nSoftReferences</code>, so this value
* should not be too great.
*
* @throws IllegalArgumentException if
* <code>nSoftReferences</code> is negative.
*/
public CacheMap(int nSoftReferences) {
if (nSoftReferences < 0) {
throw new IllegalArgumentException("nSoftReferences = " +
nSoftReferences);
}
this.nSoftReferences = nSoftReferences;
}
public V put(K key, V value) {
cache(key);
return super.put(key, value);
}
public V get(Object key) {
cache(Util.<K>cast(key));
return super.get(key);
}
/* We don't override remove(Object) or try to do something with
the map's iterators to detect removal. So we may keep useless
entries in the soft reference list for keys that have since
been removed. The assumption is that entries are added to the
cache but never removed. But the behavior is not wrong if
they are in fact removed -- the caching is just less
performant. */
private void cache(K key) {
Iterator<SoftReference<K>> it = cache.iterator();
while (it.hasNext()) {
SoftReference<K> sref = it.next();
K key1 = sref.get();
if (key1 == null)
it.remove();
else if (key.equals(key1)) {
// Move this element to the head of the LRU list
it.remove();
cache.add(0, sref);
return;
}
}
int size = cache.size();
if (size == nSoftReferences) {
if (size == 0)
return; // degenerate case, equivalent to WeakHashMap
it.remove();
}
cache.add(0, new SoftReference<K>(key));
}
/* List of soft references for the most-recently referenced keys.
The list is in most-recently-used order, i.e. the first element
is the most-recently referenced key. There are never more than
nSoftReferences elements of this list.
If we didn't care about J2SE 1.3 compatibility, we could use
LinkedHashSet in conjunction with a subclass of SoftReference
whose equals and hashCode reflect the referent. */
private final LinkedList<SoftReference<K>> cache =
new LinkedList<SoftReference<K>>();
private final int nSoftReferences;
}
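/* Usage sketch (illustrative, not part of the original class): CacheMap behaves
 * like a WeakHashMap whose most recently accessed keys are additionally pinned
 * by soft references, so their entries survive as long as memory is plentiful.
 *
 *     CacheMap<String, byte[]> cache = new CacheMap<String, byte[]>(16);
 *     cache.put("descriptor", bytes);        // "descriptor" now heads the LRU list
 *     byte[] hit = cache.get("descriptor");  // refreshes its LRU position
 *
 * The key "descriptor" and the value bytes are hypothetical placeholders.
 */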
| {
"pile_set_name": "Github"
} |
/*
* Knowage, Open Source Business Intelligence suite
* Copyright (C) 2016 Engineering Ingegneria Informatica S.p.A.
*
* Knowage is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knowage is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package it.eng.spagobi.api.v2;
import java.util.List;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import it.eng.spagobi.api.AbstractSpagoBIResource;
import it.eng.spagobi.behaviouralmodel.check.bo.Check;
import it.eng.spagobi.behaviouralmodel.check.dao.ICheckDAO;
import it.eng.spagobi.commons.constants.SpagoBIConstants;
import it.eng.spagobi.commons.dao.DAOFactory;
import it.eng.spagobi.services.rest.annotations.ManageAuthorization;
import it.eng.spagobi.services.rest.annotations.UserConstraint;
import it.eng.spagobi.utilities.exceptions.SpagoBIRestServiceException;
@Path("/2.0/predefinedChecks")
@ManageAuthorization
public class ModalitiesResource extends AbstractSpagoBIResource {
private final String charset = "; charset=UTF-8";
@GET
@UserConstraint(functionalities = { SpagoBIConstants.CONTSTRAINT_MANAGEMENT })
@Path("/")
@Produces(MediaType.APPLICATION_JSON + charset)
public Response getPredefined() {
ICheckDAO checksDao = null;
List<Check> fullList = null;
try {
checksDao = DAOFactory.getChecksDAO();
checksDao.setUserProfile(getUserProfile());
fullList = checksDao.loadPredefinedChecks();
return Response.ok(fullList).build();
} catch (Exception e) {
logger.error("Error with loading resource", e);
throw new SpagoBIRestServiceException("sbi.modalities.check.rest.error", buildLocaleFromSession(), e);
}
}
}
| {
"pile_set_name": "Github"
} |
[
{
"type": "feature",
"category": "MediaPackage",
"description": "This release adds support for user-defined tagging of MediaPackage resources. Users may now call operations to list, add and remove tags from channels and origin-endpoints. Users can also specify tags to be attached to these resources during their creation. Describe and list operations on these resources will now additionally return any tags associated with them."
},
{
"type": "feature",
"category": "SSM",
"description": "This release updates AWS Systems Manager APIs to support service settings for AWS customers. A service setting is a key-value pair that defines how a user interacts with or uses an AWS service, and is typically created and consumed by the AWS service team. AWS customers can read a service setting via GetServiceSetting API and update the setting via UpdateServiceSetting API or ResetServiceSetting API, which are introduced in this release. For example, if an AWS service charges money to the account based on a feature or service usage, then the AWS service team might create a setting with the default value of \"false\". This means the user can't use this feature unless they update the setting to \"true\" and intentionally opt in for a paid feature."
}
] | {
"pile_set_name": "Github"
} |
{
"scriptCategory": "English-like",
"timeOfDayFormat": "H:mm",
"openAppDrawerTooltip": "নেভিগেশ্বন মেনু খোলক",
"backButtonTooltip": "উভতি যাওক",
"closeButtonTooltip": "বন্ধ কৰক",
"deleteButtonTooltip": "মচক",
"nextMonthTooltip": "পৰৱৰ্তী মাহ",
"previousMonthTooltip": "পূৰ্বৱৰ্তী মাহ",
"nextPageTooltip": "পৰৱৰ্তী পৃষ্ঠা",
"previousPageTooltip": "পূৰ্বৱৰ্তী পৃষ্ঠা",
"showMenuTooltip": "মেনুখন দেখুৱাওক",
"aboutListTileTitle": "$applicationNameৰ বিষয়ে",
"licensesPageTitle": "অনুজ্ঞাপত্ৰসমূহ",
"pageRowsInfoTitle": "$rowCountৰ $firstRow–$lastRow",
"pageRowsInfoTitleApproximate": "$rowCountৰ $firstRow–$lastRow",
"rowsPerPageTitle": "প্ৰতিটো পৃষ্ঠাত থকা শাৰী:",
"tabLabel": "$tabCountৰ $tabIndexটা টেব",
"selectedRowCountTitleOne": "১টা বস্তু বাছনি কৰা হ'ল",
"selectedRowCountTitleOther": "$selectedRowCountটা বস্তু বাছনি কৰা হ’ল",
"cancelButtonLabel": "বাতিল কৰক",
"closeButtonLabel": "বন্ধ কৰক",
"continueButtonLabel": "অব্যাহত ৰাখক",
"copyButtonLabel": "প্ৰতিলিপি কৰক",
"cutButtonLabel": "কাট কৰক",
"okButtonLabel": "ঠিক আছে",
"pasteButtonLabel": "পে'ষ্ট কৰক",
"selectAllButtonLabel": "সকলো বাছনি কৰক",
"viewLicensesButtonLabel": "অনুজ্ঞাপত্ৰসমূহ চাওক",
"anteMeridiemAbbreviation": "পূৰ্বাহ্ন",
"postMeridiemAbbreviation": "অপৰাহ্ন",
"timePickerHourModeAnnouncement": "সময় বাছনি কৰক",
"timePickerMinuteModeAnnouncement": "মিনিট বাছনি কৰক",
"modalBarrierDismissLabel": "অগ্ৰাহ্য কৰক",
"signedInLabel": "ছাইন ইন কৰা হ’ল",
"hideAccountsLabel": "একাউণ্টসমূহ লুকুৱাওক",
"showAccountsLabel": "একাউণ্টসমূহ দেখুৱাওক",
"drawerLabel": "নেভিগেশ্বন মেনু",
"popupMenuLabel": "প'পআপ মেনু",
"dialogLabel": "ডায়ল'গ",
"alertDialogLabel": "সতৰ্কবাৰ্তা",
"searchFieldLabel": "সন্ধান কৰক",
"reorderItemToStart": "আৰম্ভণিলৈ স্থানান্তৰ কৰক",
"reorderItemToEnd": "শেষলৈ স্থানান্তৰ কৰক",
"reorderItemUp": "ওপৰলৈ নিয়ক",
"reorderItemDown": "তললৈ স্থানান্তৰ কৰক",
"reorderItemLeft": "বাওঁফাললৈ স্থানান্তৰ কৰক",
"reorderItemRight": "সোঁফাললৈ স্থানান্তৰ কৰক",
"expandedIconTapHint": "সংকোচন কৰক",
"collapsedIconTapHint": "বিস্তাৰ কৰক",
"remainingTextFieldCharacterCountOne": "১টা বর্ণ বাকী আছে",
"remainingTextFieldCharacterCountOther": "$remainingCountটা বর্ণ বাকী আছে",
"refreshIndicatorSemanticLabel": "ৰিফ্ৰেশ্ব কৰক",
"moreButtonTooltip": "অধিক",
"dateSeparator": "/",
"dateHelpText": "mm/dd/yyyy",
"selectYearSemanticsLabel": "বছৰ বাছনি কৰক",
"unspecifiedDate": "তাৰিখ",
"unspecifiedDateRange": "তাৰিখৰ পৰিসৰ",
"dateInputLabel": "তাৰিখটো দিয়ক",
"dateRangeStartLabel": "আৰম্ভণিৰ তাৰিখ",
"dateRangeEndLabel": "সমাপ্তিৰ তাৰিখ",
"dateRangeStartDateSemanticLabel": "আৰম্ভণিৰ তাৰিখ $fullDate",
"dateRangeEndDateSemanticLabel": "সমাপ্তিৰ তাৰিখ $fullDate",
"invalidDateFormatLabel": "অমান্য ফৰ্মেট।",
"invalidDateRangeLabel": "অমান্য পৰিসৰ।",
"dateOutOfRangeLabel": "সীমাৰ বাহিৰত।",
"saveButtonLabel": "ছেভ কৰক",
"datePickerHelpText": "তাৰিখ বাছনি কৰক",
"dateRangePickerHelpText": "পৰিসৰ বাছনি কৰক",
"calendarModeButtonLabel": "কেলেণ্ডাৰলৈ সলনি কৰক",
"inputDateModeButtonLabel": "ইনপুটলৈ সলনি কৰক",
"timePickerDialHelpText": "সময় বাছনি কৰক",
"timePickerInputHelpText": "সময় দিয়ক",
"timePickerHourLabel": "ঘণ্টা",
"timePickerMinuteLabel": "মিনিট",
"invalidTimeLabel": "এটা মান্য সময় দিয়ক",
"dialModeButtonLabel": "ডায়েল বাছনিকৰ্তাৰ ম’ডলৈ সলনি কৰক",
"inputTimeModeButtonLabel": "পাঠ ইনপুটৰ ম’ডলৈ সলনি কৰক",
"licensesPackageDetailTextZero": "No licenses",
"licensesPackageDetailTextOne": "১ খন অনুজ্ঞাপত্ৰ",
"licensesPackageDetailTextOther": "$licenseCount খন অনুজ্ঞাপত্ৰ"
}
| {
"pile_set_name": "Github"
} |
// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
var GUTTER_ID = "CodeMirror-lint-markers";
function showTooltip(e, content) {
var tt = document.createElement("div");
tt.className = "CodeMirror-lint-tooltip";
tt.appendChild(content.cloneNode(true));
document.body.appendChild(tt);
function position(e) {
if (!tt.parentNode) return CodeMirror.off(document, "mousemove", position);
tt.style.top = Math.max(0, e.clientY - tt.offsetHeight - 5) + "px";
tt.style.left = (e.clientX + 5) + "px";
}
CodeMirror.on(document, "mousemove", position);
position(e);
if (tt.style.opacity != null) tt.style.opacity = 1;
return tt;
}
function rm(elt) {
if (elt.parentNode) elt.parentNode.removeChild(elt);
}
function hideTooltip(tt) {
if (!tt.parentNode) return;
if (tt.style.opacity == null) rm(tt);
tt.style.opacity = 0;
setTimeout(function() { rm(tt); }, 600);
}
function showTooltipFor(e, content, node) {
var tooltip = showTooltip(e, content);
function hide() {
CodeMirror.off(node, "mouseout", hide);
if (tooltip) { hideTooltip(tooltip); tooltip = null; }
}
var poll = setInterval(function() {
if (tooltip) for (var n = node;; n = n.parentNode) {
if (n && n.nodeType == 11) n = n.host;
if (n == document.body) return;
if (!n) { hide(); break; }
}
if (!tooltip) return clearInterval(poll);
}, 400);
CodeMirror.on(node, "mouseout", hide);
}
function LintState(cm, options, hasGutter) {
this.marked = [];
this.options = options;
this.timeout = null;
this.hasGutter = hasGutter;
this.onMouseOver = function(e) { onMouseOver(cm, e); };
}
function parseOptions(cm, options) {
if (options instanceof Function) return {getAnnotations: options};
if (!options || options === true) options = {};
if (!options.getAnnotations) options.getAnnotations = cm.getHelper(CodeMirror.Pos(0, 0), "lint");
if (!options.getAnnotations) throw new Error("Required option 'getAnnotations' missing (lint addon)");
return options;
}
function clearMarks(cm) {
var state = cm.state.lint;
if (state.hasGutter) cm.clearGutter(GUTTER_ID);
for (var i = 0; i < state.marked.length; ++i)
state.marked[i].clear();
state.marked.length = 0;
}
function makeMarker(labels, severity, multiple, tooltips) {
var marker = document.createElement("div"), inner = marker;
marker.className = "CodeMirror-lint-marker-" + severity;
if (multiple) {
inner = marker.appendChild(document.createElement("div"));
inner.className = "CodeMirror-lint-marker-multiple";
}
if (tooltips != false) CodeMirror.on(inner, "mouseover", function(e) {
showTooltipFor(e, labels, inner);
});
return marker;
}
function getMaxSeverity(a, b) {
if (a == "error") return a;
else return b;
}
function groupByLine(annotations) {
var lines = [];
for (var i = 0; i < annotations.length; ++i) {
var ann = annotations[i], line = ann.from.line;
(lines[line] || (lines[line] = [])).push(ann);
}
return lines;
}
function annotationTooltip(ann) {
var severity = ann.severity;
if (!severity) severity = "error";
var tip = document.createElement("div");
tip.className = "CodeMirror-lint-message-" + severity;
tip.appendChild(document.createTextNode(ann.message));
return tip;
}
function startLinting(cm) {
var state = cm.state.lint, options = state.options;
var passOptions = options.options || options; // Support deprecated passing of `options` property in options
if (options.async || options.getAnnotations.async)
options.getAnnotations(cm.getValue(), updateLinting, passOptions, cm);
else
updateLinting(cm, options.getAnnotations(cm.getValue(), passOptions, cm));
}
function updateLinting(cm, annotationsNotSorted) {
clearMarks(cm);
var state = cm.state.lint, options = state.options;
var annotations = groupByLine(annotationsNotSorted);
for (var line = 0; line < annotations.length; ++line) {
var anns = annotations[line];
if (!anns) continue;
var maxSeverity = null;
var tipLabel = state.hasGutter && document.createDocumentFragment();
for (var i = 0; i < anns.length; ++i) {
var ann = anns[i];
var severity = ann.severity;
if (!severity) severity = "error";
maxSeverity = getMaxSeverity(maxSeverity, severity);
if (options.formatAnnotation) ann = options.formatAnnotation(ann);
if (state.hasGutter) tipLabel.appendChild(annotationTooltip(ann));
if (ann.to) state.marked.push(cm.markText(ann.from, ann.to, {
className: "CodeMirror-lint-mark-" + severity,
__annotation: ann
}));
}
if (state.hasGutter)
cm.setGutterMarker(line, GUTTER_ID, makeMarker(tipLabel, maxSeverity, anns.length > 1,
state.options.tooltips));
}
if (options.onUpdateLinting) options.onUpdateLinting(annotationsNotSorted, annotations, cm);
}
function onChange(cm) {
var state = cm.state.lint;
clearTimeout(state.timeout);
state.timeout = setTimeout(function(){startLinting(cm);}, state.options.delay || 500);
}
function popupSpanTooltip(ann, e) {
var target = e.target || e.srcElement;
showTooltipFor(e, annotationTooltip(ann), target);
}
function onMouseOver(cm, e) {
var target = e.target || e.srcElement;
if (!/\bCodeMirror-lint-mark-/.test(target.className)) return;
var box = target.getBoundingClientRect(), x = (box.left + box.right) / 2, y = (box.top + box.bottom) / 2;
var spans = cm.findMarksAt(cm.coordsChar({left: x, top: y}, "client"));
for (var i = 0; i < spans.length; ++i) {
var ann = spans[i].__annotation;
if (ann) return popupSpanTooltip(ann, e);
}
}
CodeMirror.defineOption("lint", false, function(cm, val, old) {
if (old && old != CodeMirror.Init) {
clearMarks(cm);
cm.off("change", onChange);
CodeMirror.off(cm.getWrapperElement(), "mouseover", cm.state.lint.onMouseOver);
delete cm.state.lint;
}
if (val) {
var gutters = cm.getOption("gutters"), hasLintGutter = false;
for (var i = 0; i < gutters.length; ++i) if (gutters[i] == GUTTER_ID) hasLintGutter = true;
var state = cm.state.lint = new LintState(cm, parseOptions(cm, val), hasLintGutter);
cm.on("change", onChange);
if (state.options.tooltips != false)
CodeMirror.on(cm.getWrapperElement(), "mouseover", state.onMouseOver);
startLinting(cm);
}
});
});
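// Usage sketch (illustrative, not part of the addon): the "lint" option defined
// above is enabled per editor instance, and the configured gutter id must match
// GUTTER_ID for markers to be drawn.
//
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
//     mode: "javascript",
//     gutters: ["CodeMirror-lint-markers"],
//     lint: true   // or { getAnnotations: myLinter, async: true, delay: 300 }
//   });
//
// "code" and myLinter are placeholders for a textarea id and a linter function.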
| {
"pile_set_name": "Github"
} |
//
// ********************************************************************
// * License and Disclaimer *
// * *
// * The Geant4 software is copyright of the Copyright Holders of *
// * the Geant4 Collaboration. It is provided under the terms and *
// * conditions of the Geant4 Software License, included in the file *
// * LICENSE and available at http://cern.ch/geant4/license . These *
// * include a list of copyright holders. *
// * *
// * Neither the authors of this software system, nor their employing *
// * institutes,nor the agencies providing financial support for this *
// * work make any representation or warranty, express or implied, *
// * regarding this software system or assume any liability for its *
// * use. Please see the license in the file LICENSE and URL above *
// * for the full disclaimer and the limitation of liability. *
// * *
// * This code implementation is the result of the scientific and *
// * technical work of the GEANT4 collaboration. *
// * By using, copying, modifying or distributing the software (or *
// * any work based on the software) you agree to acknowledge its *
// * use in resulting scientific publications, and indicate your *
// * acceptance of all terms of the Geant4 Software license. *
// ********************************************************************
//
// Previous authors: G. Guerrieri, S. Guatelli and M. G. Pia, INFN Genova, Italy
// Authors (since 2007): S. Guatelli,University of Wollongong, Australia
//
#ifndef G4HumanPhantomPrimaryGeneratorAction_h
#define G4HumanPhantomPrimaryGeneratorAction_h 1
#include "G4VUserPrimaryGeneratorAction.hh"
#include "globals.hh"
#include <vector>
class G4GeneralParticleSource;
class G4Event;
class G4HumanPhantomPrimaryGeneratorAction : public G4VUserPrimaryGeneratorAction
{
public:
G4HumanPhantomPrimaryGeneratorAction();
~G4HumanPhantomPrimaryGeneratorAction();
public:
void GeneratePrimaries(G4Event* anEvent);
private:
G4GeneralParticleSource* particleGun;
};
#endif
| {
"pile_set_name": "Github"
} |
martin@vbubuntu:~$ sudo litmus -k http://192.168.178.28/temp tester tester
-> running `basic':
0. init.................. pass
1. begin................. pass
2. options............... pass
3. put_get............... pass
4. put_get_utf8_segment.. pass
5. put_no_parent......... pass
6. mkcol_over_plain...... pass
7. delete................ pass
8. delete_null........... pass
9. delete_fragment....... pass
10. mkcol................. pass
11. mkcol_again........... pass
12. delete_coll........... pass
13. mkcol_no_parent....... pass
14. mkcol_with_body....... pass
15. finish................ pass
<- summary for `basic': of 16 tests run: 16 passed, 0 failed. 100.0%
-> running `copymove':
0. init.................. pass
1. begin................. pass
2. copy_init............. pass
3. copy_simple........... pass
4. copy_overwrite........ pass
5. copy_nodestcoll....... pass
6. copy_cleanup.......... pass
7. copy_coll............. pass
8. copy_shallow.......... pass
9. move.................. pass
10. move_coll............. pass
11. move_cleanup.......... pass
12. finish................ pass
<- summary for `copymove': of 13 tests run: 13 passed, 0 failed. 100.0%
-> running `props':
0. init.................. pass
1. begin................. pass
2. propfind_invalid...... pass
3. propfind_invalid2..... pass
4. propfind_d0........... pass
5. propinit.............. pass
6. propset............... pass
7. propget............... pass
8. propextended.......... pass
9. propmove.............. pass
10. propget............... pass
11. propdeletes........... pass
12. propget............... pass
13. propreplace........... pass
14. propget............... pass
15. propnullns............ pass
16. propget............... pass
17. prophighunicode....... pass
18. propget............... pass
19. propremoveset......... pass
20. propget............... pass
21. propsetremove......... pass
22. propget............... pass
23. propvalnspace......... pass
24. propwformed........... pass
25. propinit.............. pass
26. propmanyns............ pass
27. propget............... pass
28. propcleanup........... pass
29. finish................ pass
<- summary for `props': of 30 tests run: 30 passed, 0 failed. 100.0%
-> running `locks':
0. init.................. pass
1. begin................. pass
2. options............... pass
3. precond............... pass
4. init_locks............ pass
5. put................... pass
6. lock_excl............. pass
7. discover.............. pass
8. refresh............... pass
9. notowner_modify....... pass
10. notowner_lock......... pass
11. owner_modify.......... pass
12. notowner_modify....... pass
13. notowner_lock......... pass
14. copy.................. pass
15. cond_put.............. pass
16. fail_cond_put......... pass
17. cond_put_with_not..... pass
18. cond_put_corrupt_token pass
19. complex_cond_put...... pass
20. fail_complex_cond_put. pass
21. unlock................ pass
22. fail_cond_put_unlocked pass
23. lock_shared........... pass
24. notowner_modify....... pass
25. notowner_lock......... pass
26. owner_modify.......... pass
27. double_sharedlock..... pass
28. notowner_modify....... pass
29. notowner_lock......... pass
30. unlock................ pass
31. prep_collection....... pass
32. lock_collection....... pass
33. owner_modify.......... pass
34. notowner_modify....... pass
35. refresh............... pass
36. indirect_refresh...... pass
37. unlock................ pass
38. unmapped_lock......... pass
39. unlock................ pass
40. finish................ pass
<- summary for `locks': of 41 tests run: 41 passed, 0 failed. 100.0%
-> running `http':
0. init.................. pass
1. begin................. pass
2. expect100............. pass
3. finish................ pass
<- summary for `http': of 4 tests run: 4 passed, 0 failed. 100.0%
martin@vbubuntu:~$
| {
"pile_set_name": "Github"
} |
#include <CtrlLib/CtrlLib.h>
using namespace Upp;
GUI_APP_MAIN
{
RichText txt = ParseQTF(LoadDataFile("Sample.qtf"));
String out = GetHomeDirFile("upp_test.rtf");
SaveFile(out, EncodeRTF(txt));
LaunchWebBrowser(out);
}
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
u"""Many a singular.
---
layout: post
source: Garner's Modern American Usage
source_url: http://bit.ly/1T4alrY
title: Many a singular.
date: 2014-06-10 12:31:19
categories: writing
---
The idiom 'many a' requires a singular verb.
"""
from proselint.tools import memoize, preferred_forms_check
@memoize
def check(text):
"""Suggest the preferred forms."""
err = "misc.many_a"
msg = "'many a' requires a singular verb."
preferences = [
["is many a", ["are many a"]],
["has been many a", ["have been many a"]],
["was many a", ["were many a"]],
]
    return preferred_forms_check(text, preferences, err, msg)
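# Usage sketch (illustrative, not part of the original module): each dispreferred
# plural form listed above yields one error tuple tagged "misc.many_a".
#
#     errors = check("There are many a slip between the cup and the lip.")
#     # -> one error suggesting the singular "is many a"
#     assert check("Many a writer is tempted by the plural.") == []
#
# The example sentences are hypothetical input, not fixtures from the project.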
| {
"pile_set_name": "Github"
} |
"Filed out from Dolphin Smalltalk 7"!
Object subclass: #Stream
instanceVariableNames: ''
classVariableNames: 'EndOfStreamSignal'
poolDictionaries: ''
classInstanceVariableNames: ''!
Stream guid: (GUID fromString: '{87b4c4c4-026e-11d3-9fd7-00a0cc3e4a32}')!
Stream isAbstract: true!
Stream comment: 'Stream represents a class of objects which can be used for streaming I/O. Stream itself is abstract.
InstanceVariables
None.
Class Variables:
EndOfStreamSignal <exceptionSignaler/exceptionSelector>. Exception to mark attempts to read off the end of the stream.'!
!Stream categoriesForClass!Collections-Streams! !
!Stream methodsFor!
atEnd
"Answer whether the receiver is at the end of its values."
^self subclassResponsibility!
basicContentsSpecies
"Private - Answer the class of Object to be used when answering collections of undecoded
elements from the receiver."
^Array!
basicNext
"Answer the next raw element accessible by the receiver."
"For all but encoded streams this is the same as #next"
^self next!
basicNext: anInteger
"Private - Answer a <sequencedReadableCollection> containing the next anInteger number of objects
accessible by the receiver."
| newStream |
newStream := self contentsSpecies writeStream: anInteger.
anInteger timesRepeat: [newStream basicNextPut: self basicNext].
^newStream contents!
basicNext: countInteger into: aSequenceableCollection startingAt: startInteger
"Private - Destructively replace the elements of the <sequenceableCollection> argument in the
<integer> interval (startAt..startAt+count-1) with the next countInteger undecoded elements
of the receiver. Answer aSequenceableCollection."
"Implementation Note: This will fail if the receiver is not readable."
startInteger to: startInteger + countInteger - 1
do: [:i | aSequenceableCollection basicAt: i put: self basicNext].
^aSequenceableCollection!
basicNextAvailable: anInteger
"Private - Answer up to anInteger undecoded elements of the receiver's collection. Generally, the
answer will be a collection of the same class as the one accessed by the receiver (though
this is determined by the receiver), and will contain anInteger undecoded elements, or as
many as are left in the receiver's collection."
| writeStream count |
count := 0.
writeStream := self basicContentsSpecies writeStream: anInteger.
[count == anInteger or: [self atEnd]] whileFalse:
[writeStream basicNextPut: self basicNext.
count := count + 1].
^writeStream grabContents!
close
"Relinquish any external resources associated with the receiver, and put the
receiver into a 'closed' state (as appropriate to the subclass). Answer the receiver."
^self!
contentsSpecies
"Private - Answer the class of Object to be used when answering collections of elements
from the receiver."
^Array!
display: anObject
"Ask anObject to append its end-user textual description to the receiver."
anObject displayOn: self!
do: aMonadicValuable
"Evaluate the <monadicValuable> argument for each of the receiver's future sequence values,
terminating only when there are no more future sequence values (i.e. the stream is at an
end). N.B. If evaluating the valuable has side effects on the receiver stream, then the
behaviour is undefined."
[self atEnd] whileFalse: [aMonadicValuable value: self next]!
elementSize
"Answer the size in bytes of the streamed over elements. It is an error if the stream has #pointers encoding. For example this is 1 for a #utf8 stream because each UTF-8 code unit requires one byte (and each UTF-8 code point may requried between 1 and 4 code units)."
^self contentsSpecies elementSize!
encoding
"Answer the encoding of the stream, one of:
- #binary, e.g. a stream over a ByteArray
- #pointers, e.g. a Stream over an Array
- #ansi, a Stream over an AnsiString
- #utf8, a Stream over a Utf8String
- #utf16le, a Stream over a Utf16String
- #utf32, a Stream over a Utf32String."
^self contentsSpecies encoding!
errorEndOfStream
"Raise an error to the effect that an attempt was made to read off the end of the collection
over which the receiver is streaming."
^self class endOfStreamSignal signalWith: self!
errorNotPositionable
^self error: 'The stream is not positionable'!
errorNotReadable
"Private - An attempt was made to read from a write-only stream. Raise an appropriate exception."
^self error: 'The stream is not readable'!
isPositionable
"Answer whether the receiver supports the #position[:] messages for querying and setting its position in the data stream."
^false!
isReadable
"Answer whether the receiver can be read from (i.e. it implements the gettableStream
protocol)."
^false!
isWriteable
"Answer whether the receiver can be written to (i.e. it implements the puttableStream
protocol)."
^false!
next
"Answer the next object accessible by the receiver."
"Implementation Note: Must be defined by subclasses which wish to implement the
<gettableStream> protocol."
^self errorNotReadable!
next: anInteger
"Answer a <sequencedReadableCollection> containing the next anInteger number of objects
accessible by the receiver."
| writeStream |
writeStream := self contentsSpecies writeStream: anInteger.
anInteger timesRepeat: [writeStream nextPut: self next].
^writeStream grabContents!
next: countInteger into: aSequenceableCollection startingAt: startInteger
"Destructively replace the elements of the <sequenceableCollection> argument in the
<integer> interval (startAt..startAt+count-1) with the next countInteger elements of the
receiver. Answer aSequenceableCollection."
| last pos |
last := startInteger + countInteger.
pos := startInteger.
[pos >= last] whileFalse: [pos := (aSequenceableCollection encodedAt: pos put: self next) + 1].
^aSequenceableCollection!
nextAvailable
"Answer the next of the receiver's future sequence values, or nil if at the end of the stream."
^self atEnd ifFalse: [self next]!
nextAvailable: anInteger
"Answer up to anInteger elements of the receiver's collection. Generally, the answer will be
a collection of the same class as the one accessed by the receiver (though this is
determined by the receiver), and will contain anInteger elements, or as many as are left in
the receiver's collection."
| newStream count |
newStream := self contentsSpecies writeStream: (count := anInteger).
[count == 0 or: [self atEnd]] whileFalse:
[newStream nextPut: self next.
count := count - 1].
^newStream contents!
nextInto: aSequenceableCollection
"Destructively replace all elements of the argument, aSequenceableCollection,
with the next elements of the receiver. Answer aSequenceableCollection."
^self next: aSequenceableCollection size into: aSequenceableCollection startingAt: 1!
nextMatchFor: anObject
"Access the next object and answer whether it is equal to the argument, anObject. Raise an
end of stream exception (via #next) if there are no more elements in the receiver."
^anObject = self next!
print: anObject
"Ask anObject to append its textual description to the receiver"
anObject printOn: self!
skipThrough: anObject
"Set the receivers position reference to be past the next occurrence of the argument,
anObject, in the collection. Answer the receiver, or nil if no such occurrence existed."
"Included for compatibility with VisualWorks - the ANSI standard message #skipTo: should be
used in preference"
[self atEnd] whileFalse: [self next = anObject ifTrue: [^self]].
^nil!
skipTo: anObject
"Set the receivers position reference to be past the next occurrence of the argument,
anObject, in the collection. Answer whether such an occurrence existed."
[self atEnd] whileFalse: [self next = anObject ifTrue: [^true]].
^false!
store: anObject
"Ask anObject to append its storeString to the receiver."
anObject storeOn: self!
upTo: anObject
"Answer a collection of elements starting with the next element accessed by the receiver,
and up to, not inclusive of, the next element that is equal to anObject. Positions the
stream after anObject if found. If anObject is not in the collection, answer the entire rest
of the collection. If the receiver is at its end, answer an empty Collection."
| newStream nextObject |
newStream := self contentsSpecies writeStream: 128.
[self atEnd or: [(nextObject := self next) = anObject]] whileFalse: [newStream nextPut: nextObject].
^newStream contents!
upToEnd
"Answer a collection consisting of the future sequence values of the receiver (i.e. from the
current position to the end)."
| remainder |
remainder := self contentsSpecies writeStream: 128.
[self atEnd] whileFalse: [remainder nextPut: self next].
^remainder contents! !
!Stream categoriesFor: #atEnd!public!testing! !
!Stream categoriesFor: #basicContentsSpecies!accessing!private! !
!Stream categoriesFor: #basicNext!accessing!public! !
!Stream categoriesFor: #basicNext:!accessing!private! !
!Stream categoriesFor: #basicNext:into:startingAt:!accessing!private! !
!Stream categoriesFor: #basicNextAvailable:!accessing!private! !
!Stream categoriesFor: #close!operations!public! !
!Stream categoriesFor: #contentsSpecies!constants!private! !
!Stream categoriesFor: #display:!printing!public! !
!Stream categoriesFor: #do:!enumerating!public! !
!Stream categoriesFor: #elementSize!constants!public! !
!Stream categoriesFor: #encoding!constants!public! !
!Stream categoriesFor: #errorEndOfStream!exceptions!public! !
!Stream categoriesFor: #errorNotPositionable!exceptions!private! !
!Stream categoriesFor: #errorNotReadable!exceptions!private! !
!Stream categoriesFor: #isPositionable!public!testing! !
!Stream categoriesFor: #isReadable!public!testing! !
!Stream categoriesFor: #isWriteable!public!testing! !
!Stream categoriesFor: #next!accessing!public! !
!Stream categoriesFor: #next:!accessing!public! !
!Stream categoriesFor: #next:into:startingAt:!accessing!public! !
!Stream categoriesFor: #nextAvailable!accessing!public! !
!Stream categoriesFor: #nextAvailable:!accessing!public! !
!Stream categoriesFor: #nextInto:!accessing!public! !
!Stream categoriesFor: #nextMatchFor:!accessing!public! !
!Stream categoriesFor: #print:!printing!public! !
!Stream categoriesFor: #skipThrough:!positioning!public! !
!Stream categoriesFor: #skipTo:!positioning!public! !
!Stream categoriesFor: #store:!printing!public! !
!Stream categoriesFor: #upTo:!accessing!public! !
!Stream categoriesFor: #upToEnd!accessing!public! !
!Stream class methodsFor!
endOfStreamSignal
"Answer an exceptionSignaler/exceptionSelector object raised when an attempt is
made to read off the end of the stream. Those interested in catching end of
Stream exceptions should use this accessor to get the exceptionSelector to pass
as the first parameter to an #on:do: message. Currently the answer is a Signal
instance, but it could be an Exception subclass in future."
^EndOfStreamSignal!
initialize
"Private - Initialize the receiver's class variables.
Stream initialize
"
EndOfStreamSignal := Signal description: 'End of stream'!
new
"Streams must be instantiated onto something."
^self shouldNotImplement! !
!Stream class categoriesFor: #endOfStreamSignal!constants!public! !
!Stream class categoriesFor: #initialize!development!initializing!private! !
!Stream class categoriesFor: #new!instance creation!public! !
| {
"pile_set_name": "Github"
} |
#! /bin/bash
charsets="$@"
base=$1
language=$(basename $(pwd))
target=$language.h
xlate () {
../xlt ../maps/$1.map ../maps/$2.map
}
# This is a recursive call.
if test -z "$base"; then
for d in `ls */doit.sh | cut -d/ -f1`; do
echo '[ '$d' ]'
cd ./$d >/dev/null
./doit.sh
cd .. >/dev/null
done
exit 0
fi
# Counts
for cs in $charsets; do
echo '+'$cs...
if test "$cs" != "$base"; then
if test -s $cs.xbase; then
mv -f $cs.xbase $cs.base
else
rm -f $cs.xbase 2>/dev/null
xlate $base $cs <$base.base >$cs.base
if test ! -s $cs.base; then
echo "Cannot create $cs.base" 2>&1
exit 1
fi
fi
fi
../basetoc $cs <$cs.base >$cs.c
done
# Pairs
if test -f paircounts.$base; then
if test `echo $charsets | wc -w` -gt 8; then
echo '*** Warning: more than 8 charsets. Expect coredumps... ***' 1>&2
fi
for cs in $charsets; do
echo '++'$cs...
if test -f paircounts.$cs; then
cp paircounts.$cs $cs.pair
else
xlate $base $cs <$base.pair >$cs.pair
fi
if test ! -s $cs.pair; then
echo "Cannot create $cs.pair" 2>&1
exit 1
fi
../pairtoc $cs ../letters/$cs.letters <$cs.pair >$cs.p.c
done
fi
# Totals
echo =totals...
../totals.pl $charsets
if test ! -s totals.c; then
echo "Cannot create totals.c" 2>&1
exit 1
fi
echo '>'$target...
echo "/***** THIS IS A GENERATED FILE. DO NOT TOUCH! *****/" >$target
for cs in $charsets; do
cat $cs.c >>$target
if test -s $cs.p.c; then
cat $cs.p.c >>$target
fi
echo >>$target
done
cat totals.c >>$target
echo done.
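# Usage sketch (illustrative, not part of the original script): invoked with no
# arguments it recurses into every subdirectory that has its own doit.sh;
# invoked with a charset list it regenerates <language>.h, the first argument
# doubling as the base charset the other counts are translated from, e.g.
# (hypothetical charset names that must match files under ../maps):
#
#   ./doit.sh iso-8859-1 iso-8859-2 cp1250 utf-8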
| {
"pile_set_name": "Github"
} |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef LIBMEMTRACK_WRAPPER_H_
#define LIBMEMTRACK_WRAPPER_H_
#include <stdint.h>
// Wrapper on top of libmemtrack API.
// Opaque structure with memory stats.
// See $ANDROID/system/core/libmemtrack/include/memtrack/memtrack.h for details.
struct libmemtrack_proc;
// These numbers are vendor-specific and can't be trusted as a stable metric
// across different hardware or driver versions.
class MemtrackProc {
public:
explicit MemtrackProc(int pid);
~MemtrackProc();
uint64_t graphics_total() const;
uint64_t graphics_pss() const;
uint64_t gl_total() const;
uint64_t gl_pss() const;
uint64_t other_total() const;
uint64_t other_pss() const;
bool has_errors() const { return proc_ == nullptr; };
private:
MemtrackProc(const MemtrackProc&) = delete;
void operator=(const MemtrackProc&) = delete;
libmemtrack_proc* proc_ = nullptr;
};
#endif // LIBMEMTRACK_WRAPPER_H_
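// Usage sketch (illustrative, not part of the original header):
//
//   MemtrackProc memtrack(pid);          // pid of the process to inspect
//   if (!memtrack.has_errors()) {
//     uint64_t gpu_total = memtrack.graphics_total() + memtrack.gl_total();
//   }
//
// As the comment above notes, the returned counters are vendor-specific, so
// they are only meaningful when compared on the same hardware and driver version.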
| {
"pile_set_name": "Github"
} |
version https://git-lfs.github.com/spec/v1
oid sha256:8ae7a9c05c6db195ca82c69e2e24335d8671210e12471de2b8196f109817d79c
size 7725
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "thread.h"
#include "asm_support_mips.h"
#include "base/logging.h"
namespace art {
void Thread::InitCpu() {
CHECK_EQ(THREAD_FLAGS_OFFSET, ThreadFlagsOffset<4>().Int32Value());
CHECK_EQ(THREAD_CARD_TABLE_OFFSET, CardTableOffset<4>().Int32Value());
CHECK_EQ(THREAD_EXCEPTION_OFFSET, ExceptionOffset<4>().Int32Value());
}
void Thread::CleanupCpu() {
// Do nothing.
}
} // namespace art
| {
"pile_set_name": "Github"
} |
/** Declaration of extension methods for base additions
Copyright (C) 2003-2010 Free Software Foundation, Inc.
Written by: Richard Frith-Macdonald <[email protected]>
and: Adam Fedor <[email protected]>
This file is part of the GNUstep Base Library.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02111 USA.
*/
#ifndef INCLUDED_NSLock_GNUstepBase_h
#define INCLUDED_NSLock_GNUstepBase_h
#import <GNUstepBase/GSVersionMacros.h>
#import <Foundation/NSLock.h>
#if defined(__cplusplus)
extern "C" {
#endif
#if OS_API_VERSION(GS_API_NONE,GS_API_LATEST)
/**
* Returns IDENT which will be initialized
* to an instance of a CLASSNAME in a thread safe manner.
* If IDENT has been previously initialized
* this macro merely returns IDENT.
* IDENT is considered uninitialized, if it contains nil.
* CLASSNAME must be either NSLock, NSRecursiveLock or one
* of their subclasses.
* See [NSLock+newLockAt:] for details.
* This macro is intended for code that cannot otherwise ensure
* that a lock is initialized in a thread-safe manner.
* <example>
* NSLock *my_lock = nil;
*
* void function (void)
* {
* [GS_INITIALIZED_LOCK(my_lock, NSLock) lock];
* do_work ();
* [my_lock unlock];
* }
*
* </example>
*/
#define GS_INITIALIZED_LOCK(IDENT,CLASSNAME) \
(IDENT != nil ? (id)IDENT : (id)[CLASSNAME newLockAt: &IDENT])
@interface NSLock (GNUstepBase)
/**
* Initializes the id pointed to by location
* with a new instance of the receiver's class
* in a thread safe manner, unless
* it has been previously initialized.
* Returns the contents pointed to by location.
* The location is considered uninitialized if it contains nil.
* <br/>
* This method is used in the GS_INITIALIZED_LOCK macro
* to initialize lock variables when it cannot be ensured
* that they can be initialized in a thread-safe environment.
* <example>
* NSLock *my_lock = nil;
*
* void function (void)
* {
* [GS_INITIALIZED_LOCK(my_lock, NSLock) lock];
* do_work ();
* [my_lock unlock];
* }
*
* </example>
*/
+ (id) newLockAt: (id *)location;
@end
@interface NSRecursiveLock (GNUstepBase)
/**
* Initializes the id pointed to by location
* with a new instance of the receiver's class
* in a thread safe manner, unless
* it has been previously initialized.
* Returns the contents pointed to by location.
* The location is considered uninitialized if it contains nil.
* <br/>
* This method is used in the GS_INITIALIZED_LOCK macro
* to initialize lock variables when it cannot be ensured
* that they can be initialized in a thread-safe environment.
* <example>
* NSLock *my_lock = nil;
*
* void function (void)
* {
* [GS_INITIALIZED_LOCK(my_lock, NSLock) lock];
* do_work ();
* [my_lock unlock];
* }
*
* </example>
*/
+ (id) newLockAt: (id *)location;
@end
#endif /* OS_API_VERSION */
#if defined(__cplusplus)
}
#endif
#endif /* INCLUDED_NSLock_GNUstepBase_h */
| {
"pile_set_name": "Github"
} |
package de.unihd.dbs.heideltime.standalone.components.impl;
import java.io.InputStream;
import java.net.URI;
import java.net.URL;
import java.util.HashMap;
import org.apache.uima.UimaContext;
import org.apache.uima.cas.AbstractCas;
import org.apache.uima.cas.SofaID;
import org.apache.uima.resource.ResourceAccessException;
import org.apache.uima.resource.Session;
import org.apache.uima.util.InstrumentationFacility;
import org.apache.uima.util.Logger;
@SuppressWarnings("deprecation")
public class StandaloneConfigContext implements UimaContext {
private HashMap<String, Object> settings = new HashMap<String, Object>();
@Override
public Object getConfigParameterValue(String aParamName) {
return settings.get(aParamName);
}
public void setConfigParameterValue(String aParamName, Object aParamValue) {
settings.put(aParamName, aParamValue);
}
@Override
public Object getConfigParameterValue(String aGroupName, String aParamName) {
return settings.get(aParamName);
}
/*
* leave these defunct because we don't use them for now
*/
@Override
public String[] getConfigurationGroupNames() {
// TODO Auto-generated method stub
return null;
}
@Override
public String[] getConfigParameterNames(String aGroup) {
// TODO Auto-generated method stub
return null;
}
@Override
public String[] getConfigParameterNames() {
// TODO Auto-generated method stub
return null;
}
@Override
public Logger getLogger() {
// TODO Auto-generated method stub
return null;
}
@Override
public InstrumentationFacility getInstrumentationFacility() {
// TODO Auto-generated method stub
return null;
}
@Override
public URL getResourceURL(String aKey) throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public URI getResourceURI(String aKey) throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public String getResourceFilePath(String aKey)
throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public InputStream getResourceAsStream(String aKey)
throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public Object getResourceObject(String aKey) throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public URL getResourceURL(String aKey, String[] aParams)
throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public URI getResourceURI(String aKey, String[] aParams)
throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public String getResourceFilePath(String aKey, String[] aParams)
throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public InputStream getResourceAsStream(String aKey, String[] aParams)
throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public Object getResourceObject(String aKey, String[] aParams)
throws ResourceAccessException {
// TODO Auto-generated method stub
return null;
}
@Override
public String getDataPath() {
// TODO Auto-generated method stub
return null;
}
@Override
public Session getSession() {
// TODO Auto-generated method stub
return null;
}
@Override
public SofaID mapToSofaID(String aSofaName) {
// TODO Auto-generated method stub
return null;
}
@Override
public String mapSofaIDToComponentSofaName(String aSofaID) {
// TODO Auto-generated method stub
return null;
}
@Override
public SofaID[] getSofaMappings() {
// TODO Auto-generated method stub
return null;
}
@Override
@SuppressWarnings("rawtypes")
public AbstractCas getEmptyCas(Class aCasInterface) {
// TODO Auto-generated method stub
return null;
}
}
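// Usage sketch (illustrative, not part of the original class): in standalone mode
// this context appears to serve as a plain parameter map for HeidelTime's UIMA
// components, so only the getConfigParameterValue variants and
// setConfigParameterValue do real work; every other UimaContext method is a
// deliberate stub returning null.
//
//   StandaloneConfigContext ctx = new StandaloneConfigContext();
//   ctx.setConfigParameterValue("language", "english");   // parameter name is hypothetical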
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export enum apiUrls {
GET_SERVICES = '/v4/:appId/govern/microservices',
GET_SERVICE_SCHEMAS = '/v4/:appId/registry/microservices/:serviceId/schemas',
GET_SERVICE_INSTANCES = '/v4/:appId/registry/microservices/:serviceId/instances',
GET_SERVICE = '/v4/:appId/govern/microservices/:serviceId'
}
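// Usage sketch (illustrative, not part of the original file): ":appId" and
// ":serviceId" are path parameters substituted by the caller. A minimal helper,
// assuming simple string replacement is sufficient:
//
//   function fillUrl(template: apiUrls, params: Record<string, string>): string {
//     return Object.keys(params).reduce(
//       (url, key) => url.replace(`:${key}`, params[key]), template as string);
//   }
//
//   fillUrl(apiUrls.GET_SERVICE, { appId: 'default', serviceId: '42' });
//   // -> '/v4/default/govern/microservices/42'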
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2001-2016 Food and Agriculture Organization of the
* United Nations (FAO-UN), United Nations World Food Programme (WFP)
* and United Nations Environment Programme (UNEP)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or (at
* your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*
* Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
* Rome - Italy. email: [email protected]
*/
package org.fao.geonet.api.records.formatters.groovy.template;
import com.google.common.collect.Lists;
import org.fao.geonet.SystemInfo;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;
import static org.junit.Assert.assertEquals;
public abstract class AbstractTemplateParserTest {
public static void assertCorrectRender(TNode parseTree, Map<String, Object> model, String expected) throws IOException {
final ByteArrayOutputStream result = new ByteArrayOutputStream();
TRenderContext context = new TRenderContext(result, model);
parseTree.render(context);
assertEquals(expected + "\n" + result, expected.replaceAll("\\n|\\r|\\s+", ""), result.toString().replaceAll("\\n|\\r|\\s+", ""));
}
public static TemplateParser createTestParser(String systemInfoStage) throws IllegalAccessException, InstantiationException {
SystemInfo info = SystemInfo.createForTesting(systemInfoStage);
final TemplateParser parser = new TemplateParser();
final TextContentParser contentParser = TextContentParserTest.createTestTextContentParser();
parser.textContentParser = contentParser;
parser.tnodeFactories = Lists.<TNodeFactory>newArrayList(
new TNodeFactoryIf(info, contentParser), new TNodeFactoryRepeat(info, contentParser), new TNodeFactoryTranslate(info, contentParser),
new TNodeFactoryInclude(info, contentParser), new TNodeFactoryTransclude(info, contentParser));
return parser;
}
}
| {
"pile_set_name": "Github"
} |
/*
*****************************************************************************
**
** File : LinkerScript.ld
**
** Abstract : Linker script for STM32F107VCTx Device with
** 256KByte FLASH, 64KByte RAM
**
** Set heap size, stack size and stack location according
** to application requirements.
**
** Set memory bank area and size if external memory is used.
**
** Target : STMicroelectronics STM32
**
**
** Distribution: The file is distributed as is, without any warranty
** of any kind.
**
** (c)Copyright Ac6.
** You may use this file as-is or modify it according to the needs of your
** project. Distribution of this file (unmodified or modified) is not
** permitted. Ac6 permit registered System Workbench for MCU users the
** rights to distribute the assembled, compiled & linked contents of this
** file as part of an application binary file, provided that it is built
** using the System Workbench for MCU toolchain.
**
*****************************************************************************
*/
/* Entry Point */
ENTRY(Reset_Handler)
/* Highest address of the user mode stack */
_estack = 0x20010000; /* end of RAM */
/* Generate a link error if heap and stack don't fit into RAM */
_Min_Heap_Size = 0x1000; /* required amount of heap */
_Min_Stack_Size = 0x1400; /* required amount of stack */
/* Specify the memory areas */
MEMORY
{
RAM (xrw) : ORIGIN = 0x20000000, LENGTH = 64K
FLASH (rx) : ORIGIN = 0x8000000, LENGTH = 256K
}
/* Define output sections */
SECTIONS
{
/* The startup code goes first into FLASH */
.isr_vector :
{
. = ALIGN(4);
KEEP(*(.isr_vector)) /* Startup code */
. = ALIGN(4);
} >FLASH
/* The program code and other data goes into FLASH */
.text :
{
. = ALIGN(4);
*(.text) /* .text sections (code) */
*(.text*) /* .text* sections (code) */
*(.glue_7) /* glue arm to thumb code */
*(.glue_7t) /* glue thumb to arm code */
*(.eh_frame)
KEEP (*(.init))
KEEP (*(.fini))
. = ALIGN(4);
_etext = .; /* define a global symbols at end of code */
} >FLASH
/* Constant data goes into FLASH */
.rodata :
{
. = ALIGN(4);
*(.rodata) /* .rodata sections (constants, strings, etc.) */
*(.rodata*) /* .rodata* sections (constants, strings, etc.) */
. = ALIGN(4);
} >FLASH
.ARM.extab : { *(.ARM.extab* .gnu.linkonce.armextab.*) } >FLASH
.ARM : {
__exidx_start = .;
*(.ARM.exidx*)
__exidx_end = .;
} >FLASH
.preinit_array :
{
PROVIDE_HIDDEN (__preinit_array_start = .);
KEEP (*(.preinit_array*))
PROVIDE_HIDDEN (__preinit_array_end = .);
} >FLASH
.init_array :
{
PROVIDE_HIDDEN (__init_array_start = .);
KEEP (*(SORT(.init_array.*)))
KEEP (*(.init_array*))
PROVIDE_HIDDEN (__init_array_end = .);
} >FLASH
.fini_array :
{
PROVIDE_HIDDEN (__fini_array_start = .);
KEEP (*(SORT(.fini_array.*)))
KEEP (*(.fini_array*))
PROVIDE_HIDDEN (__fini_array_end = .);
} >FLASH
/* used by the startup to initialize data */
_sidata = LOADADDR(.data);
/* Initialized data sections goes into RAM, load LMA copy after code */
.data :
{
. = ALIGN(4);
_sdata = .; /* create a global symbol at data start */
*(.data) /* .data sections */
*(.data*) /* .data* sections */
. = ALIGN(4);
_edata = .; /* define a global symbol at data end */
} >RAM AT> FLASH
/* Uninitialized data section */
. = ALIGN(4);
.bss :
{
/* This is used by the startup in order to initialize the .bss section */
_sbss = .; /* define a global symbol at bss start */
__bss_start__ = _sbss;
*(.bss)
*(.bss*)
*(COMMON)
. = ALIGN(4);
_ebss = .; /* define a global symbol at bss end */
__bss_end__ = _ebss;
} >RAM
/* User_heap_stack section, used to check that there is enough RAM left */
._user_heap_stack :
{
. = ALIGN(8);
PROVIDE ( end = . );
PROVIDE ( _end = . );
. = . + _Min_Heap_Size;
. = . + _Min_Stack_Size;
. = ALIGN(8);
} >RAM
/* Remove information from the standard libraries */
/DISCARD/ :
{
libc.a ( * )
libm.a ( * )
libgcc.a ( * )
}
.ARM.attributes 0 : { *(.ARM.attributes) }
}
| {
"pile_set_name": "Github"
} |
{
"DESCRIPTION": "Generates a \"phcomment\" image using Nekobot API",
"USAGE": "{{prefix}}phcomment (@member) [text]",
"EXAMPLES": "{{prefix}}phcomment Hi!\n{{prefix}}phcomment @Androz#2091 Hi!",
"MISSING_TEXT": "Please specify the comment text!"
} | {
"pile_set_name": "Github"
} |
> ### GitAds
> `DKImagePickerController` is being sponsored by the following tool; please help to support us by taking a look and signing up to a free trial:
> [<img src="https://images.gitads.io/DKImagePickerController" alt="GitAds"/>](https://tracking.gitads.io/?repo=DKImagePickerController)
DKImagePickerController
=======================
[](http://travis-ci.org/zhangao0086/DKImagePickerController) [][docsLink] [][mitLink] [](https://github.com/Carthage/Carthage)
<img width="50%" height="50%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot3.png" /><img width="50%" height="50%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot4.png" />
---
<img width="50%" height="50%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot11.png" /><img width="50%" height="50%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot6.png" />
---
## Description
`DKImagePickerController` is a highly customizable, Pure-Swift library.
### Features
* Supports both single and multiple selection.
* Supports filtering albums and sorting by type.
* Supports landscape, iPad, and orientation switching.
* iCloud Support.
* Supports batch exporting `PHAsset` to local files.
* Inline mode Support.
* Customizable `UICollectionViewLayout`.
* Customizable `camera`, `photo gallery` and `photo editor`.
* Dark Mode Support
## Requirements
* iOS 9.0+ (Drop support for iOS 8 in 4.3.0 or above)
* ARC
* Swift 4 & 5
## Installation
### CocoaPods
#### iOS 9 and newer
DKImagePickerController is available on CocoaPods. Simply add the following line to your podfile:
```
# For latest release in cocoapods
pod 'DKImagePickerController'
```
#### For Swift 4.1
```
pod 'DKImagePickerController', :git => 'https://github.com/zhangao0086/DKImagePickerController.git', :branch => 'Swift4'
```
#### For iOS 8
```
pod 'DKImagePickerController', :git => 'https://github.com/zhangao0086/DKImagePickerController.git', :branch => 'iOS8'
```
#### Subspecs
There are 7 subspecs available now:
| Subspec | Description |
|---|---|
| Core | Required. |
| ImageDataManager | Required. The subspec provides data to `DKImagePickerController`. |
| Resource | Required. The subspec provides resource management and internationalization. |
| PhotoGallery | Optional. The subspec provides preview feature for PHAsset. |
| Camera | Optional. The subspec provides camera feature. |
| InlineCamera | Optional. The subspec should be pushed by `UINavigationController`, like `UIImagePickerController` with `UIImagePickerControllerSourceType.camera`. |
| PhotoEditor | Optional. The subspec provides basic image editing features. |
This means you can install only some of the `DKImagePickerController` modules. By default, you get all subspecs.
If you need to use your own photo editor, simply specify subspecs other than `PhotoEditor`:
```ruby
pod 'DKImagePickerController', :subspecs => ['PhotoGallery', 'Camera', 'InlineCamera']
```
More information, see [Extensions](#extensions).
### Carthage
```
github "zhangao0086/DKImagePickerController"
```
If you use Carthage to build your dependencies, make sure you have added `CropViewController.framework`, `DKCamera.framework`, `DKImagePickerController.framework`, `DKPhotoGallery.framework` and `SDWebImage.framework` to the _"Linked Frameworks and Libraries"_ section of your target, and have included them in your Carthage framework copying build phase.
## Getting Started
#### Initialization and presentation
```swift
let pickerController = DKImagePickerController()
pickerController.didSelectAssets = { (assets: [DKAsset]) in
print("didSelectAssets")
print(assets)
}
self.presentViewController(pickerController, animated: true) {}
```
#### Configurations
```swift
/// Use UIDelegate to Customize the picker UI.
@objc public var UIDelegate: DKImagePickerControllerBaseUIDelegate!
/// Forces deselection of the previously selected image. allowSwipeToSelect will be ignored.
@objc public var singleSelect = false
/// Auto close picker on single select
@objc public var autoCloseOnSingleSelect = true
/// The maximum count of assets which the user will be able to select, a value of 0 means no limit.
@objc public var maxSelectableCount = 0
/// Photos will be tagged with the location where they are taken.
/// If true, your Info.plist should include the "Privacy - Location XXX" tag.
open var containsGPSInMetadata = false
/// Set the defaultAssetGroup to specify which album is the default asset group.
public var defaultAssetGroup: PHAssetCollectionSubtype?
/// Allow swipe to select images.
@objc public var allowSwipeToSelect: Bool = false
/// Allow select all
@objc public var allowSelectAll: Bool = false
/// A Bool value indicating whether the inline mode is enabled.
@objc public var inline: Bool = false
/// The type of picker interface to be displayed by the controller.
@objc public var assetType: DKImagePickerControllerAssetType = .allAssets
/// If sourceType is Camera, the assetType & maxSelectableCount & allowMultipleTypes & defaultSelectedAssets will be ignored.
@objc public var sourceType: DKImagePickerControllerSourceType = .both
/// A Bool value indicating whether allows to select photos and videos at the same time.
@objc public var allowMultipleTypes = true
/// A Bool value indicating whether to allow the picker auto-rotate the screen.
@objc public var allowsLandscape = false
/// Set the showsEmptyAlbums to specify whether or not the empty albums is shown in the picker.
@objc public var showsEmptyAlbums = true
/// A Bool value indicating whether to allow the picker shows the cancel button.
@objc public var showsCancelButton = false
/// The block is executed when the user presses the cancel button.
@objc public var didCancel: (() -> Void)?
/// The block is executed when the user presses the select button.
@objc public var didSelectAssets: ((_ assets: [DKAsset]) -> Void)?
/// The block is executed when the number of selected assets is changed.
@objc public var selectedChanged: (() -> Void)?
/// A Bool value indicating whether to allow the picker to auto-export the selected assets to the specified directory when done is called.
/// The picker will create a default exporter if exportsWhenCompleted is true and the exporter is nil.
@objc public var exportsWhenCompleted = false
@objc public var exporter: DKImageAssetExporter?
/// Indicates the status of the exporter.
@objc public private(set) var exportStatus = DKImagePickerControllerExportStatus.none {
willSet {
if self.exportStatus != newValue {
self.willChangeValue(forKey: #keyPath(DKImagePickerController.exportStatus))
}
}
didSet {
if self.exportStatus != oldValue {
self.didChangeValue(forKey: #keyPath(DKImagePickerController.exportStatus))
self.exportStatusChanged?(self.exportStatus)
}
}
}
/// The block is executed when the exportStatus is changed.
@objc public var exportStatusChanged: ((DKImagePickerControllerExportStatus) -> Void)?
/// The object that acts as the data source of the picker.
@objc public private(set) lazy var groupDataManager: DKImageGroupDataManager
```
## Inline Mode
<img width="30%" height="30%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot11.png" />
```swift
let groupDataManagerConfiguration = DKImageGroupDataManagerConfiguration()
groupDataManagerConfiguration.fetchLimit = 10
groupDataManagerConfiguration.assetGroupTypes = [.smartAlbumUserLibrary]
let groupDataManager = DKImageGroupDataManager(configuration: groupDataManagerConfiguration)
self.pickerController = DKImagePickerController(groupDataManager: groupDataManager)
pickerController.inline = true
pickerController.UIDelegate = CustomInlineLayoutUIDelegate(imagePickerController: pickerController)
pickerController.assetType = .allPhotos
pickerController.sourceType = .photo
let pickerView = self.pickerController.view!
pickerView.frame = CGRect(x: 0, y: 170, width: self.view.bounds.width, height: 200)
self.view.addSubview(pickerView)
```
## Customizable UI
<img width="30%" height="30%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot6.png" />
For example, see [CustomUIDelegate](https://github.com/zhangao0086/DKImagePickerController/tree/develop/Example/DKImagePickerControllerDemo/CustomUIDelegate).
## Customizable Layout
<img width="30%" height="30%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot10.png" />
For example, see [CustomLayoutUIDelegate](https://github.com/zhangao0086/DKImagePickerController/tree/develop/Example/DKImagePickerControllerDemo/CustomLayoutUIDelegate).
### Conforms UIAppearance protocol
<img width="30%" height="30%" src="https://raw.githubusercontent.com/zhangao0086/DKImagePickerController/develop/Screenshot9.png" />
You can easily customize the appearance of the navigation bar using the appearance proxy.
```swift
UINavigationBar.appearance().titleTextAttributes = [
NSFontAttributeName : UIFont(name: "Optima-BoldItalic", size: 21)!,
NSForegroundColorAttributeName : UIColor.redColor()
]
```
## Exporting to file
By default, the picker uses a singleton object of `DKImageAssetExporter` to export `DKAsset` to local files.
```swift
/*
Configuration options for a DKImageAssetExporter. When an exporter is created,
a copy of the configuration object is made - you cannot modify the configuration
of an exporter after it has been created.
*/
@objc
public class DKImageAssetExporterConfiguration: NSObject, NSCopying {
@objc public var imageExportPreset = DKImageExportPresent.compatible
/// videoExportPreset can be used to specify the transcoding quality for videos (via a AVAssetExportPreset* string).
@objc public var videoExportPreset = AVAssetExportPresetHighestQuality
#if swift(>=4.0)
@objc public var avOutputFileType = AVFileType.mov
#else
@objc public var avOutputFileType = AVFileTypeQuickTimeMovie
#endif
@objc public var exportDirectory = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("DKImageAssetExporter")
}
/*
A DKImageAssetExporter object exports DKAsset(PHAsset) from album (or iCloud) to the app's tmp directory (by default).
It automatically deletes the exported directories when it receives a UIApplicationWillTerminate notification.
*/
@objc
open class DKImageAssetExporter: DKBaseManager {
/// This method starts an asynchronous export operation of a batch of assets.
@discardableResult
@objc public func exportAssetsAsynchronously(assets: [DKAsset], completion: ((_ info: [AnyHashable : Any]) -> Void)?) -> DKImageAssetExportRequestID
}
```
This exporter can automatically convert HEIF to JPEG:
```swift
@objc
public enum DKImageExportPresent: Int {
case
compatible, // A preset for converting HEIF formatted images to JPEG.
current // A preset for passing image data as-is to the client.
}
```
You also can observe the export progress of each asset:
```swift
@objc
public protocol DKImageAssetExporterObserver {
@objc optional func exporterWillBeginExporting(exporter: DKImageAssetExporter, asset: DKAsset)
/// The progress can be obtained from the DKAsset.
@objc optional func exporterDidUpdateProgress(exporter: DKImageAssetExporter, asset: DKAsset)
/// When the asset's error is not nil, it indicates that an error occurred while exporting.
@objc optional func exporterDidEndExporting(exporter: DKImageAssetExporter, asset: DKAsset)
}
extension DKAsset {
/// The exported file will be placed in this location.
/// All exported files can be automatically cleaned by the DKImageAssetDiskPurger when appropriate.
@objc public var localTemporaryPath: URL?
@objc public var fileName: String?
/// Indicates the file's size in bytes.
@objc public var fileSize: UInt
/// If you export an asset whose data is not on the local device, and you have enabled downloading with the isNetworkAccessAllowed property, the progress indicates the progress of the download. A value of 0.0 indicates that the download has just started, and a value of 1.0 indicates the download is complete.
@objc public var progress: Double
/// Describes the error that occurred if the export has failed or been cancelled.
@objc public var error: Error?
}
```
For example, see `Export automatically` and `Export manually`.
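As a rough illustration of the automatic path, here is a minimal sketch that relies only on the `exportsWhenCompleted` flag and the `DKAsset` properties listed above (not on the demo code):

```swift
// Minimal sketch: let the picker export selections with its default exporter.
let picker = DKImagePickerController()
picker.exportsWhenCompleted = true
picker.didSelectAssets = { (assets: [DKAsset]) in
    for asset in assets {
        if let error = asset.error {
            // Export failed or was cancelled for this asset.
            print("Export failed for \(asset.fileName ?? "asset"): \(error)")
        } else if let url = asset.localTemporaryPath {
            // The exported file is available at this temporary location.
            print("Exported \(asset.fileName ?? "asset") (\(asset.fileSize) bytes) to \(url)")
        }
    }
}
```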
## Extensions
This picker uses `DKImageExtensionController` to manage all extensions; you can register it with a `DKImageBaseExtension` subclass and a specified `DKImageExtensionType` to customize `camera`, `photo gallery` and `photo editor`:
```swift
/// Registers an extension for the specified type.
public class func registerExtension(extensionClass: DKImageBaseExtension.Type, for type: DKImageExtensionType)
public class func unregisterExtension(for type: DKImageExtensionType)
```
The `perform` function will be called with a dictionary providing current context information when an extension is triggered:
```swift
/// Starts the extension.
func perform(with extraInfo: [AnyHashable: Any])
/// Completes the extension.
func finish()
```
The `extraInfo` will provide different information for different `DKImageExtensionType`:
##### Camera
```swift
let didFinishCapturingImage = extraInfo["didFinishCapturingImage"] as? ((UIImage, [AnyHashable : Any]?) -> Void)
let didCancel = extraInfo["didCancel"] as? (() -> Void)
```
For a custom camera example, see [CustomCameraExtension](https://github.com/zhangao0086/DKImagePickerController/tree/develop/Example/DKImagePickerControllerDemo/CustomCamera).
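A bare-bones registration might look like the sketch below. The `.camera` case name and the assumption that overriding `perform(with:)` is all that is required are mine; the CustomCameraExtension demo shows the complete wiring.

```swift
// Hypothetical minimal custom camera extension (see the demo for a real one).
class MyCameraExtension: DKImageBaseExtension {
    override func perform(with extraInfo: [AnyHashable: Any]) {
        // The two callbacks documented above.
        let didFinishCapturingImage = extraInfo["didFinishCapturingImage"] as? ((UIImage, [AnyHashable : Any]?) -> Void)
        let didCancel = extraInfo["didCancel"] as? (() -> Void)
        // Present your own capture UI here, invoke one of the callbacks with the
        // result, then call finish() to hand control back to the picker.
        _ = (didFinishCapturingImage, didCancel)
    }
}

// Register it before presenting the picker (assumes the `.camera` type case).
DKImageExtensionController.registerExtension(extensionClass: MyCameraExtension.self, for: .camera)
```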
##### InlineCamera
The `extraInfo` is the same as for `Camera`.
##### Photo Gallery
```swift
let groupId = extraInfo["groupId"] as? String
let presentationIndex = extraInfo["presentationIndex"] as? Int
let presentingFromImageView = extraInfo["presentingFromImageView"] as? UIImageView
```
##### Photo Editor
```swift
let image = extraInfo["image"] as? UIImage
let didFinishEditing = extraInfo["didFinishEditing"] as? ((UIImage, [AnyHashable : Any]?) -> Void)
let metadata = extraInfo["metadata"] as? [AnyHashable : Any]
```
## How to use in Objective-C
#### If you use [CocoaPods](http://cocoapods.org/)
* Add the following two lines into your `Podfile`:
```
pod 'DKImagePickerController'
use_frameworks!
```
* Import the library into your Objective-C file:
```objective-c
#import <DKImagePickerController/DKImagePickerController-Swift.h>
```
#### If you use it directly in your project
> See also: [Swift and Objective-C in the Same Project](https://developer.apple.com/library/ios/documentation/Swift/Conceptual/BuildingCocoaApps/MixandMatch.html)
* Drag and drop the [DKCamera][DKCamera], `DKImageManager` and `DKImagePickerController` to your project
* Import the library into your Objective-C file:
```objective-c
#import "YourProductModuleName-Swift.h"
```
---
then you can:
```objective-c
DKImagePickerController *pickerController = [DKImagePickerController new];
[pickerController setDidSelectAssets:^(NSArray * __nonnull assets) {
NSLog(@"didSelectAssets");
}];
[self presentViewController:pickerController animated:YES completion:nil];
```
## Localization
The default supported languages:
> en, es, da, de, fr, hu, ja, ko, nb-NO, pt_BR, ru, tr, ur, vi, ar, it, zh-Hans, zh-Hant
You can also add a hook to return your own localized string:
```swift
DKImagePickerControllerResource.customLocalizationBlock = { title in
if title == "picker.select.title" {
return "Test(%@)"
} else {
return nil
}
}
```
or images:
```swift
DKImagePickerControllerResource.customImageBlock = { imageName in
if imageName == "camera" {
return DKImagePickerControllerResource.photoGalleryCheckedImage()
} else {
return nil
}
}
```
## Contributing to this project
If you have feature requests or bug reports, feel free to help out by sending pull requests or by creating new issues.
## License
DKImagePickerController is released under the MIT license. See LICENSE for details.
[mitLink]:http://opensource.org/licenses/MIT
[docsLink]:http://cocoadocs.org/docsets/DKImagePickerController
[DKCamera]:https://github.com/zhangao0086/DKCamera
| {
"pile_set_name": "Github"
} |
import React from 'react';
import TestRenderer, { act } from 'react-test-renderer';
import Wasm from '../src/Wasm';
import bytes from './bytes';
import bytesWithImport from './bytes-imports';
// mock fetch
const fetchMock = url => new Promise((resolve, reject) => {
let arrayBuffer = new ArrayBuffer([]);
if (url.match(/bytes\.wasm/)) {
arrayBuffer = bytes.buffer;
} else if (url.match(/bytes-imports\.wasm/)) {
arrayBuffer = bytesWithImport.buffer;
} else {
reject(new Error('404'));
return;
}
resolve({
arrayBuffer: () => arrayBuffer
});
});
const delay = (time = 3000) =>
new Promise(resolve =>
setTimeout(() => {
resolve();
}, time)
);
const importObject = {
imports: {
add_js: (a, b) => a + b,
}
};
describe('Wasm', () => {
beforeEach(() => {
global.fetch = fetchMock;
});
it('should set loading', () => {
global.fetch = (...params) => delay().then(() => fetchMock(...params));
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url="/bytes.wasm">
{props => {
result = props;
return JSON.stringify(props);
}}
</Wasm>
);
});
expect(result).toMatchObject({
loading: true,
error: null,
data: null
});
expect(testRenderer.toJSON()).toMatchSnapshot();
});
it('should set error if no url and bufferSource are provided', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm>
{props => {
result = props;
return JSON.stringify(props);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
data: null
});
expect(result.error).toBeInstanceOf(Error);
expect(result.error.message).toEqual(
'Can\'t instantiate WebAssembly module, invalid parameters.'
);
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should set error if invalid url is provided', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url="/error_404.wasm">
{props => {
result = props;
return JSON.stringify(props);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
data: null
});
expect(result.error).toBeInstanceOf(Error);
expect(result.error.message).toEqual('404');
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should set error if url is provided with wrong type', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url={5}>
{props => {
result = props;
return JSON.stringify(props);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
data: null
});
expect(result.error).toBeInstanceOf(Error);
expect(result.error.message).toEqual(
'Can\'t instantiate WebAssembly module, invalid parameters.'
);
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should set error if invalid ArrayBuffer is provided', done => {
const invalidBytes = new Uint8Array(bytes);
invalidBytes[0] = bytes[0] + 1;
expect(invalidBytes[0]).not.toEqual(bytes[0]);
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm bufferSource={invalidBytes}>
{props => {
result = props;
return JSON.stringify(props);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
data: null
});
expect(result.error).toBeInstanceOf(Error);
expect(result.error.message.match(
/Wasm decoding failed: expected magic word 00 61 73 6d, found 01 61 73 6d @\+0/
)).toBeTruthy();
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should set error if bufferSource is provided with wrong type', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm bufferSource={42}>
{props => {
result = props;
return JSON.stringify(props);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
data: null
});
expect(result.error).toBeInstanceOf(Error);
expect(result.error.message).toEqual(
'WebAssembly.compile(): Argument 0 must be a buffer source'
);
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should load module from ArrayBuffer', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm bufferSource={bytes}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should load module from url', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url="/bytes.wasm">
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should load module from ArrayBuffer with importObject', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm bufferSource={bytesWithImport} importObject={importObject}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add_js(1, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should load module from url with importObject', done => {
let result;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url="/bytes-imports.wasm" importObject={importObject}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add_js(1, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
done();
}, 1000);
});
it('should instantiate a module from a url using instantiateStreaming', done => {
const originalInstantiateStreaming = WebAssembly.instantiateStreaming;
WebAssembly.instantiateStreaming = (fetchCall, importObj) => fetchCall
.then(response => response.arrayBuffer())
.then(buff => WebAssembly.compile(buff))
.then(module =>
WebAssembly.instantiate(module, importObj).then(instance => ({
module,
instance
}))
);
const spy = jest.spyOn(WebAssembly, 'instantiateStreaming');
let result;
act(() => {
TestRenderer.create(
<Wasm url="/bytes.wasm">
{props => {
result = props;
return null;
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(spy).toHaveBeenCalled();
WebAssembly.instantiateStreaming = originalInstantiateStreaming;
spy.mockRestore();
done();
}, 1000);
});
it('should instantiate a module from a url using instantiate as fallback', done => {
const spy = jest.spyOn(WebAssembly, 'instantiate');
const originalInstantiateStreaming = WebAssembly.instantiateStreaming;
WebAssembly.instantiateStreaming = undefined;
let result;
act(() => {
TestRenderer.create(
<Wasm url="/bytes.wasm">
{props => {
result = props;
return null;
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(spy).toHaveBeenCalled();
WebAssembly.instantiateStreaming = originalInstantiateStreaming;
spy.mockRestore();
done();
}, 1000);
});
it('should reinstantiate module on url change', done => {
let result;
let spy;
let originalInstantiateStreaming;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url="/bytes.wasm">
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
spy = jest.spyOn(WebAssembly, 'instantiate');
originalInstantiateStreaming = WebAssembly.instantiateStreaming;
WebAssembly.instantiateStreaming = undefined;
act(() => {
testRenderer.update(
<Wasm url="/bytes-imports.wasm" importObject={importObject}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add_js(1, 2)}
</div>
);
}}
</Wasm>
);
});
expect(result).toMatchObject({
loading: true,
error: null,
data: null
});
expect(testRenderer.toJSON()).toMatchSnapshot();
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(result.data.instance.exports.add_js).toBeTruthy();
expect(result.data.instance.exports.div).toBeFalsy();
expect(testRenderer.toJSON()).toMatchSnapshot();
expect(spy).toHaveBeenCalled();
WebAssembly.instantiateStreaming = originalInstantiateStreaming;
spy.mockRestore();
done();
}, 2000);
}, 2000);
});
it('should reinstantiate module on bufferSource change', done => {
let result;
let spy;
let originalInstantiateStreaming;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm bufferSource={bytes}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
spy = jest.spyOn(WebAssembly, 'instantiate');
originalInstantiateStreaming = WebAssembly.instantiateStreaming;
WebAssembly.instantiateStreaming = undefined;
act(() => {
testRenderer.update(
<Wasm bufferSource={bytesWithImport} importObject={importObject}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add_js(1, 2)}
</div>
);
}}
</Wasm>
);
});
expect(result).toMatchObject({
loading: true,
error: null,
data: null
});
expect(testRenderer.toJSON()).toMatchSnapshot();
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(result.data.instance.exports.add_js).toBeTruthy();
expect(result.data.instance.exports.div).toBeFalsy();
expect(testRenderer.toJSON()).toMatchSnapshot();
expect(spy).toHaveBeenCalled();
WebAssembly.instantiateStreaming = originalInstantiateStreaming;
spy.mockRestore();
done();
}, 2000);
}, 2000);
});
it('should not reinstantiate module if url doesn\'t change', done => {
let result;
let spy;
let originalInstantiateStreaming;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url="/bytes.wasm">
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
spy = jest.spyOn(WebAssembly, 'instantiate');
originalInstantiateStreaming = WebAssembly.instantiateStreaming;
WebAssembly.instantiateStreaming = undefined;
act(() => {
testRenderer.update(
<Wasm url="/bytes.wasm">
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
expect(result).toMatchObject({
loading: false,
error: null
});
expect(testRenderer.toJSON()).toMatchSnapshot();
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(result.data.instance.exports.add).toBeTruthy();
expect(result.data.instance.exports.div).toBeTruthy();
expect(testRenderer.toJSON()).toMatchSnapshot();
expect(spy).not.toHaveBeenCalled();
WebAssembly.instantiateStreaming = originalInstantiateStreaming;
spy.mockRestore();
done();
}, 2000);
}, 2000);
});
it('should not reinstantiate module if bufferSource doesn\'t change', done => {
let result;
let spy;
let originalInstantiateStreaming;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm bufferSource={bytes}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
spy = jest.spyOn(WebAssembly, 'instantiate');
originalInstantiateStreaming = WebAssembly.instantiateStreaming;
WebAssembly.instantiateStreaming = undefined;
act(() => {
testRenderer.update(
<Wasm bufferSource={bytes}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
expect(result).toMatchObject({
loading: false,
error: null
});
expect(testRenderer.toJSON()).toMatchSnapshot();
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(result.data.instance.exports.add).toBeTruthy();
expect(result.data.instance.exports.div).toBeTruthy();
expect(testRenderer.toJSON()).toMatchSnapshot();
expect(spy).not.toHaveBeenCalled();
WebAssembly.instantiateStreaming = originalInstantiateStreaming;
spy.mockRestore();
done();
}, 2000);
}, 2000);
});
it('should not reinstantiate module if bufferSource changes but url is defined', done => {
let result;
let spy;
let originalInstantiateStreaming;
let testRenderer;
act(() => {
testRenderer = TestRenderer.create(
<Wasm url="/bytes.wasm">
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(testRenderer.toJSON()).toMatchSnapshot();
spy = jest.spyOn(WebAssembly, 'instantiate');
originalInstantiateStreaming = WebAssembly.instantiateStreaming;
WebAssembly.instantiateStreaming = undefined;
act(() => {
testRenderer.update(
<Wasm url="/bytes.wasm" bufferSource={bytesWithImport}>
{props => {
const {
loading,
error,
data
} = props;
result = props;
return !loading && !error && (
<div>
loading: {String(loading)}
error: {String(error)}
1 + 2 = {data.instance.exports.add(1, 2)}
20 / 2 = {data.instance.exports.div(20, 2)}
</div>
);
}}
</Wasm>
);
});
expect(result).toMatchObject({
loading: false,
error: null
});
expect(testRenderer.toJSON()).toMatchSnapshot();
setTimeout(() => {
expect(result).toMatchObject({
loading: false,
error: null
});
expect(result.data.module).toBeInstanceOf(WebAssembly.Module);
expect(result.data.instance).toBeInstanceOf(WebAssembly.Instance);
expect(result.data.instance.exports.add).toBeTruthy();
expect(result.data.instance.exports.div).toBeTruthy();
expect(testRenderer.toJSON()).toMatchSnapshot();
expect(spy).not.toHaveBeenCalled();
WebAssembly.instantiateStreaming = originalInstantiateStreaming;
spy.mockRestore();
done();
}, 2000);
}, 2000);
});
});
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<title>Rgaa30 Test.11-2-2 Passed 01</title>
</head>
<body>
<div>
<h1>Rgaa30 Test.11-2-2 Passed 01</h1>
<div class="test-detail" lang="fr"> Chaque attribut
<code>title</code> permet-il de connaître la fonction exacte du
<a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mChpSaisie">champ de formulaire</a> auquel il est associé ?
</div>
<div class="testcase">
</div>
<div class="test-explanation">
Passed.
</div>
</div>
</body>
</html> | {
"pile_set_name": "Github"
} |
{
"dependencies": {
"Microsoft.NETCore.App": {
"version": "1.0.0",
"type": "platform"
},
"Microsoft.AspNetCore.Server.IISIntegration": "1.0.0",
"Microsoft.AspNetCore.Server.Kestrel": "1.0.0",
"Microsoft.AspnetCore.Owin": "1.0.0",
"Nancy": "2.0.0-barneyrubble"
},
"tools": {
"Microsoft.AspNetCore.Server.IISIntegration.Tools": "1.0.0-preview2-final"
},
"frameworks": {
"netcoreapp1.0": {
"imports": [
"dotnet5.6",
"portable-net45+win8"
]
}
},
"buildOptions": {
"emitEntryPoint": true,
"preserveCompilationContext": true
},
"runtimeOptions": {
"gcServer": true
},
"publishOptions": {
"include": [
"wwwroot",
"web.config"
]
},
"scripts": {
"postpublish": [ "dotnet publish-iis --publish-folder %publish:OutputPath% --framework %publish:FullTargetFramework%" ]
},
"tooling": {
"defaultNamespace": "ProductCatalog"
}
}
| {
"pile_set_name": "Github"
} |
/*
The MIT License (MIT)
Copyright (c) 2014 Dirk Groenen
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
*/
(function($){
$.fn.viewportChecker = function(useroptions){
// Define options and extend with user
var options = {
classToAdd: 'visible',
classToRemove : 'invisible',
classToAddForFullView : 'full-visible',
removeClassAfterAnimation: false,
offset: 100,
repeat: false,
invertBottomOffset: true,
callbackFunction: function(elem, action){},
scrollHorizontal: false,
scrollBox: window
};
$.extend(options, useroptions);
// Cache the given element and height of the browser
var $elem = this,
boxSize = {height: $(options.scrollBox).height(), width: $(options.scrollBox).width()},
scrollElem = ((navigator.userAgent.toLowerCase().indexOf('webkit') != -1 || navigator.userAgent.toLowerCase().indexOf('windows phone') != -1) ? 'body' : 'html');
/*
* Main method that checks the elements and adds or removes the class(es)
*/
this.checkElements = function(){
var viewportStart, viewportEnd;
// Set some vars to check with
if (!options.scrollHorizontal){
viewportStart = $(scrollElem).scrollTop();
viewportEnd = (viewportStart + boxSize.height);
}
else{
viewportStart = $(scrollElem).scrollLeft();
viewportEnd = (viewportStart + boxSize.width);
}
// Loop through all given dom elements
$elem.each(function(){
var $obj = $(this),
objOptions = {},
attrOptions = {};
// Get any individual attribution data
if ($obj.data('vp-add-class'))
attrOptions.classToAdd = $obj.data('vp-add-class');
if ($obj.data('vp-remove-class'))
attrOptions.classToRemove = $obj.data('vp-remove-class');
if ($obj.data('vp-add-class-full-view'))
attrOptions.classToAddForFullView = $obj.data('vp-add-class-full-view');
if ($obj.data('vp-keep-add-class'))
attrOptions.removeClassAfterAnimation = $obj.data('vp-remove-after-animation');
if ($obj.data('vp-offset'))
attrOptions.offset = $obj.data('vp-offset');
if ($obj.data('vp-repeat'))
attrOptions.repeat = $obj.data('vp-repeat');
if ($obj.data('vp-scrollHorizontal'))
attrOptions.scrollHorizontal = $obj.data('vp-scrollHorizontal');
if ($obj.data('vp-invertBottomOffset'))
attrOptions.invertBottomOffset = $obj.data('vp-invertBottomOffset');
// Extend objOptions with data attributes and default options
$.extend(objOptions, options);
$.extend(objOptions, attrOptions);
// If class already exists; quit
if ($obj.data('vp-animated') && !objOptions.repeat){
return;
}
// Check if the offset is percentage based
if (String(objOptions.offset).indexOf("%") > 0)
objOptions.offset = (parseInt(objOptions.offset) / 100) * boxSize.height;
// Get the raw start and end positions
var rawStart = (!objOptions.scrollHorizontal) ? $obj.offset().top : $obj.offset().left,
rawEnd = (!objOptions.scrollHorizontal) ? rawStart + $obj.height() : rawStart + $obj.width();
// Add the defined offset
var elemStart = Math.round( rawStart ) + objOptions.offset,
elemEnd = (!objOptions.scrollHorizontal) ? elemStart + $obj.height() : elemStart + $obj.width();
if (objOptions.invertBottomOffset)
elemEnd -= (objOptions.offset * 2);
// Add class if in viewport
if ((elemStart < viewportEnd) && (elemEnd > viewportStart)){
// Remove class
$obj.removeClass(objOptions.classToRemove);
$obj.addClass(objOptions.classToAdd);
// Do the callback function. Callback wil send the jQuery object as parameter
objOptions.callbackFunction($obj, "add");
// Check if full element is in view
if (rawEnd <= viewportEnd && rawStart >= viewportStart)
$obj.addClass(objOptions.classToAddForFullView);
else
$obj.removeClass(objOptions.classToAddForFullView);
// Set element as already animated
$obj.data('vp-animated', true);
if (objOptions.removeClassAfterAnimation) {
$obj.one('webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend', function(){
$obj.removeClass(objOptions.classToAdd);
});
}
// Remove class if not in viewport and repeat is true
} else if ($obj.hasClass(objOptions.classToAdd) && (objOptions.repeat)){
$obj.removeClass(objOptions.classToAdd + " " + objOptions.classToAddForFullView);
// Do the callback function.
objOptions.callbackFunction($obj, "remove");
// Remove already-animated-flag
$obj.data('vp-animated', false);
}
});
};
/**
* Binding the correct event listener is still a tricky thing.
* People have expierenced sloppy scrolling when both scroll and touch
* events are added, but to make sure devices with both scroll and touch
* are handles too we always have to add the window.scroll event
*
* @see https://github.com/dirkgroenen/jQuery-viewport-checker/issues/25
* @see https://github.com/dirkgroenen/jQuery-viewport-checker/issues/27
*/
// Select the correct events
if( 'ontouchstart' in window || 'onmsgesturechange' in window ){
// Device with touchscreen
$(document).bind("touchmove MSPointerMove pointermove", this.checkElements);
}
// Always load on window load
$(options.scrollBox).bind("load scroll", this.checkElements);
// On resize change the height var
$(window).resize(function(e){
boxSize = {height: $(options.scrollBox).height(), width: $(options.scrollBox).width()};
$elem.checkElements();
});
// trigger inital check if elements already visible
this.checkElements();
// Default jquery plugin behaviour
return this;
};
})(jQuery);
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<title>LCD Library: Member List</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css"/>
</head>
<body onload='searchBox.OnSelectItem(0);'>
<!-- Generated by Doxygen 1.7.4 -->
<script type="text/javascript"><!--
var searchBox = new SearchBox("searchBox", "search",false,'Search');
--></script>
<div id="top">
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectlogo"><img alt="Logo" src="logoGoogle.jpg"/></td>
<td style="padding-left: 0.5em;">
<div id="projectname">LCD Library <span id="projectnumber">1.2.1</span></div>
<div id="projectbrief">LCD Library - LCD control class hierarchy library. Drop in replacement for the LiquidCrystal Library.</div>
</td>
</tr>
</tbody>
</table>
</div>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li class="current"><a href="annotated.html"><span>Classes</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
<li id="searchli">
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="annotated.html"><span>Class List</span></a></li>
<li><a href="classes.html"><span>Class Index</span></a></li>
<li><a href="hierarchy.html"><span>Class Hierarchy</span></a></li>
<li><a href="functions.html"><span>Class Members</span></a></li>
</ul>
</div>
</div>
<div class="header">
<div class="headertitle">
<div class="title">LiquidCrystal_SR2W Member List</div> </div>
</div>
<div class="contents">
This is the complete list of members for <a class="el" href="class_liquid_crystal___s_r2_w.html">LiquidCrystal_SR2W</a>, including all inherited members.<table>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a88b16ea0e5c7d1cabc5007d48bcbd2b0">_cols</a></td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [protected]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#ae47a0e2eff74431a39774b788d5761f4">_displaycontrol</a></td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [protected]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#aef093ba3f8e1016267b40ac235a0fa0f">_displayfunction</a></td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [protected]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a726b9a68d091dd8683a18e83f3a8fd3c">_displaymode</a></td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [protected]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#ac1374911fb145fea430c21092ada0c06">_numlines</a></td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [protected]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a990338759d2abe10b0fb1743b7789566">_polarity</a></td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [protected]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#abb3ed88d530f6283e6159b4973e7da9e">autoscroll</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#aba8867fe2210cbfa8db869208709be10">backlight</a>(void)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a3f587d1cbb2d59765ef60a5216b56fea">begin</a>(uint8_t cols, uint8_t rows, uint8_t charsize=LCD_5x8DOTS)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [virtual]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a878b36878fa8287093964eba83aace77">blink</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#afa699e0beeeee03cce8cef87eba81c4a">clear</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a91cba8f93c692abcddf8bc3de58d2d3a">createChar</a>(uint8_t location, uint8_t charmap[])</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a194814f64dfa50a90e07e0fe0d361620">cursor</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a5b07cf05e8e5e7c53654f5ca0cf58b89">display</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#aee45ad37f09312f5d9982257e2d37e68">home</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a00bb2db1390721abc7b24ac4b8c276c8">LCD</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a238e9f6476dc7df64af04eb6c87f6ac7">leftToRight</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_liquid_crystal___s_r2_w.html#af307fdf5c8feb757e965074dcdeb1dd3">LiquidCrystal_SR2W</a>(uint8_t srdata, uint8_t srclock, t_backlighPol blpol=POSITIVE)</td><td><a class="el" href="class_liquid_crystal___s_r2_w.html">LiquidCrystal_SR2W</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#aad2abc99d1aca5403873579d9d72c2d4">moveCursorLeft</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a09eec0c712e54b066f5894635c1fe75c">moveCursorRight</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a96035dde40efbf73390e00b5beb00231">noAutoscroll</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a2a331b4e142734411b2f1cfaffe7a488">noBacklight</a>(void)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a3b755c4b397b5985752be8c30ee1a9b5">noBlink</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#aec8ffaa1e69c7a6e13ac0cfbc29151d9">noCursor</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#af3974da6d988ba2d21c25135ada12108">noDisplay</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a191639be183be1476c9bfe6d455d23b2">off</a>(void)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a718da3a638deb59bd1c7a5222a52d98a">on</a>(void)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#ac014830eadc26bfd86308ea8734f4428">rightToLeft</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a6f3a503055b3b8dcf0f61b2633c584f7">scrollDisplayLeft</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#abfc44b294772f09020bfa32af8a79571">scrollDisplayRight</a>()</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_liquid_crystal___s_r2_w.html#a65dc6f261c319be8e56f3c1f6a5c877d">send</a>(uint8_t value, uint8_t mode)</td><td><a class="el" href="class_liquid_crystal___s_r2_w.html">LiquidCrystal_SR2W</a></td><td><code> [virtual]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_liquid_crystal___s_r2_w.html#a2158db27287c1564a03e7a1472beb3b6">setBacklight</a>(uint8_t mode)</td><td><a class="el" href="class_liquid_crystal___s_r2_w.html">LiquidCrystal_SR2W</a></td><td><code> [virtual]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a53f4ee9b39d9ab3d7ae4d9f8dedca3bc">setBacklightPin</a>(uint8_t value, t_backlighPol pol)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [inline, virtual]</code></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a48220450fd152b25994eb7d0ba340e8d">setCursor</a>(uint8_t col, uint8_t row)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td></td></tr>
<tr class="memlist"><td><a class="el" href="class_l_c_d.html#a2d89cc2e62f72afb5f15a7fd812900e3">write</a>(uint8_t value)</td><td><a class="el" href="class_l_c_d.html">LCD</a></td><td><code> [virtual]</code></td></tr>
</table></div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
<a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(0)"><span class="SelectionMark"> </span>All</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(1)"><span class="SelectionMark"> </span>Classes</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(2)"><span class="SelectionMark"> </span>Files</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(3)"><span class="SelectionMark"> </span>Functions</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(4)"><span class="SelectionMark"> </span>Variables</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(5)"><span class="SelectionMark"> </span>Typedefs</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(6)"><span class="SelectionMark"> </span>Enumerations</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(7)"><span class="SelectionMark"> </span>Enumerator</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(8)"><span class="SelectionMark"> </span>Defines</a></div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<hr class="footer"/><address class="footer"><small>Generated on Thu Apr 5 2012 18:17:46 for LCD Library by 
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.7.4 </small></address>
</body>
</html>
| {
"pile_set_name": "Github"
} |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the files COPYING and Copyright.html. COPYING can be found at the root *
* of the source code distribution tree; Copyright.html can be found at the *
* root level of an installed copy of the electronic HDF5 document set and *
* is linked from the top-level documents page. It can also be found at *
* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
* access to either file, you may request a copy from [email protected]. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/*****************************************************************************
FILE
util.cpp - Utility routines for HDF5 C++ tests.
EXTERNAL ROUTINES/VARIABLES:
***************************************************************************/
#ifdef OLD_HEADER_FILENAME
#include <iostream.h>
#else
#include <iostream>
#endif
#include <string>
#ifndef H5_NO_NAMESPACE
#ifndef H5_NO_STD
using std::cerr;
using std::endl;
#endif // H5_NO_STD
#endif
#include "h5test.h"
#include "H5Cpp.h"
#ifndef H5_NO_NAMESPACE
using namespace H5;
#endif
#include "h5cpputil.h"
/*-------------------------------------------------------------------------
* Function: test_report
*
* Purpose: Prints out the number of errors for the tests indicated
* by 'testname', if any failures occurred. If there were
* no failures, test_report prints out the tests-passed message.
*
* Return: if any failure has occurred: 1
*
* if no failure occurs: 0
*
* Programmer: Binh-Minh Ribler (using C code segment for reporting tests)
* Friday, February 6, 2001
*
* Modifications:
*
*-------------------------------------------------------------------------
*/
int test_report( int nerrors, const H5std_string& testname )
{
if (nerrors)
{
nerrors = MAX(1, nerrors);
if (1 == nerrors)
cerr << "***** " << nerrors << testname
<< " TEST FAILED! *****" << endl;
else
cerr << "***** " << nerrors << testname
<< " TESTS FAILED! *****" << endl;
return 1;
}
else
{
cerr << "All" << testname << " tests passed." << endl;
return 0;
}
}
/*-------------------------------------------------------------------------
* Function: issue_fail_msg
*
* Purpose: Displays that a function has failed with its location.
*
* Return: None
*
* Programmer: Binh-Minh Ribler (copied and modified macro CHECK from C)
* Monday, December 20, 2004
*
*-------------------------------------------------------------------------
*/
void issue_fail_msg(const char* where, int line, const char* file_name,
const char* message)
{
//if (GetTestVerbosity()>=VERBO_HI)
{
cerr << ">>> FAILED in " << where << " at line " << line
<< " in " << file_name << " - " << message << endl << endl;
}
}
/*-------------------------------------------------------------------------
* Function: check_values
*
* Purpose: Checks a read value against the written value. If they are
* different, the function will print out a message and the
* different values. This function is made to reuse the code
* segment that is used in various places throughout
* the test code. Where the C version of this code segment
* does a "goto error," this function returns -1 instead.
*
* Return: Success: 0
*
* Failure: -1
*
* Programmer: Binh-Minh Ribler (using C code segment for checking values)
* Friday, February 6, 2001
*
* Modifications:
*
*-------------------------------------------------------------------------
*/
int check_values (hsize_t i, hsize_t j, int apoint, int acheck)
{
if (apoint != acheck)
{
cerr << " Read different values than written.\n" << endl;
cerr << " At index " << (unsigned long)i << "," <<
(unsigned long)j << endl;
return -1;
}
return 0;
} // check_values
/*-------------------------------------------------------------------------
* Function: verify_val (const char*, const char*,...)
*
* Purpose: Compares two character strings. If they are
* different, the function will print out a message and the
* different values.
*
* Return: Success: 0
*
* Failure: -1
*
* Programmer: Binh-Minh Ribler
* May 2, 2010
*
* Modifications:
*
*-------------------------------------------------------------------------
*/
void verify_val(const char* x, const char* value, const char* where, int line, const char* file_name)
{
if (GetTestVerbosity()>=VERBO_HI)
{
cerr << endl;
cerr << " Call to routine: " << where << " at line " << line
<< " in " << file_name << " had value " << x << endl;
}
if (strcmp(x, value) != 0)
{
cerr << endl;
cerr << "*** UNEXPECTED VALUE from " << where << " should be "
<< value << ", but is " << x << " at line " << line
<< " in " << file_name << endl;
IncTestNumErrs();
throw TestFailedException(where, "");
}
}
//--------------------------------------------------------------------------
// Function: InvalidActionException default constructor
//--------------------------------------------------------------------------
InvalidActionException::InvalidActionException():Exception(){}
//--------------------------------------------------------------------------
// Function: InvalidActionException overloaded constructor
//
// Purpose: Creates an InvalidActionException with the name of the function
// in which the failure should have occurred but didn't, and a
// message explaining why it should have failed.
// Parameters
// func_name - IN: Name of the function where failure should occur
// message - IN: Message
//--------------------------------------------------------------------------
InvalidActionException::InvalidActionException(const H5std_string func_name, const H5std_string message) : Exception(func_name, message) {}
//--------------------------------------------------------------------------
// Function: InvalidActionException destructor
//--------------------------------------------------------------------------
InvalidActionException::~InvalidActionException() {}
//--------------------------------------------------------------------------
// Function: TestFailedException default constructor
//--------------------------------------------------------------------------
TestFailedException::TestFailedException():Exception(){}
//--------------------------------------------------------------------------
// Function: TestFailedException overloaded constructor
//
// Purpose: Creates a TestFailedException with the name of the function
// where the failure occurred, and a message describing the
// failure.
// Parameters
// func_name - IN: Name of the function where failure should occur
// message - IN: Message
//--------------------------------------------------------------------------
TestFailedException::TestFailedException(const H5std_string func_name, const H5std_string message) : Exception(func_name, message) {}
//--------------------------------------------------------------------------
// Function: TestFailedException destructor
//--------------------------------------------------------------------------
TestFailedException::~TestFailedException() {}
| {
"pile_set_name": "Github"
} |
package file
import (
"bufio"
"io/ioutil"
"os"
)
// ReadLines reads the content of the given file into a string slice
func ReadLines(fileName string) ([]string, error) {
file, err := os.Open(fileName)
if err != nil {
return nil, err
}
defer file.Close()
scanner := bufio.NewScanner(file)
var lines []string
for scanner.Scan() {
lines = append(lines, scanner.Text())
}
return lines, scanner.Err()
}
// LoadData reads the specified file and returns it as a bytes slice.
func LoadData(file string) ([]byte, error) {
if len(file) == 0 {
return []byte{}, nil
}
bytes, err := ioutil.ReadFile(file)
if err != nil {
return []byte{}, err
}
return bytes, nil
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.13"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>Bounce2: bounce.ino</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">Bounce2
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.13 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
</div><!-- top -->
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="headertitle">
<div class="title">bounce.ino</div> </div>
</div><!--header-->
<div class="contents">
<dl class="todo"><dt><b><a class="el" href="todo.html#_todo000001">Todo:</a></b></dt><dd>Make Bounce2 more abstract. Split it from the hardware layer. Remove debouncing code from Bounce2 and make a new Debounce class from that code. Bounce2 should extend Debounce. </dd></dl>
<p>Simple example of the <a class="el" href="class_bounce.html" title="The Debouncer:Bounce class. Links the Debouncing class to a hardware pin. ">Bounce</a> library that switches the debug LED when a button is pressed.</p>
<div class="fragment"><div class="line"></div><div class="line">/* </div><div class="line"> DESCRIPTION</div><div class="line"> ====================</div><div class="line"> Simple example of the Bounce library that switches the debug LED when a button is pressed.</div><div class="line"> */</div><div class="line">// Include the Bounce2 library found here :</div><div class="line">// https://github.com/thomasfredericks/Bounce2</div><div class="line">#include <Bounce2.h></div><div class="line"></div><div class="line">#define BUTTON_PIN 2</div><div class="line">#define LED_PIN 13</div><div class="line"></div><div class="line">// Instantiate a Bounce object</div><div class="line">Bounce debouncer = Bounce(); </div><div class="line"></div><div class="line">void setup() {</div><div class="line"></div><div class="line"> // Setup the button with an internal pull-up :</div><div class="line"> pinMode(BUTTON_PIN,INPUT_PULLUP);</div><div class="line"></div><div class="line"> // After setting up the button, setup the Bounce instance :</div><div class="line"> debouncer.attach(BUTTON_PIN);</div><div class="line"> debouncer.interval(5); // interval in ms</div><div class="line"></div><div class="line"> //Setup the LED :</div><div class="line"> pinMode(LED_PIN,OUTPUT);</div><div class="line"></div><div class="line">}</div><div class="line"></div><div class="line">void loop() {</div><div class="line"> // Update the Bounce instance :</div><div class="line"> debouncer.update();</div><div class="line"></div><div class="line"> // Get the updated value :</div><div class="line"> int value = debouncer.read();</div><div class="line"></div><div class="line"> // Turn on or off the LED as determined by the state :</div><div class="line"> if ( value == LOW ) {</div><div class="line"> digitalWrite(LED_PIN, HIGH );</div><div class="line"> } </div><div class="line"> else {</div><div class="line"> digitalWrite(LED_PIN, LOW );</div><div class="line"> }</div><div class="line"></div><div class="line">}</div><div class="line"></div><div class="line"></div></div><!-- fragment --> </div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.13
</small></address>
</body>
</html>
| {
"pile_set_name": "Github"
} |
EXTRA_DIST = smooth_opengl3.c smooth_opengl3.dsp
noinst_PROGRAMS = smooth_opengl3
smooth_opengl3_SOURCES = smooth_opengl3.c
smooth_opengl3_LDFLAGS = -export-dynamic
smooth_opengl3_LDADD = ../../../src/lib@[email protected] $(GL_LIBS)
smooth_opengl3_CFLAGS = -I$(top_srcdir)/include $(X_CFLAGS)
| {
"pile_set_name": "Github"
} |
# EnumTest
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**enumString** | [**EnumStringEnum**](#EnumStringEnum) | | [optional]
**enumStringRequired** | [**EnumStringRequiredEnum**](#EnumStringRequiredEnum) | |
**enumInteger** | [**EnumIntegerEnum**](#EnumIntegerEnum) | | [optional]
**enumNumber** | [**EnumNumberEnum**](#EnumNumberEnum) | | [optional]
**outerEnum** | [**OuterEnum**](OuterEnum.md) | | [optional]
<a name="EnumStringEnum"></a>
## Enum: EnumStringEnum
Name | Value
---- | -----
UPPER | "UPPER"
LOWER | "lower"
EMPTY | ""
<a name="EnumStringRequiredEnum"></a>
## Enum: EnumStringRequiredEnum
Name | Value
---- | -----
UPPER | "UPPER"
LOWER | "lower"
EMPTY | ""
<a name="EnumIntegerEnum"></a>
## Enum: EnumIntegerEnum
Name | Value
---- | -----
NUMBER_1 | 1
NUMBER_MINUS_1 | -1
<a name="EnumNumberEnum"></a>
## Enum: EnumNumberEnum
Name | Value
---- | -----
NUMBER_1_DOT_1 | 1.1
NUMBER_MINUS_1_DOT_2 | -1.2
| {
"pile_set_name": "Github"
} |
' Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
Imports Microsoft.CodeAnalysis.Text
Imports Microsoft.CodeAnalysis.VisualBasic.Symbols
Imports Microsoft.CodeAnalysis.VisualBasic.Syntax
Namespace Microsoft.CodeAnalysis.VisualBasic.CodeGen
Partial Friend Class StackScheduler
Private Class DummyLocal
Inherits SynthesizedLocal
Public Sub New(container As Symbol)
MyBase.New(container, Nothing, SynthesizedLocalKind.OptimizerTemp)
End Sub
Friend Overrides Function ComputeType(Optional containingBinder As Binder = Nothing) As TypeSymbol
Throw ExceptionUtilities.Unreachable
End Function
End Class
End Class
End Namespace
| {
"pile_set_name": "Github"
} |
'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"\u0628.\u0646",
"\u062f.\u0646"
],
"DAY": [
"\u06cc\u06d5\u06a9\u0634\u06d5\u0645\u0645\u06d5",
"\u062f\u0648\u0648\u0634\u06d5\u0645\u0645\u06d5",
"\u0633\u06ce\u0634\u06d5\u0645\u0645\u06d5",
"\u0686\u0648\u0627\u0631\u0634\u06d5\u0645\u0645\u06d5",
"\u067e\u06ce\u0646\u062c\u0634\u06d5\u0645\u0645\u06d5",
"\u06be\u06d5\u06cc\u0646\u06cc",
"\u0634\u06d5\u0645\u0645\u06d5"
],
"ERANAMES": [
"\u067e\u06ce\u0634 \u0632\u0627\u06cc\u06cc\u0646",
"\u0632\u0627\u06cc\u06cc\u0646\u06cc"
],
"ERAS": [
"\u067e\u06ce\u0634 \u0632\u0627\u06cc\u06cc\u0646",
"\u0632\u0627\u06cc\u06cc\u0646\u06cc"
],
"FIRSTDAYOFWEEK": 5,
"MONTH": [
"\u06a9\u0627\u0646\u0648\u0648\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u0634\u0648\u0628\u0627\u062a",
"\u0626\u0627\u0632\u0627\u0631",
"\u0646\u06cc\u0633\u0627\u0646",
"\u0626\u0627\u06cc\u0627\u0631",
"\u062d\u0648\u0632\u06d5\u06cc\u0631\u0627\u0646",
"\u062a\u06d5\u0645\u0648\u0648\u0632",
"\u0626\u0627\u0628",
"\u0626\u06d5\u06cc\u0644\u0648\u0648\u0644",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u06a9\u0627\u0646\u0648\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645"
],
"SHORTDAY": [
"\u06cc\u06d5\u06a9\u0634\u06d5\u0645\u0645\u06d5",
"\u062f\u0648\u0648\u0634\u06d5\u0645\u0645\u06d5",
"\u0633\u06ce\u0634\u06d5\u0645\u0645\u06d5",
"\u0686\u0648\u0627\u0631\u0634\u06d5\u0645\u0645\u06d5",
"\u067e\u06ce\u0646\u062c\u0634\u06d5\u0645\u0645\u06d5",
"\u06be\u06d5\u06cc\u0646\u06cc",
"\u0634\u06d5\u0645\u0645\u06d5"
],
"SHORTMONTH": [
"\u06a9\u0627\u0646\u0648\u0648\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u0634\u0648\u0628\u0627\u062a",
"\u0626\u0627\u0632\u0627\u0631",
"\u0646\u06cc\u0633\u0627\u0646",
"\u0626\u0627\u06cc\u0627\u0631",
"\u062d\u0648\u0632\u06d5\u06cc\u0631\u0627\u0646",
"\u062a\u06d5\u0645\u0648\u0648\u0632",
"\u0626\u0627\u0628",
"\u0626\u06d5\u06cc\u0644\u0648\u0648\u0644",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u06a9\u0627\u0646\u0648\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645"
],
"STANDALONEMONTH": [
"\u06a9\u0627\u0646\u0648\u0648\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u0634\u0648\u0628\u0627\u062a",
"\u0626\u0627\u0632\u0627\u0631",
"\u0646\u06cc\u0633\u0627\u0646",
"\u0626\u0627\u06cc\u0627\u0631",
"\u062d\u0648\u0632\u06d5\u06cc\u0631\u0627\u0646",
"\u062a\u06d5\u0645\u0648\u0648\u0632",
"\u0626\u0627\u0628",
"\u0626\u06d5\u06cc\u0644\u0648\u0648\u0644",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u06a9\u0627\u0646\u0648\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645"
],
"WEEKENDRANGE": [
4,
4
],
"fullDate": "y MMMM d, EEEE",
"longDate": "d\u06cc MMMM\u06cc y",
"medium": "y MMM d HH:mm:ss",
"mediumDate": "y MMM d",
"mediumTime": "HH:mm:ss",
"short": "y-MM-dd HH:mm",
"shortDate": "y-MM-dd",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "Rial",
"DECIMAL_SEP": "\u066b",
"GROUP_SEP": "\u066c",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 0,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a0\u00a4",
"posPre": "",
"posSuf": "\u00a0\u00a4"
}
]
},
"id": "ckb-ir",
"localeID": "ckb_IR",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
| {
"pile_set_name": "Github"
} |
/*
* FXGL - JavaFX Game Library. The MIT License (MIT).
* Copyright (c) AlmasB ([email protected]).
* See LICENSE for details.
*/
package com.almasb.fxgl.physics.box2d.callbacks;
import com.almasb.fxgl.physics.box2d.common.JBoxSettings;
/**
* Contact impulses for reporting. Impulses are used instead of forces because sub-step forces may
* approach infinity for rigid body collisions. These match up one-to-one with the contact points in
* b2Manifold.
*
* @author Daniel Murphy
*/
public class ContactImpulse {
public float[] normalImpulses = new float[JBoxSettings.maxManifoldPoints];
public float[] tangentImpulses = new float[JBoxSettings.maxManifoldPoints];
public int count;
}
| {
"pile_set_name": "Github"
} |
Filter 1: ON PK Fc 948 Hz Gain -2.9 dB Q 1.27
Filter 2: ON PK Fc 1994 Hz Gain 1.7 dB Q 2.98
Filter 3: ON PK Fc 3145 Hz Gain 3.2 dB Q 3.63
Filter 4: ON PK Fc 19706 Hz Gain -6.9 dB Q 0.17
Filter 5: ON PK Fc 14 Hz Gain 5.9 dB Q 0.45
Filter 6: ON PK Fc 67 Hz Gain 1.6 dB Q 0.26
Filter 7: ON PK Fc 4671 Hz Gain 3.8 dB Q 3.67
Filter 8: ON PK Fc 5813 Hz Gain -3.5 dB Q 1.88
Filter 9: ON PK Fc 9084 Hz Gain 4.2 dB Q 5.03 | {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 18ec59ff8011343668b099a9ba40aa96
TextureImporter:
internalIDToNameTable: []
externalObjects: {}
serializedVersion: 10
mipmaps:
mipMapMode: 0
enableMipMap: 0
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 1
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: iPhone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Android
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
internalID: 0
vertices: []
indices:
edges: []
weights: []
secondaryTextures: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
%% @copyright 2007 Mochi Media, Inc.
%% @author Matthew Dempsky <[email protected]>
%%
%% @doc Erlang module for automatically reloading modified modules
%% during development.
-module(reloader).
-author("Matthew Dempsky <[email protected]>").
-include_lib("kernel/include/file.hrl").
-behaviour(gen_server).
-export([start/0, start_link/0]).
-export([stop/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([all_changed/0]).
-export([is_changed/1]).
-export([reload_modules/1]).
-record(state, {last, tref}).
%% External API
%% @spec start() -> ServerRet
%% @doc Start the reloader.
start() ->
gen_server:start({local, ?MODULE}, ?MODULE, [], []).
%% @spec start_link() -> ServerRet
%% @doc Start the reloader.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% @spec stop() -> ok
%% @doc Stop the reloader.
stop() ->
gen_server:call(?MODULE, stop).
%% gen_server callbacks
%% @spec init([]) -> {ok, State}
%% @doc gen_server init, opens the server in an initial state.
init([]) ->
{ok, TRef} = timer:send_interval(timer:seconds(1), doit),
{ok, #state{last = stamp(), tref = TRef}}.
%% @spec handle_call(Args, From, State) -> tuple()
%% @doc gen_server callback.
handle_call(stop, _From, State) ->
{stop, shutdown, stopped, State};
handle_call(_Req, _From, State) ->
{reply, {error, badrequest}, State}.
%% @spec handle_cast(Cast, State) -> tuple()
%% @doc gen_server callback.
handle_cast(_Req, State) ->
{noreply, State}.
%% @spec handle_info(Info, State) -> tuple()
%% @doc gen_server callback.
handle_info(doit, State) ->
Now = stamp(),
_ = doit(State#state.last, Now),
{noreply, State#state{last = Now}};
handle_info(_Info, State) ->
{noreply, State}.
%% @spec terminate(Reason, State) -> ok
%% @doc gen_server termination callback.
terminate(_Reason, State) ->
{ok, cancel} = timer:cancel(State#state.tref),
ok.
%% @spec code_change(_OldVsn, State, _Extra) -> State
%% @doc gen_server code_change callback (trivial).
code_change(_Vsn, State, _Extra) ->
{ok, State}.
%% @spec reload_modules([atom()]) -> [{module, atom()} | {error, term()}]
%% @doc code:purge/1 and code:load_file/1 the given list of modules in order,
%% return the results of code:load_file/1.
reload_modules(Modules) ->
[begin code:purge(M), code:load_file(M) end || M <- Modules].
%% @spec all_changed() -> [atom()]
%% @doc Return a list of beam modules that have changed.
all_changed() ->
[M || {M, Fn} <- code:all_loaded(), is_list(Fn), is_changed(M)].
%% @spec is_changed(atom()) -> boolean()
%% @doc true if the loaded module is a beam with a vsn attribute
%% and does not match the on-disk beam file, returns false otherwise.
is_changed(M) ->
try
module_vsn(M:module_info()) =/= module_vsn(code:get_object_code(M))
catch _:_ ->
false
end.
%% Internal API
module_vsn({M, Beam, _Fn}) ->
{ok, {M, Vsn}} = beam_lib:version(Beam),
Vsn;
module_vsn(L) when is_list(L) ->
{_, Attrs} = lists:keyfind(attributes, 1, L),
{_, Vsn} = lists:keyfind(vsn, 1, Attrs),
Vsn.
doit(From, To) ->
[case file:read_file_info(Filename) of
{ok, #file_info{mtime = Mtime}} when Mtime >= From, Mtime < To ->
reload(Module);
{ok, _} ->
unmodified;
{error, enoent} ->
%% The Erlang compiler deletes existing .beam files if
%% recompiling fails. Maybe it's worth spitting out a
%% warning here, but I'd want to limit it to just once.
gone;
{error, Reason} ->
io:format("Error reading ~s's file info: ~p~n",
[Filename, Reason]),
error
end || {Module, Filename} <- code:all_loaded(), is_list(Filename)].
reload(Module) ->
io:format("Reloading ~p ...", [Module]),
code:purge(Module),
case code:load_file(Module) of
{module, Module} ->
io:format(" ok.~n"),
leptus:upgrade(), %% upgrade cowboy dispatch
case erlang:function_exported(Module, test, 0) of
true ->
io:format(" - Calling ~p:test() ...", [Module]),
case catch Module:test() of
ok ->
io:format(" ok.~n"),
reload;
Reason ->
io:format(" fail: ~p.~n", [Reason]),
reload_but_test_failed
end;
false ->
reload
end;
{error, Reason} ->
io:format(" fail: ~p.~n", [Reason]),
error
end.
stamp() ->
erlang:localtime().
%%
%% Tests
%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
| {
"pile_set_name": "Github"
} |
-
id: 1
name: golang
repo_count: 2
-
id: 2
name: database
repo_count: 1
- id: 3
name: SQL
repo_count: 1
- id: 4
name: graphql
repo_count: 1
- id: 5
name: topicname1
repo_count: 1
- id: 6
name: topicname2
repo_count: 2
| {
"pile_set_name": "Github"
} |
<?php
/**
* @copyright Copyright (C) eZ Systems AS. All rights reserved.
* @license For full copyright and license information view LICENSE file distributed with this source code.
*/
namespace eZ\Publish\Core\REST\Server\Input\Parser;
use eZ\Publish\Core\REST\Common\Input\BaseParser;
use eZ\Publish\Core\REST\Common\Input\ParsingDispatcher;
use eZ\Publish\Core\REST\Common\Input\ParserTools;
use eZ\Publish\Core\REST\Common\Exceptions;
use eZ\Publish\API\Repository\ObjectStateService;
/**
* Parser for ObjectStateGroupCreate.
*/
class ObjectStateGroupCreate extends BaseParser
{
/**
* Object state service.
*
* @var \eZ\Publish\API\Repository\ObjectStateService
*/
protected $objectStateService;
/** @var \eZ\Publish\Core\REST\Common\Input\ParserTools */
protected $parserTools;
/**
* Constructor.
*
* @param \eZ\Publish\API\Repository\ObjectStateService $objectStateService
* @param \eZ\Publish\Core\REST\Common\Input\ParserTools $parserTools
*/
public function __construct(ObjectStateService $objectStateService, ParserTools $parserTools)
{
$this->objectStateService = $objectStateService;
$this->parserTools = $parserTools;
}
/**
* Parse input structure.
*
* @param array $data
* @param \eZ\Publish\Core\REST\Common\Input\ParsingDispatcher $parsingDispatcher
*
* @return \eZ\Publish\API\Repository\Values\ObjectState\ObjectStateGroupCreateStruct
*/
public function parse(array $data, ParsingDispatcher $parsingDispatcher)
{
if (!array_key_exists('identifier', $data)) {
throw new Exceptions\Parser("Missing 'identifier' attribute for ObjectStateGroupCreate.");
}
$objectStateGroupCreateStruct = $this->objectStateService->newObjectStateGroupCreateStruct($data['identifier']);
if (!array_key_exists('defaultLanguageCode', $data)) {
throw new Exceptions\Parser("Missing 'defaultLanguageCode' attribute for ObjectStateGroupCreate.");
}
$objectStateGroupCreateStruct->defaultLanguageCode = $data['defaultLanguageCode'];
if (!array_key_exists('names', $data) || !is_array($data['names'])) {
throw new Exceptions\Parser("Missing or invalid 'names' element for ObjectStateGroupCreate.");
}
if (!array_key_exists('value', $data['names']) || !is_array($data['names']['value'])) {
throw new Exceptions\Parser("Missing or invalid 'names' element for ObjectStateGroupCreate.");
}
$objectStateGroupCreateStruct->names = $this->parserTools->parseTranslatableList($data['names']);
if (array_key_exists('descriptions', $data) && is_array($data['descriptions'])) {
$objectStateGroupCreateStruct->descriptions = $this->parserTools->parseTranslatableList($data['descriptions']);
}
return $objectStateGroupCreateStruct;
}
}
| {
"pile_set_name": "Github"
} |
---
title: "Document Picker in Xamarin.iOS"
description: "This document describes the iOS Document Picker and discusses how to use it in Xamarin.iOS. It takes a look at iCloud, documents, common setup code, document provider extensions, and more."
ms.prod: xamarin
ms.assetid: 89539D79-BC6E-4A3E-AEC6-69D9A6CC6818
ms.technology: xamarin-ios
author: davidortinau
ms.author: daortin
ms.date: 06/05/2017
---
# Document Picker in Xamarin.iOS
The Document Picker allows documents to be shared between apps. These documents may be stored in iCloud or in a different app’s directory. Documents are shared via the set of [Document Provider Extensions](~/ios/platform/extensions.md) the user has installed on their device.
Because of the difficulty of keeping documents synchronized across apps and the cloud, these features introduce a certain amount of necessary complexity.
## Requirements
The following is required to complete the steps presented in this article:
- **Xcode 7 and iOS 8 or newer** – Apple's Xcode 7 and iOS 8 or newer APIs need to be installed and configured on the developer's computer.
- **Visual Studio or Visual Studio for Mac** – The latest version of Visual Studio or Visual Studio for Mac should be installed.
- **iOS Device** – An iOS device running iOS 8 or above.
## Changes to iCloud
To implement the new features of the Document Picker, the following changes have been made to Apple's iCloud Service:
- The iCloud Daemon has been completely rewritten using CloudKit.
- The existing iCloud features have been renamed iCloud Drive.
- Support for Microsoft Windows OS has been added to iCloud.
- An iCloud folder has been added in the Mac OS Finder.
- iOS devices can access the contents of the Mac OS iCloud folder.
> [!IMPORTANT]
> Apple [provides tools](https://developer.apple.com/support/allowing-users-to-manage-data/)
> to help developers properly handle the European Union's General Data
> Protection Regulation (GDPR).
## What is a Document?
When referring to a Document in iCloud, it is a single, stand-alone entity and should be perceived as such by the user. A user may wish to modify the document or share it with other users (by using email, for example).
There are several types of files that the user will immediately recognize as Documents, such as Pages, Keynote or Numbers files. However, iCloud is not limited to this concept. For example, the state of a game (such as a Chess match) can be treated as a document and stored in iCloud. This file could be passed between a user's devices and allow them to pick up a game where they left off on a different device.
## Dealing with Documents
Before diving into the code required to use the Document Picker with Xamarin, this article is going to cover the best practices for working with iCloud Documents, and several of the modifications made to existing APIs required to support the Document Picker.
### Using File Coordination
Because a file can be modified from several different locations, coordination must be used to prevent data loss.
![Diagram of devices coordinating changes to a shared document through iCloud](document-picker-images/image1.png#lightbox)
Let's take a look at the above illustration:
1. An iOS device using file coordination creates a new Document and saves it to the iCloud Folder.
2. iCloud saves the modified file to the cloud for distribution to every device.
3. An attached Mac sees the modified file in the iCloud Folder and uses File Coordination to copy down the changes to the file.
4. A device not using File Coordination makes a change to the file and saves it to the iCloud Folder. These changes are instantly replicated to the other devices.
If the original iOS device or the Mac had been editing the file, those changes are now lost, overwritten by the version of the file from the uncoordinated device. To prevent data loss, File Coordination is a must when working with cloud-based Documents.
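To make this concrete, the following is a minimal sketch (not taken from the original sample) of a coordinated write followed by a coordinated read using `NSFileCoordinator`; the file path and the work done inside each callback are placeholder assumptions:
```csharp
// A minimal File Coordination sketch; the path below is a placeholder
var fileUrl = NSUrl.FromFilename ("SharedDocument.txt");
var coordinator = new NSFileCoordinator ();
NSError coordinationError;
// Coordinated write: other coordinated readers/writers wait until this block finishes
coordinator.CoordinateWrite (fileUrl, NSFileCoordinatorWritingOptions.ForReplacing, out coordinationError, (newUrl) => {
    NSData.FromString ("Hello iCloud").Save (newUrl, true);
});
// Coordinated read: guaranteed to see a consistent version of the file
coordinator.CoordinateRead (fileUrl, NSFileCoordinatorReadingOptions.WithoutChanges, out coordinationError, (newUrl) => {
    var contents = NSData.FromUrl (newUrl);
    Console.WriteLine ("Read {0} bytes", contents == null ? 0 : (long)contents.Length);
});
```
`UIDocument` (described next) performs this coordination automatically, so explicit `NSFileCoordinator` calls are normally only needed when reading or writing files outside of a `UIDocument` subclass.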
### Using UIDocument
`UIDocument` makes things simple (or `NSDocument` on macOS) by doing all of the heavy lifting for the developer. It provides built in File Coordination with background queues to keep from blocking the application's UI.
`UIDocument` exposes multiple, high-level APIs that ease the development effort of a Xamarin application for any purpose the developer requires.
The following code creates a subclass of `UIDocument` to implement a generic text-based document that can be used to store and retrieve text from iCloud:
```csharp
using System;
using Foundation;
using UIKit;
namespace DocPicker
{
public class GenericTextDocument : UIDocument
{
#region Private Variable Storage
private NSString _dataModel;
#endregion
#region Computed Properties
public string Contents {
get { return _dataModel.ToString (); }
set { _dataModel = new NSString(value); }
}
#endregion
#region Constructors
public GenericTextDocument (NSUrl url) : base (url)
{
// Set the default document text
this.Contents = "";
}
public GenericTextDocument (NSUrl url, string contents) : base (url)
{
// Set the default document text
this.Contents = contents;
}
#endregion
#region Override Methods
public override bool LoadFromContents (NSObject contents, string typeName, out NSError outError)
{
// Clear the error state
outError = null;
// Were any contents passed to the document?
if (contents != null) {
_dataModel = NSString.FromData( (NSData)contents, NSStringEncoding.UTF8 );
}
// Inform caller that the document has been modified
RaiseDocumentModified (this);
// Return success
return true;
}
public override NSObject ContentsForType (string typeName, out NSError outError)
{
// Clear the error state
outError = null;
// Convert the contents to a NSData object and return it
NSData docData = _dataModel.Encode(NSStringEncoding.UTF8);
return docData;
}
#endregion
#region Events
public delegate void DocumentModifiedDelegate(GenericTextDocument document);
public event DocumentModifiedDelegate DocumentModified;
internal void RaiseDocumentModified(GenericTextDocument document) {
// Inform caller
if (this.DocumentModified != null) {
this.DocumentModified (document);
}
}
#endregion
}
}
```
The `GenericTextDocument` class presented above will be used throughout this article when working with the Document Picker and external Documents in a Xamarin.iOS 8 application.
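The following is a quick, hedged sketch of how the class is typically consumed (the path shown is a placeholder and error handling is kept to a minimum):
```csharp
// Create (or reference) a document at a URL inside the app's iCloud container
var docUrl = new NSUrl ("/placeholder/path/inside/container/test.txt", false);
var document = new GenericTextDocument (docUrl);
// Open the document asynchronously
document.Open ((success) => {
    if (!success) {
        Console.WriteLine ("Failed to open document");
        return;
    }
    // Modify the contents and save the changes back to the same URL
    document.Contents = "Hello from GenericTextDocument";
    document.Save (document.FileUrl, UIDocumentSaveOperation.ForOverwriting, (saved) => {
        Console.WriteLine (saved ? "Document saved" : "Save failed");
    });
});
```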
## Asynchronous File Coordination
iOS 8 provides several new Asynchronous File Coordination features via the new File Coordination APIs. Before iOS 8, all existing File Coordination APIs were totally synchronous. This meant the developer was responsible for implementing their own background queuing to prevent File Coordination from blocking the application's UI.
The new `NSFileAccessIntent` class contains a URL pointing to the file and several options to control the type of coordination required. The following code demonstrates moving a file from one location to another using intents:
```csharp
// Get source options
var srcURL = NSUrl.FromFilename ("FromFile.txt");
var srcIntent = NSFileAccessIntent.CreateReadingIntent (srcURL, NSFileCoordinatorReadingOptions.ForUploading);
// Get destination options
var dstURL = NSUrl.FromFilename ("ToFile.txt");
var dstIntent = NSFileAccessIntent.CreateReadingIntent (dstURL, NSFileCoordinatorReadingOptions.ForUploading);
// Create an array
var intents = new NSFileAccessIntent[] {
srcIntent,
dstIntent
};
// Initialize a file coordination with intents
var queue = new NSOperationQueue ();
var fileCoordinator = new NSFileCoordinator ();
fileCoordinator.CoordinateAccess (intents, queue, (err) => {
// Was there an error?
if (err!=null) {
Console.WriteLine("Error: {0}",err.LocalizedDescription);
}
});
```
## Discovering and Listing Documents
The way to discover and list Documents is by using the existing `NSMetadataQuery` APIs. This section will cover new features added to `NSMetadataQuery` that make working with Documents even easier than before.
### Existing Behavior
Prior to iOS 8, `NSMetadataQuery` was slow to pick up local file changes such as deletes, creates and renames.
![Diagram of NSMetadataQuery update behavior prior to iOS 8](document-picker-images/image2.png#lightbox)
In the above diagram:
1. For files that already exist in the Application Container, `NSMetadataQuery` has existing `NSMetadata` records pre-created and spooled so they are instantly available to the application.
1. The application creates a new file in the Application Container.
1. There is a delay before `NSMetadataQuery` sees the modification to the Application Container and creates the required `NSMetadata` record.
Because of the delay in the creation of the `NSMetadata` record, the application had to have two data sources open: one for local file changes and one for cloud based changes.
### Stitching
In iOS 8, `NSMetadataQuery` is easier to use directly with a new feature called Stitching:
![Diagram of NSMetadataQuery using Stitching in iOS 8](document-picker-images/image3.png#lightbox)
Using Stitching in the above diagram:
1. As before, for files that already exist in the Application Container, `NSMetadataQuery` has existing `NSMetadata` records pre-created and spooled.
1. The application creates a new file in the Application Container using File Coordination.
1. A hook in the Application Container sees the modification and calls `NSMetadataQuery` to create the required `NSMetadata` record.
1. The `NSMetadata` record is created directly after the file and is made available to the application.
By using Stitching the application no longer has to open a data source to monitor local and cloud based file changes. Now the application can rely on `NSMetadataQuery` directly.
> [!IMPORTANT]
> Stitching only works if the Application is using File Coordination as presented in the section above. If File Coordination is not being used, the APIs default to the existing pre iOS 8 behavior.
### New iOS 8 Metadata Features
The following new features have been added to `NSMetadataQuery` in iOS 8:
- `NSMetadataQuery` can now list non-local documents stored in the cloud.
- New APIs have been added to access metadata information on the cloud-based documents.
- There is a new `NSUrl_PromisedItems` API that provides access to the file attributes of files that may or may not have their content available locally.
- Use the `GetPromisedItemResourceValue` method to get information about a given file or use the `GetPromisedItemResourceValues` method to get information on more than one file at a time.
Two new file coordination flags have been added for dealing with metadata:
- `NSFileCoordinatorReadImmediatelyAvailableMetadataOnly`
- `NSFileCoordinatorWriteContentIndependentMetadataOnly`
With the above flags, the contents of the Document file do not need to be available locally for them to be used.
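For example, a file's metadata can be read even when its contents still live only in the cloud by using the promised-item calls. The sketch below is hedged: the exact Xamarin.iOS binding signature (in particular the `out` error parameter) is an assumption, so verify it against the `NSUrl` API for your version:
```csharp
// 'url' is an NSUrl for a document that may not have been downloaded yet
NSError promiseError;
var values = url.GetPromisedItemResourceValues (new NSString[] {
    NSUrl.LocalizedNameKey,
    NSUrl.ContentModificationDateKey
}, out promiseError);
if (promiseError == null && values != null) {
    Console.WriteLine ("Name: {0} Modified: {1}",
        values [NSUrl.LocalizedNameKey],
        values [NSUrl.ContentModificationDateKey]);
}
```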
The following code segment shows how to use `NSMetadataQuery` to query for the existence of a specific file and build the file if it doesn't exist:
```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Foundation;
using UIKit;
using ObjCRuntime;
using System.IO;
#region Static Properties
public const string TestFilename = "test.txt";
#endregion
#region Computed Properties
public bool HasiCloud { get; set; }
public bool CheckingForiCloud { get; set; }
public NSUrl iCloudUrl { get; set; }
public GenericTextDocument Document { get; set; }
public NSMetadataQuery Query { get; set; }
#endregion
#region Private Methods
private void FindDocument () {
Console.WriteLine ("Finding Document...");
// Create a new query and set it's scope
Query = new NSMetadataQuery();
Query.SearchScopes = new NSObject [] {
NSMetadataQuery.UbiquitousDocumentsScope,
NSMetadataQuery.UbiquitousDataScope,
NSMetadataQuery.AccessibleUbiquitousExternalDocumentsScope
};
// Build a predicate to locate the file by name and attach it to the query
var pred = NSPredicate.FromFormat ("%K == %@"
, new NSObject[] {
NSMetadataQuery.ItemFSNameKey
, new NSString(TestFilename)});
Query.Predicate = pred;
// Register a notification for when the query returns
NSNotificationCenter.DefaultCenter.AddObserver (this,
new Selector("queryDidFinishGathering:"), NSMetadataQuery.DidFinishGatheringNotification,
Query);
// Start looking for the file
Query.StartQuery ();
Console.WriteLine ("Querying: {0}", Query.IsGathering);
}
[Export("queryDidFinishGathering:")]
public void DidFinishGathering (NSNotification notification) {
Console.WriteLine ("Finish Gathering Documents.");
// Access the query and stop it from running
var query = (NSMetadataQuery)notification.Object;
query.DisableUpdates();
query.StopQuery();
// Release the notification
NSNotificationCenter.DefaultCenter.RemoveObserver (this
, NSMetadataQuery.DidFinishGatheringNotification
, query);
// Load the document that the query returned
LoadDocument(query);
}
private void LoadDocument (NSMetadataQuery query) {
Console.WriteLine ("Loading Document...");
// Take action based on the returned record count
switch (query.ResultCount) {
case 0:
// Create a new document
CreateNewDocument ();
break;
case 1:
// Gain access to the url and create a new document from
// that instance
NSMetadataItem item = (NSMetadataItem)query.ResultAtIndex (0);
var url = (NSUrl)item.ValueForAttribute (NSMetadataQuery.ItemURLKey);
// Load the document
OpenDocument (url);
break;
default:
// There has been an issue
Console.WriteLine ("Issue: More than one document found...");
break;
}
}
#endregion
#region Public Methods
public void OpenDocument(NSUrl url) {
Console.WriteLine ("Attempting to open: {0}", url);
Document = new GenericTextDocument (url);
// Open the document
Document.Open ( (success) => {
if (success) {
Console.WriteLine ("Document Opened");
} else
Console.WriteLine ("Failed to Open Document");
});
// Inform caller
RaiseDocumentLoaded (Document);
}
public void CreateNewDocument() {
// Create path to new file
// var docsFolder = Environment.GetFolderPath (Environment.SpecialFolder.Personal);
var docsFolder = Path.Combine(iCloudUrl.Path, "Documents");
var docPath = Path.Combine (docsFolder, TestFilename);
var ubiq = new NSUrl (docPath, false);
// Create new document at path
Console.WriteLine ("Creating Document at:" + ubiq.AbsoluteString);
Document = new GenericTextDocument (ubiq);
// Set the default value
Document.Contents = "(default value)";
// Save document to path
Document.Save (Document.FileUrl, UIDocumentSaveOperation.ForCreating, (saveSuccess) => {
Console.WriteLine ("Save completion:" + saveSuccess);
if (saveSuccess) {
Console.WriteLine ("Document Saved");
} else {
Console.WriteLine ("Unable to Save Document");
}
});
// Inform caller
RaiseDocumentLoaded (Document);
}
public bool SaveDocument() {
bool successful = false;
// Save document to path
Document.Save (Document.FileUrl, UIDocumentSaveOperation.ForOverwriting, (saveSuccess) => {
Console.WriteLine ("Save completion: " + saveSuccess);
if (saveSuccess) {
Console.WriteLine ("Document Saved");
successful = true;
} else {
Console.WriteLine ("Unable to Save Document");
successful=false;
}
});
// Return results
return successful;
}
#endregion
#region Events
public delegate void DocumentLoadedDelegate(GenericTextDocument document);
public event DocumentLoadedDelegate DocumentLoaded;
internal void RaiseDocumentLoaded(GenericTextDocument document) {
// Inform caller
if (this.DocumentLoaded != null) {
this.DocumentLoaded (document);
}
}
#endregion
```
### Document Thumbnails
Apple feels that the best user experience when listing documents for an application is to use previews. This gives the end users context, so they can quickly identify the document that they want to work with.
Prior to iOS 8, showing document previews required a custom implementation. New to iOS 8 are file system attributes that allow the developer to quickly work with Document Thumbnails.
#### Retrieving Document Thumbnails
By calling the `GetPromisedItemResourceValue` or `GetPromisedItemResourceValues` methods of the `NSUrl_PromisedItems` API, a `NSUrlThumbnailDictionary` is returned. The only key currently in this dictionary is `NSThumbnail1024X1024SizeKey`, and its value is the matching `UIImage`.
#### Saving Document Thumbnails
The easiest way to save a thumbnail is by using `UIDocument`. By calling the `GetFileAttributesToWrite` method of the `UIDocument` and setting the thumbnail, the thumbnail will automatically be saved when the Document file is saved. The iCloud Daemon will see this change and propagate it to iCloud. On Mac OS X, thumbnails are automatically generated for the developer by the Quick Look plugin.
With the basics of working with iCloud based Documents in place, along with the modifications to existing APIs, we are ready to implement the Document Picker View Controller in a Xamarin iOS 8 Mobile Application.
## Enabling iCloud in Xamarin
Before the Document Picker can be used in a Xamarin.iOS Application, iCloud support needs to be enabled both in your application and via Apple.
The following steps walk through the process of provisioning for iCloud.
1. Create an iCloud Container.
2. Create an App ID that contains the iCloud App Service.
3. Create a Provisioning profile that includes this App ID.
The [Working with Capabilities](~/ios/deploy-test/provisioning/capabilities/icloud-capabilities.md) guide walks through the first two steps. To create a provisioning profile, follow the steps in the [Provisioning Profile](~/ios/get-started/installation/device-provisioning/index.md#provisioning-your-device) guide.
The following steps walk through the process of configuring your application for iCloud:
Do the following:
1. Open the project in Visual Studio for Mac or Visual Studio.
2. In the **Solution Explorer**, right-click the project and select Options.
3. In the Options Dialog Box select **iOS Application**, and ensure that the **Bundle Identifier** matches the one that was defined in the **App ID** created above for the application.
4. Select **iOS Bundle Signing**, select the **Developer Identity** and the **Provisioning Profile** created above.
5. Click the **OK** button to save the changes and close the dialog box.
6. Right-click on `Entitlements.plist` in the **Solution Explorer** to open it in the editor.
> [!IMPORTANT]
> In Visual Studio you may need to open the Entitlements editor by right-clicking on it, selecting **Open With…** and selecting Property List Editor
7. Check **Enable iCloud** , **iCloud Documents** , **Key-value storage** and **CloudKit** .
8. Ensure the **Container** exists for the application (as created above). Example: `iCloud.com.your-company.AppName`
9. Save the changes to the file.
For more information on Entitlements refer to the [Working with Entitlements](~/ios/deploy-test/provisioning/entitlements.md) guide.
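After completing steps 7 through 9, the resulting `Entitlements.plist` will typically contain entries along the lines shown below. This is a rough sketch: the container identifier and key-value store value are placeholders, and the exact keys written by the editor may vary:
```xml
<key>com.apple.developer.icloud-container-identifiers</key>
<array>
    <string>iCloud.com.your-company.AppName</string>
</array>
<key>com.apple.developer.icloud-services</key>
<array>
    <string>CloudDocuments</string>
    <string>CloudKit</string>
</array>
<key>com.apple.developer.ubiquity-container-identifiers</key>
<array>
    <string>iCloud.com.your-company.AppName</string>
</array>
<key>com.apple.developer.ubiquity-kvstore-identifier</key>
<string>$(TeamIdentifierPrefix)$(CFBundleIdentifier)</string>
```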
With the above setup in place, the application can now use cloud-based documents and the new Document Picker View Controller.
## Common Setup Code
Before getting started with the Document Picker View Controller, there is some standard setup code required. Start by modifying the application's `AppDelegate.cs` file and make it look like the following:
```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Foundation;
using UIKit;
using ObjCRuntime;
using System.IO;
namespace DocPicker
{
[Register ("AppDelegate")]
public partial class AppDelegate : UIApplicationDelegate
{
#region Static Properties
public const string TestFilename = "test.txt";
#endregion
#region Computed Properties
public override UIWindow Window { get; set; }
public bool HasiCloud { get; set; }
public bool CheckingForiCloud { get; set; }
public NSUrl iCloudUrl { get; set; }
public GenericTextDocument Document { get; set; }
public NSMetadataQuery Query { get; set; }
public NSData Bookmark { get; set; }
#endregion
#region Private Methods
private void FindDocument () {
Console.WriteLine ("Finding Document...");
// Create a new query and set it's scope
Query = new NSMetadataQuery();
Query.SearchScopes = new NSObject [] {
NSMetadataQuery.UbiquitousDocumentsScope,
NSMetadataQuery.UbiquitousDataScope,
NSMetadataQuery.AccessibleUbiquitousExternalDocumentsScope
};
// Build a predicate to locate the file by name and attach it to the query
var pred = NSPredicate.FromFormat ("%K == %@",
new NSObject[] {NSMetadataQuery.ItemFSNameKey
, new NSString(TestFilename)});
Query.Predicate = pred;
// Register a notification for when the query returns
NSNotificationCenter.DefaultCenter.AddObserver (this
, new Selector("queryDidFinishGathering:")
, NSMetadataQuery.DidFinishGatheringNotification
, Query);
// Start looking for the file
Query.StartQuery ();
Console.WriteLine ("Querying: {0}", Query.IsGathering);
}
[Export("queryDidFinishGathering:")]
public void DidFinishGathering (NSNotification notification) {
Console.WriteLine ("Finish Gathering Documents.");
// Access the query and stop it from running
var query = (NSMetadataQuery)notification.Object;
query.DisableUpdates();
query.StopQuery();
// Release the notification
NSNotificationCenter.DefaultCenter.RemoveObserver (this
, NSMetadataQuery.DidFinishGatheringNotification
, query);
// Load the document that the query returned
LoadDocument(query);
}
private void LoadDocument (NSMetadataQuery query) {
Console.WriteLine ("Loading Document...");
// Take action based on the returned record count
switch (query.ResultCount) {
case 0:
// Create a new document
CreateNewDocument ();
break;
case 1:
// Gain access to the url and create a new document from
// that instance
NSMetadataItem item = (NSMetadataItem)query.ResultAtIndex (0);
var url = (NSUrl)item.ValueForAttribute (NSMetadataQuery.ItemURLKey);
// Load the document
OpenDocument (url);
break;
default:
// There has been an issue
Console.WriteLine ("Issue: More than one document found...");
break;
}
}
#endregion
#region Public Methods
public void OpenDocument(NSUrl url) {
Console.WriteLine ("Attempting to open: {0}", url);
Document = new GenericTextDocument (url);
// Open the document
Document.Open ( (success) => {
if (success) {
Console.WriteLine ("Document Opened");
} else
Console.WriteLine ("Failed to Open Document");
});
// Inform caller
RaiseDocumentLoaded (Document);
}
public void CreateNewDocument() {
// Create path to new file
// var docsFolder = Environment.GetFolderPath (Environment.SpecialFolder.Personal);
var docsFolder = Path.Combine(iCloudUrl.Path, "Documents");
var docPath = Path.Combine (docsFolder, TestFilename);
var ubiq = new NSUrl (docPath, false);
// Create new document at path
Console.WriteLine ("Creating Document at:" + ubiq.AbsoluteString);
Document = new GenericTextDocument (ubiq);
// Set the default value
Document.Contents = "(default value)";
// Save document to path
Document.Save (Document.FileUrl, UIDocumentSaveOperation.ForCreating, (saveSuccess) => {
Console.WriteLine ("Save completion:" + saveSuccess);
if (saveSuccess) {
Console.WriteLine ("Document Saved");
} else {
Console.WriteLine ("Unable to Save Document");
}
});
// Inform caller
RaiseDocumentLoaded (Document);
}
/// <summary>
/// Saves the document.
/// </summary>
/// <returns><c>true</c>, if document was saved, <c>false</c> otherwise.</returns>
public bool SaveDocument() {
bool successful = false;
// Save document to path
Document.Save (Document.FileUrl, UIDocumentSaveOperation.ForOverwriting, (saveSuccess) => {
Console.WriteLine ("Save completion: " + saveSuccess);
if (saveSuccess) {
Console.WriteLine ("Document Saved");
successful = true;
} else {
Console.WriteLine ("Unable to Save Document");
successful=false;
}
});
// Return results
return successful;
}
#endregion
#region Override Methods
public override void FinishedLaunching (UIApplication application)
{
// Start a new thread to check and see if the user has iCloud
// enabled.
new Thread(new ThreadStart(() => {
// Inform caller that we are checking for iCloud
CheckingForiCloud = true;
// Checks to see if the user of this device has iCloud
// enabled
var uburl = NSFileManager.DefaultManager.GetUrlForUbiquityContainer(null);
// Connected to iCloud?
if (uburl == null)
{
// No, inform caller
HasiCloud = false;
iCloudUrl =null;
Console.WriteLine("Unable to connect to iCloud");
InvokeOnMainThread(()=>{
var okAlertController = UIAlertController.Create ("iCloud Not Available", "Developer, please check your Entitlements.plist, Bundle ID and Provisioning Profiles.", UIAlertControllerStyle.Alert);
okAlertController.AddAction (UIAlertAction.Create ("Ok", UIAlertActionStyle.Default, null));
Window.RootViewController.PresentViewController (okAlertController, true, null);
});
}
else
{
// Yes, inform caller and save location the Application Container
HasiCloud = true;
iCloudUrl = uburl;
Console.WriteLine("Connected to iCloud");
// If we have made the connection with iCloud, start looking for documents
InvokeOnMainThread(()=>{
// Search for the default document
FindDocument ();
});
}
// Inform caller that we are no longer looking for iCloud
CheckingForiCloud = false;
})).Start();
}
// This method is invoked when the application is about to move from active to inactive state.
// OpenGL applications should use this method to pause.
public override void OnResignActivation (UIApplication application)
{
}
// This method should be used to release shared resources and it should store the application state.
// If your application supports background execution this method is called instead of WillTerminate
// when the user quits.
public override void DidEnterBackground (UIApplication application)
{
// Trap all errors
try {
// Values to include in the bookmark packet
var resources = new string[] {
NSUrl.FileSecurityKey,
NSUrl.ContentModificationDateKey,
NSUrl.FileResourceIdentifierKey,
NSUrl.FileResourceTypeKey,
NSUrl.LocalizedNameKey
};
// Create the bookmark
NSError err;
Bookmark = Document.FileUrl.CreateBookmarkData (NSUrlBookmarkCreationOptions.WithSecurityScope, resources, iCloudUrl, out err);
// Was there an error?
if (err != null) {
// Yes, report it
Console.WriteLine ("Error Creating Bookmark: {0}", err.LocalizedDescription);
}
}
catch (Exception e) {
// Report error
Console.WriteLine ("Error: {0}", e.Message);
}
}
// This method is called as part of the transition from background to active state.
public override void WillEnterForeground (UIApplication application)
{
// Is there any bookmark data?
if (Bookmark != null) {
// Trap all errors
try {
// Yes, attempt to restore it
bool isBookmarkStale;
NSError err;
var srcUrl = new NSUrl (Bookmark, NSUrlBookmarkResolutionOptions.WithSecurityScope, iCloudUrl, out isBookmarkStale, out err);
// Was there an error?
if (err != null) {
// Yes, report it
Console.WriteLine ("Error Loading Bookmark: {0}", err.LocalizedDescription);
} else {
// Load document from bookmark
OpenDocument (srcUrl);
}
}
catch (Exception e) {
// Report error
Console.WriteLine ("Error: {0}", e.Message);
}
}
}
// This method is called when the application is about to terminate. Save data, if needed.
public override void WillTerminate (UIApplication application)
{
}
#endregion
#region Events
public delegate void DocumentLoadedDelegate(GenericTextDocument document);
public event DocumentLoadedDelegate DocumentLoaded;
internal void RaiseDocumentLoaded(GenericTextDocument document) {
// Inform caller
if (this.DocumentLoaded != null) {
this.DocumentLoaded (document);
}
}
#endregion
}
}
```
> [!IMPORTANT]
> The above code includes the code from the Discovering and Listing Documents section above. It is presented here in its entirety, as it would appear in an actual application. For simplicity, this example works with a single, hard-coded file (`test.txt`) only.
The above code exposes several iCloud Drive shortcut properties and methods to make iCloud easier to work with in the rest of the application.
Next, add the following code to any view or view container that will be using the Document Picker or working with cloud-based documents:
```csharp
using CloudKit;
...
#region Computed Properties
/// <summary>
/// Returns the delegate of the current running application
/// </summary>
/// <value>The this app.</value>
public AppDelegate ThisApp {
get { return (AppDelegate)UIApplication.SharedApplication.Delegate; }
}
#endregion
```
This adds a shortcut to get to the `AppDelegate` and access the iCloud shortcuts created above.
With this code in place, let's take a look at implementing the Document Picker View Controller in a Xamarin iOS 8 application.
## Using the Document Picker View Controller
Prior to iOS 8, it was very difficult to access Documents from another application because there was no way, from within an app, to discover documents that lived outside of its own Application Container.
### Existing Behavior
[](document-picker-images/image31.png#lightbox)
Let's take a look at accessing an external document prior to iOS 8:
1. First the user would have to open the application that originally created the Document.
1. The Document is selected and the `UIDocumentInteractionController` is used to send the Document to the new application.
1. Finally, a copy of the original Document is placed in the new application's Container.
From there the Document is available for the second application to open and edit.
### Discovering Documents Outside of an App's Container
In iOS 8, an application is able to access Documents outside of its own Application Container with ease:
[](document-picker-images/image32.png#lightbox)
Using the new iCloud Document Picker ( `UIDocumentPickerViewController`), an iOS application can directly discover and access Documents outside of its Application Container. The `UIDocumentPickerViewController` provides a mechanism for the user to grant access to and edit those discovered Documents via permissions.
An application must opt-in to have its Documents show up in the iCloud Document Picker and be available for other applications to discover and work with them. To have a Xamarin iOS 8 application share its Application Container, edit its `Info.plist` file in a standard text editor and add the following two lines to the bottom of the dictionary (between the `<dict>...</dict>` tags):
```xml
<key>NSUbiquitousContainerIsDocumentScopePublic</key>
<true/>
```
The `UIDocumentPickerViewController` provides a great new UI that allows the user to choose documents. To display the Document Picker View Controller in a Xamarin iOS 8 application, do the following:
```csharp
using MobileCoreServices;
...
// Allow the Document picker to select a range of document types
var allowedUTIs = new string[] {
UTType.UTF8PlainText,
UTType.PlainText,
UTType.RTF,
UTType.PNG,
UTType.Text,
UTType.PDF,
UTType.Image
};
// Display the picker
//var picker = new UIDocumentPickerViewController (allowedUTIs, UIDocumentPickerMode.Open);
var pickerMenu = new UIDocumentMenuViewController(allowedUTIs, UIDocumentPickerMode.Open);
pickerMenu.DidPickDocumentPicker += (sender, args) => {
// Wireup Document Picker
args.DocumentPicker.DidPickDocument += (sndr, pArgs) => {
// IMPORTANT! You must lock the security scope before you can
// access this file
var securityEnabled = pArgs.Url.StartAccessingSecurityScopedResource();
// Open the document
ThisApp.OpenDocument(pArgs.Url);
// IMPORTANT! You must release the security lock established
// above.
pArgs.Url.StopAccessingSecurityScopedResource();
};
// Display the document picker
PresentViewController(args.DocumentPicker,true,null);
};
pickerMenu.ModalPresentationStyle = UIModalPresentationStyle.Popover;
PresentViewController(pickerMenu,true,null);
UIPopoverPresentationController presentationPopover = pickerMenu.PopoverPresentationController;
if (presentationPopover!=null) {
presentationPopover.SourceView = this.View;
presentationPopover.PermittedArrowDirections = UIPopoverArrowDirection.Down;
presentationPopover.SourceRect = ((UIButton)s).Frame; // 's' is the sender (the tapped UIButton) from the enclosing event handler
}
```
> [!IMPORTANT]
> The developer must call the `StartAccessingSecurityScopedResource` method of the `NSUrl` before an external document can be accessed. The `StopAccessingSecurityScopedResource` method must be called to release the security lock as soon as the document has been loaded.
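Because document loading can fail, it is good practice to balance these two calls so the lock is always released. The following is a minimal sketch that simply rearranges the picker handler shown above into a `try/finally` block (the handler and variable names are the same illustrative ones used earlier):

```csharp
// Minimal sketch: wrap the security-scoped access in try/finally so the
// security lock is always released, even if opening the document fails.
args.DocumentPicker.DidPickDocument += (sndr, pArgs) => {
    var securityEnabled = pArgs.Url.StartAccessingSecurityScopedResource ();
    try {
        // Open the document while the security scope is active
        ThisApp.OpenDocument (pArgs.Url);
    } finally {
        // Always release the security lock established above
        if (securityEnabled) {
            pArgs.Url.StopAccessingSecurityScopedResource ();
        }
    }
};
```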
### Sample Output
Here is an example of how the code above would display a Document Picker when run on an iPhone device:
1. The user starts the application and the main interface is displayed:
[](document-picker-images/image33.png#lightbox)
1. The user taps the **Action** Button at the top of the screen and is asked to select a **Document Provider** from the list of available providers:
[](document-picker-images/image34.png#lightbox)
1. The **Document Picker View Controller** is displayed for the selected **Document Provider**:
[](document-picker-images/image35.png#lightbox)
1. The user taps on a **Document Folder** to display its contents:
[](document-picker-images/image36.png#lightbox)
1. The user selects a **Document** and the **Document Picker** is closed.
1. The main interface is redisplayed, the **Document** is loaded from the external Container and its contents displayed.
The actual display of the Document Picker View Controller depends on the Document Providers that the user has installed on the device and on which Document Picker Mode has been implemented. The above example uses Open Mode; the other modes are discussed in detail below.
## Managing External Documents
As discussed above, prior to iOS 8, an application could only access documents that were a part of its Application Container. In iOS 8 an application can access Documents from external sources:
[](document-picker-images/image37.png#lightbox)
When the user selects a Document from an external source, a Reference Document is written to the Application Container that points to the original Document.
To assist in adding this new ability into existing applications, several new features have been added to the `NSMetadataQuery` API. Typically, an application uses the Ubiquitous Document Scope to list documents that live within its Application Container. Using this scope, only documents within the Application Container will continue to be displayed.
Using the new Ubiquitous External Document Scope returns Documents that live outside the Application Container, along with their metadata. The `NSMetadataItemUrlKey` will point to the URL where the Document is actually located.
Sometimes an application doesn't want to work with the Documents being pointed to by the reference. Instead, the app wants to work with the Reference Document directly. For example, the app may want to display the document in the Application's folder in the UI, or to allow the user to move the references around inside a folder.
In iOS 8, a new `NSMetadataItemUrlInLocalContainerKey` has been provided to access the Reference Document directly. This key points to the actual reference to the external document in an Application Container.
The `NSMetadataUbiquitousItemIsExternalDocumentKey` is used to test whether or not a document is external to an Application's Container. The `NSMetadataUbiquitousItemContainerDisplayNameKey` is used to access the name of the Container that is housing the original copy of an external Document.
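As a rough illustration, the following sketch shows how these scopes and keys might be queried from C#. Note that the exact Xamarin binding names used for the new scope and key constants (`AccessibleUbiquitousExternalDocumentsScope`, `UbiquitousItemIsExternalDocumentKey`, `UbiquitousItemContainerDisplayNameKey`) are assumptions based on the native iOS constant names and should be verified against the current SDK:

```csharp
// Sketch only - the scope and key constant names below are assumptions; verify
// them against your installed Xamarin.iOS SDK before using this code.
var query = new NSMetadataQuery {
    SearchScopes = new NSObject[] {
        NSMetadataQuery.UbiquitousDocumentsScope,                    // documents inside the Application Container
        NSMetadataQuery.AccessibleUbiquitousExternalDocumentsScope   // external documents (new in iOS 8)
    }
};
NSNotificationCenter.DefaultCenter.AddObserver (NSMetadataQuery.DidFinishGatheringNotification, (note) => {
    query.DisableUpdates ();
    for (nint i = 0; i < query.ResultCount; i++) {
        var item = (NSMetadataItem)query.ResultAtIndex (i);
        // Is this Document external to our Application Container?
        var isExternal = item.ValueForAttribute (NSMetadataQuery.UbiquitousItemIsExternalDocumentKey.ToString ());
        // Which container holds the original copy of the Document?
        var container = item.ValueForAttribute (NSMetadataQuery.UbiquitousItemContainerDisplayNameKey.ToString ());
        Console.WriteLine ("External: {0}, Container: {1}", isExternal, container);
    }
    query.EnableUpdates ();
}, query);
query.StartQuery ();
```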
### Why Document References are Required
The main reason that iOS 8 uses references to access external Documents is security. No application is given access to any other application's Container. Only the Document Picker can do that, because it runs out-of-process and has system-wide access.
The only way to get to a document outside of the Application Container is by using the Document Picker, and the URL returned by the picker is Security Scoped. A Security Scoped URL contains just enough information to select the document, along with the scoped rights required to grant an application access to it.
It is important to note that if the Security Scoped URL was serialized into a string and then de-serialized, the Security Information would be lost and the file would be inaccessible from the URL. The Document Reference feature provides a mechanism to get back to the files pointed to by these URLs.
So if the application acquires an `NSUrl` from one of the Reference Documents, it already has the security scope attached and can be used to access the file. For this reason, it is highly suggested that the developer use `UIDocument` because it handles all of this information and processing for them.
### Using Bookmarks
It is not always feasible to enumerate an application's Documents to get back to a specific Document, for example, when doing state restoration. iOS 8 provides a mechanism to create Bookmarks that directly target a given Document.
The following code will create a Bookmark from a `UIDocument`'s `FileUrl` property:
```csharp
// Trap all errors
try {
// Values to include in the bookmark packet
var resources = new string[] {
NSUrl.FileSecurityKey,
NSUrl.ContentModificationDateKey,
NSUrl.FileResourceIdentifierKey,
NSUrl.FileResourceTypeKey,
NSUrl.LocalizedNameKey
};
// Create the bookmark
NSError err;
Bookmark = Document.FileUrl.CreateBookmarkData (NSUrlBookmarkCreationOptions.WithSecurityScope, resources, iCloudUrl, out err);
// Was there an error?
if (err != null) {
// Yes, report it
Console.WriteLine ("Error Creating Bookmark: {0}", err.LocalizedDescription);
}
}
catch (Exception e) {
// Report error
Console.WriteLine ("Error: {0}", e.Message);
}
```
The existing Bookmark API is used to create a Bookmark against an existing `NSUrl` that can be saved and loaded to provide direct access to an external file. The following code will restore a bookmark that was created above:
```csharp
if (Bookmark != null) {
// Trap all errors
try {
// Yes, attempt to restore it
bool isBookmarkStale;
NSError err;
var srcUrl = new NSUrl (Bookmark, NSUrlBookmarkResolutionOptions.WithSecurityScope, iCloudUrl, out isBookmarkStale, out err);
// Was there an error?
if (err != null) {
// Yes, report it
Console.WriteLine ("Error Loading Bookmark: {0}", err.LocalizedDescription);
} else {
// Load document from bookmark
OpenDocument (srcUrl);
}
}
catch (Exception e) {
// Report error
Console.WriteLine ("Error: {0}", e.Message);
}
}
```
## Open vs. Import Mode and the Document Picker
The Document Picker View Controller features two different modes of operation:
1. **Open Mode** – In this mode, when the user selects an external Document, the Document Picker will create a Security Scoped Bookmark in the Application Container.
[](document-picker-images/image37.png#lightbox)
1. **Import Mode** – In this mode, when the user selects an external Document, the Document Picker will not create a Bookmark; instead, it copies the file into a Temporary Location and provides the application access to the Document at that location:
[](document-picker-images/image38.png#lightbox)
When the application terminates for any reason, the Temporary Location is emptied and the file is removed. If the application needs to maintain access to the file, it should make a copy and place it in its Application Container.
The Open Mode is useful when the application wishes to collaborate with another application and share any changes made to the document with that application. The Import Mode is used when the application does not want to share its modifications to a Document with other applications.
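For example, an Import Mode picker might be wired up as shown below. This is a sketch only: it reuses the `allowedUTIs` array defined earlier, the destination path handling is illustrative, and it assumes `System.IO` is referenced for `Path.Combine`:

```csharp
// Sketch only - Import Mode hands back a copy of the file in a Temporary
// Location, so the file is immediately copied into the Application Container.
var importPicker = new UIDocumentPickerViewController (allowedUTIs, UIDocumentPickerMode.Import);
importPicker.DidPickDocument += (sender, e) => {
    // Build a destination URL inside the app's Documents folder (illustrative)
    var docsFolder = Environment.GetFolderPath (Environment.SpecialFolder.MyDocuments);
    var destUrl = NSUrl.FromFilename (Path.Combine (docsFolder, e.Url.LastPathComponent));
    // Copy the temporary file into our container before it is cleaned up
    NSError err;
    NSFileManager.DefaultManager.Copy (e.Url, destUrl, out err);
    if (err != null) {
        Console.WriteLine ("Error importing document: {0}", err.LocalizedDescription);
    }
};
PresentViewController (importPicker, true, null);
```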
## Making a Document External
As noted above, an iOS 8 application does not have access to containers outside of its own Application Container. The application can write to its own container locally or into a Temporary Location, then use a special document mode to move the resulting Document outside of the Application Container to a user-chosen location.
To move a Document to an external location, do the following:
1. First create a new Document in a local or temporary location.
1. Create a `NSUrl` that points to the new Document.
1. Open a new Document Picker View Controller and pass it the `NSUrl` with the Mode of `MoveToService` .
1. Once the user chooses a new location, the Document will be moved from its current location to the new location.
1. A Reference Document will be written to the app's Application Container so that the file can still be accessed by the creating application.
The following code can be used to move a Document to an external location: `var picker = new UIDocumentPickerViewController (srcURL, UIDocumentPickerMode.MoveToService);`
The Reference Document returned by the above process is exactly the same as one created by the Open Mode of the Document Picker. However, there are times that the application might wish to move a Document without keeping a reference to it.
To move a Document without generating a reference, use the `ExportToService` Mode. Example: `var picker = new UIDocumentPickerViewController (srcURL, UIDocumentPickerMode.ExportToService);`
When using the `ExportToService` mode, the Document is copied to the external Container and the existing copy is left in its original location.
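Putting the steps together, a minimal sketch of presenting a move picker and reacting to the result might look like the following (the `srcURL` variable and the logging are illustrative):

```csharp
// Sketch only - move a Document that was first created locally (srcURL)
// out to a location the user chooses in the Document Picker.
var movePicker = new UIDocumentPickerViewController (srcURL, UIDocumentPickerMode.MoveToService);
movePicker.DidPickDocument += (sender, e) => {
    // e.Url points to the Document in its new, external location; a Reference
    // Document remains in the Application Container so the file stays accessible.
    Console.WriteLine ("Document moved to: {0}", e.Url.AbsoluteString);
};
PresentViewController (movePicker, true, null);
```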
## Document Provider Extensions
With iOS 8, Apple wants the end user to be able to access any of their cloud-based documents, no matter where they actually exist. To achieve this goal, iOS 8 provides a new Document Provider Extension mechanism.
### What is a Document Provider Extension?
Simply stated, a Document Provider Extension is a way for a developer or third party to provide an application with alternative document storage that can be accessed in exactly the same way as the existing iCloud storage location.
The user can select one of these alternative storage locations from the Document Picker and they can use the exact same access modes (Open, Import, Move or Export) to work with files in that location.
This is implemented using two different extensions:
- **Document Picker Extension** – Supplies a `UIViewController` subclass that provides a graphical interface for the user to choose a document from an alternative storage location. This subclass will be displayed as part of the Document Picker View Controller.
- **File Provider Extension** – This is a non-UI extension that deals with actually providing the file's contents. These extensions are provided through File Coordination ( `NSFileCoordinator` ). This is another important case where File Coordination is required.
The following diagram shows the typical data flow when working with Document Provider Extensions:
[](document-picker-images/image39.png#lightbox)
The following process occurs:
1. The application presents a Document Picker Controller to allow the user to select a file to work with.
1. The user selects an alternative file location and the custom `UIViewController` extension is called to display the user interface.
1. The user selects a file from this location and the URL is passed back to the Document Picker.
1. The Document Picker selects the file's URL and returns it to the application for the user to work on.
1. The URL is passed to the File Coordinator to return the file's contents to the application.
1. The File Coordinator calls the custom File Provider Extension to retrieve the file.
1. The contents of the file are returned to the File Coordinator.
1. The contents of the file are returned to the application.
### Security and Bookmarks
This section will take a quick look at how security and persistent file access through Bookmarks works with Document Provider Extensions. Unlike the iCloud Document Provider, which automatically saves Security Scope information and Bookmarks to the Application Container, Document Provider Extensions do not, because they are not part of the Document Reference System.
For example: in an Enterprise setting that provides its own company-wide secure datastore, administrators don't want confidential corporate information accessed or processed by the public iCloud Servers. Therefore, the built-in Document Reference System cannot be used.
The Bookmark system can still be used and it is the responsibility of the File Provider Extension to correctly process a bookmarked URL and return the contents of the Document pointed to by it.
For security purposes, iOS 8 has an Isolation Layer that persists the information about which application has access to which identifier inside which File Provider. It should be noted that all file access is controlled by this Isolation Layer.
The following diagram shows the data flow when working with Bookmarks and a Document Provider Extension:
[](document-picker-images/image40.png#lightbox)
The following process occurs:
1. The application is about to enter the background and needs to persist its state. It calls `NSUrl` to create a bookmark to a file in alternative storage.
1. `NSUrl` calls the File Provider Extension to get a persistent URL to the Document.
1. The File Provider Extension returns the URL as a string to the `NSUrl` .
1. The `NSUrl` bundles the URL into a Bookmark and returns it to the application.
1. When the Application awakes from being in the background and needs to restore state, it passes the Bookmark to `NSUrl` .
1. `NSUrl` calls the File Provider Extension with the URL of the file.
1. The File Provider Extension accesses the file and returns the location of the file to `NSUrl` .
1. The file location is bundled with security information and returned to the application.
From here, the application can access the file and work with it as normal.
### Writing Files
This section will take a quick look at how writing files to an alternative location with a Document Provider Extension works. The iOS application will use File Coordination to save information to disk inside the Application Container. Shortly after the file has been successfully written, the File Provider Extension will be notified of the change.
At this point, the File Provider Extension can start uploading the file to the alternative location (or mark the file as dirty and requiring upload).
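A hedged sketch of such a coordinated write (when not using `UIDocument`, which normally performs File Coordination for you) is shown below. The `NSFileCoordinator` constructor and `CoordinateWrite` usage are assumptions about the Xamarin binding and should be verified against the SDK; `fileUrl` is an illustrative `NSUrl` pointing at the file to write:

```csharp
// Sketch only - write a file through File Coordination so that any interested
// File Provider Extension is notified of the change. API names are assumptions.
using (var coordinator = new NSFileCoordinator (filePresenterOrNil: null)) {
    NSError coordErr;
    coordinator.CoordinateWrite (fileUrl, NSFileCoordinatorWritingOptions.ForReplacing, out coordErr, (newUrl) => {
        // Perform the actual write inside the coordinated block
        NSError writeErr;
        NSData.FromString ("Hello iCloud Drive").Save (newUrl, false, out writeErr);
        if (writeErr != null) {
            Console.WriteLine ("Error writing file: {0}", writeErr.LocalizedDescription);
        }
    });
    if (coordErr != null) {
        Console.WriteLine ("Coordination error: {0}", coordErr.LocalizedDescription);
    }
}
```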
### Creating New Document Provider Extensions
Creating new Document Provider Extensions is outside of the scope of this introductory article. This information is provided here to show that, based on the extensions a user has loaded in their iOS device, an application may have access to Document storage locations outside of the Apple provided iCloud location.
The developer should be aware of this fact when using the Document Picker and working with external Documents. They should not assume those Documents are hosted in iCloud.
For more information on creating a Storage Provider or Document Picker Extension, please see the [Introduction to App Extensions](~/ios/platform/extensions.md) document.
## Migrating to iCloud Drive
On iOS 8, users can choose to continue using the existing iCloud Documents System used in iOS 7 (and prior systems) or they can choose to migrate existing Documents to the new iCloud Drive mechanism.
On Mac OS X Yosemite, Apple does not provide backwards compatibility, so all documents must be migrated to iCloud Drive or they will no longer be updated across devices.
After a user's account has been migrated to iCloud Drive, only devices using iCloud Drive will be able to propagate changes to Documents across those devices.
> [!IMPORTANT]
> Developers should be aware that the new features covered in this article are only available if the user's account has been migrated to iCloud Drive.
## Summary
This article has covered the changes to existing iCloud APIs required to support iCloud Drive and the new Document Picker View Controller. It has covered File Coordination and why it is important when working with cloud-based documents. It has covered the setup required to enable cloud-based documents in a Xamarin.iOS Application and given an introductory look at working with documents outside an app's Application Container using the Document Picker View Controller.
In addition, this article briefly covered Document Provider Extensions and why the developer should be aware of them when writing applications that can handle cloud-based documents.
## Related Links
- [DocPicker (sample)](https://docs.microsoft.com/samples/xamarin/ios-samples/ios8-docpicker)
- [Introduction to iOS 8](~/ios/platform/introduction-to-ios8.md)
- [Introduction to App Extensions](~/ios/platform/extensions.md)
| {
"pile_set_name": "Github"
} |
using NUnit.Framework;
using OpenWrap.Commands.Remote;
using OpenWrap.Commands.Remote.Messages;
using OpenWrap.Testing;
using Tests.Commands.contexts;
namespace Tests.Commands.remote.list
{
class list_remote_with_fetch : command<ListRemoteCommand>
{
public list_remote_with_fetch()
{
given_remote_config("sauron");
when_executing_command();
}
[Test]
public void data_is_correct()
{
Results.ShouldHaveOne<RemoteRepositoryData>()
.Check(x => x.Name.ShouldBe("sauron"))
.Check(x => x.Fetch.ShouldBeTrue())
.Check(x => x.Publish.ShouldBeFalse())
.ToString().ShouldBe(" 1 sauron [fetch]");
}
}
} | {
"pile_set_name": "Github"
} |
# Copyright 2014, 2015 Simon Lydell
# X11 (“MIT”) Licensed. (See LICENSE.)
# <http://es5.github.io/#A.1>
# <http://people.mozilla.org/~jorendorff/es6-draft.html#sec-ecmascript-language-lexical-grammar>
# Don’t worry, you don’t need to know CoffeeScript. It is only used for its
# readable regex syntax. Everything else is done in JavaScript in index.js.
module.exports = ///
( # <string>
([ ' " ])
(?:
(?! \2 | \\ ).
|
\\(?: \r\n | [\s\S] )
)*
(\2)?
|
`
(?:
[^ ` \\ $ ]
|
\\[\s\S]
|
\$(?!\{)
|
\$\{
(?:
[^{}]
|
\{ [^}]* \}?
)*
\}?
)*
(`)?
)
|
( # <comment>
//.*
)
|
( # <comment>
/\*
(?:
[^*]
|
\*(?!/)
)*
( \*/ )?
)
|
( # <regex>
/(?!\*)
(?:
\[
(?:
(?![ \] \\ ]).
|
\\.
)*
\]
|
(?![ / \] \\ ]).
|
\\.
)+
/
(?:
(?!
\s*
(?:
\b
|
[ \u0080-\uFFFF $ \\ ' " ~ ( { ]
|
[ + \- ! ](?!=)
|
\.?\d
)
)
|
[ g m i y u ]{1,5} \b
(?!
[ \u0080-\uFFFF $ \\ ]
|
\s*
(?:
[ + \- * % & | ^ < > ! = ? ( { ]
|
/(?! [ / * ] )
)
)
)
)
|
( # <number>
(?:
0[xX][ \d a-f A-F ]+
|
0[oO][0-7]+
|
0[bB][01]+
|
(?:
\d*\.\d+
|
\d+\.? # Support one trailing dot for integers only.
)
(?: [eE][+-]?\d+ )?
)
)
|
( # <name>
# See <http://mathiasbynens.be/notes/javascript-identifiers>.
(?!\d)
(?:
(?!\s)[ $ \w \u0080-\uFFFF ]
|
\\u[ \d a-f A-F ]{4}
|
\\u\{[ \d a-f A-F ]{1,6}\}
)+
)
|
( # <punctuator>
-- | \+\+
|
&& | \|\|
|
=>
|
\.{3}
|
(?:
[ + \- * / % & | ^ ]
|
<{1,2} | >{1,3}
|
!=? | ={1,2}
)=?
|
[ ? : ~ ]
|
[ ; , . [ \] ( ) { } ]
)
|
( # <whitespace>
\s+
)
|
( # <invalid>
^$ # Empty.
|
[\s\S] # Catch-all rule for anything not matched by the above.
)
///g
| {
"pile_set_name": "Github"
} |
-----BEGIN CERTIFICATE-----
MIICwDCCAaigAwIBAgIJAMxop1TANOUXMA0GCSqGSIb3DQEBCwUAMBUxEzARBgNV
BAoMCmJvZ28tY2hhaW4wHhcNMTcwODI4MTQzNTIzWhcNMjcwODI2MTQzNTIzWjAV
MRMwEQYDVQQKDApib2dvLWNoYWluMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
CgKCAQEA5Rmy9d0TK2CYkabdcLWP2t18G/VTI3tqQ248uLPHsu0Qwd1jnpvy+iSM
6wvj7HF28vRp7bH45LZYr0n3xBKO2Unq0FjGYrpd69f/RKSa7YiZOwRNcnvjH2sK
/V8kpQVVWtFbifNwiyTKGTsI2B3itDuAofIobSV1q18QruEDOkZDqK98zxmlFctM
IaZGCg+qVHSwoMOVSPvEffe4AApB+Y5BbBuPlx1TOLpD+chnfQxAC/SstrKgSP8W
TipezQZ707p0XO3TvEMEZg474ZBmCdxDHgOAkxJWAHh+7/G7hyzK9OiknUH6m6Rj
BEgKQM8Ts3UwjjVukXjpX04Y5e4NkwIDAQABoxMwETAPBgNVHREECDAGggR0ZXN0
MA0GCSqGSIb3DQEBCwUAA4IBAQDdlYbBnfLp87GTzA3EoflWccTKmqQXzz+cV7Ys
Y8lyFkeNkI5bjdlA2OEvpC8qwP7t5ynWlGCaRLhu8DL5xYg/phdebnoDziExq4sx
8KDs94F8o5OrTLvhF5/54XXBmlQ5CBCrij/Yh5rEZYHp0WKeGeusJoAdNYsAsWBP
ic9Kz5f1KGwVZRVwerFRHxn0d3TRApdr8Z08BEeFgf85GJ/NzgXgeVUPeY1zP29N
d2YMjQkoftv8rDyPajzkRrA8m95kqBnBq7zS1RIJsOgyVj7euxBR3KImGveGv77E
60Md73BJ+G5PmEvMvA8ISpGwIKECUoPrF78nEnNzDebkMSL4
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIDYDCCAkigAwIBAgIJAMzVAHp+DLMaMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV
BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX
aWRnaXRzIFB0eSBMdGQwHhcNMTcwODI4MTQzNTIzWhcNMjcwODI2MTQzNTIzWjBF
MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50
ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
CgKCAQEA2I/TeZbcYE4dtFHumC2BnklgZ58eoYO+GXQITbHauYmw3Zk2q6p73mfM
P8zCA7ftEOQRA9rTKC4UsWY+Q8S4HML6Z/CtYgR8ZTx+tkO2NVmf9uP0xS6rxZAJ
gcZFemnWM5nMk+4AXSR23nfVbHJ/mW1MpDOFP8wvSr1BrIrJojWVy7/9k6SuRWYK
rK7FpwaSCk+X5U9R2i/RkVvtg58NLuGqpkxSMszpb0NbaG/sw0/s1a9sPEliBPOE
HaiGOKB5xSIDXcIfoFx+3G4TwX97bT4wUP4OfwbyHZTXCWfXFGVtHq+038OrhEkc
p4WjJm54oi5FOuNy9egspDKohbfw4wIDAQABo1MwUTAdBgNVHQ4EFgQUK+qMsG3P
TWsWrsjmRtpSICfCFhEwHwYDVR0jBBgwFoAUK+qMsG3PTWsWrsjmRtpSICfCFhEw
DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEALVCboVd/Kf3ofoqS
03S9aa2hXscDcvqPxhEhRosiunM3/qnXn3+SHJGvOKciXV+A/YCvwsvX4AeMxxH3
tSg55g+2hylFfk9hyTmNRKUMxQNZSbUpGtOz6vM9o8HgBP3KzS0mvHxRI5VNN8n3
wDcmjx/GCBbUOgRPzAIKIqpzKX7PNHF2imgf2HeDhWGIowNWE/GbuRg+tCxQY5ZZ
vN9XxIttvrzTHh57sHntwUvAkc+EI5TJiD/VbUiBEfHt0vojU2lzEKuGmzKpuT/g
vSu8+pGvI3lD1bNg26zniPOUxzGW93ZKS87af+bcicVvpVxsZ+OEgxyJhGdqZDjY
IdjRtA==
-----END CERTIFICATE-----
| {
"pile_set_name": "Github"
} |
#
# Mathematical function library.
# Author: James P. Biagioni ([email protected])
# Company: University of Illinois at Chicago
# Created: 12/16/10
#
import math
# Normal distribution PDF. Formula obtained from: http://en.wikipedia.org/wiki/Normal_Distribution
def normal_distribution_pdf(x, mu, sigma, numerator=1.0):
return (numerator / math.sqrt(2.0 * math.pi * math.pow(sigma, 2.0))) * math.exp(-1.0 * (math.pow((x - mu), 2.0) / (2.0 * math.pow(sigma, 2.0))))
# Normal distribution CDF. Formula obtained from: http://en.wikipedia.org/wiki/Normal_Distribution
def normal_distribution_cdf(x, mu, sigma):
return (0.5 * (1.0 + erf( (x - mu) / math.sqrt(2.0 * math.pow(sigma, 2.0)))))
# Complementary normal distribution CDF. Formula obtained from: http://en.wikipedia.org/wiki/Cumulative_distribution_function
def complementary_normal_distribution_cdf(x, mu, sigma):
return (1.0 - normal_distribution_cdf(x, mu, sigma))
# Spring force. Formula obtained from: http://en.wikipedia.org/wiki/Hooke%27s_law
def spring_force(x, k):
return ((-1.0 * k) * x)
# Gaussian error function. Algorithm obtained from: http://www.johndcook.com/python_erf.html
def erf(x):
# constants
a1 = 0.254829592
a2 = -0.284496736
a3 = 1.421413741
a4 = -1.453152027
a5 = 1.061405429
p = 0.3275911
# Save the sign of x
sign = 1
if x < 0:
sign = -1
x = abs(x)
# A&S formula 7.1.26
t = 1.0/(1.0 + p*x)
y = 1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*math.exp(-x*x)
return sign*y
| {
"pile_set_name": "Github"
} |
function [ss,gg,tt,ff,zo]=specsub(si,fsz,pp)
%SPECSUB performs speech enhancement using spectral subtraction [SS,ZO]=(S,FSZ,P)
%
% Usage: (1) y=specsub(x,fs); % enhance the speech using default parameters
%
% Inputs:
% si input speech signal
% fsz sample frequency in Hz
% Alternatively, the input state from a previous call (see below)
% pp algorithm parameters [optional]
%
% Outputs:
% ss output enhanced speech
% gg(t,f,i) selected time-frequency values (see pp.tf below)
% tt centre of frames (in seconds)
% ff centre of frequency bins (in Hz)
% zo output state (or the 2nd argument if gg,tt,ff are omitted)
%
% The algorithm operation is controlled by a small number of parameters:
%
% pp.of % overlap factor = (fft length)/(frame increment) [2]
% pp.ti % desired frame increment [0.016 seconds]
% pp.ri % set to 1 to round ti to the nearest power of 2 samples [0]
% pp.g % subtraction domain: 1=magnitude, 2=power [1]
% pp.e % gain exponent [1]
% pp.am % max oversubtraction factor [3]
% pp.b % max noise attenuation in power domain [0.01]
% pp.al % SNR for oversubtraction=am (set this to Inf for fixed a) [-5 dB]
% pp.ah % SNR for oversubtraction=1 [20 dB]
% pp.ne % noise estimation: 0=min statistics, 1=MMSE [0]
% pp.bt % threshold for binary gain or -1 for continuous gain [-1]
% pp.mx % input mixture gain [0]
% pp.gh % maximum gain for noise floor [1]
% pp.rf % round output signal to an exact number of frames [0]
% pp.tf % selects time-frequency planes to output in the gg() variable ['g']
% 'i' = input power spectrum
% 'I' = input complex spectrum
% 'n' = noise power spectrum
% 'g' = gain
% 'o' = output power spectrum
% 'O' = output complex spectrum
%
% Following [1], the magnitude-domain gain in each time-frequency bin is given by
% gain=mx+(1-mx)*max((1-(a*N/X)^(g/2))^(e/g),min(gh,(b*N/X)^(e/2)))
% where N and X are the powers of the noise and noisy speech respectively.
% The oversubtraction factor varies linearly between a=am for a frame SNR of al down to
% a=1 for a frame SNR of ah. To obtain a fixed value of a for all values of SNR, set al=Inf.
% Common exponent combinations are:
% g=1 e=1 Magnitude Domain spectral subtraction
% g=2 e=1 Power Domain spectral subtraction
% g=2 e=2 Wiener filtering
% Many authors use the parameters alpha=a^(g/2), beta=b^(g/2) and gamma2=e/g instead of a, b and e
% but this increases interdependence amongst the parameters.
% If bt>=0 then the max(...) expression above is thresholded to become 0 or 1.
%
% In addition it is possible to specify parameters for the noise estimation algorithm
% which implements reference [2] or [3] according to the setting of pp.ne
%
% Minimum statistics noise estimate [2]: pp.ne=0
% pp.taca % (11): smoothing time constant for alpha_c [0.0449 seconds]
% pp.tamax % (3): max smoothing time constant [0.392 seconds]
% pp.taminh % (3): min smoothing time constant (upper limit) [0.0133 seconds]
% pp.tpfall % (12): time constant for P to fall [0.064 seconds]
% pp.tbmax % (20): max smoothing time constant [0.0717 seconds]
% pp.qeqmin % (23): minimum value of Qeq [2]
% pp.qeqmax % max value of Qeq per frame [14]
% pp.av % (23)+13 lines: fudge factor for bc calculation [2.12]
% pp.td % time to take minimum over [1.536 seconds]
% pp.nu % number of subwindows to use [3]
% pp.qith % Q-inverse thresholds to select maximum noise slope [0.03 0.05 0.06 Inf ]
% pp.nsmdb % corresponding noise slope thresholds in dB/second [47 31.4 15.7 4.1]
%
% MMSE noise estimate [3]: pp.ne=1
% pp.tax % smoothing time constant for noise power estimate [0.0717 seconds](8)
% pp.tap % smoothing time constant for smoothed speech prob [0.152 seconds](23)
% pp.psthr % threshold for smoothed speech probability [0.99] (24)
% pp.pnsaf % noise probability safety value [0.01] (24)
% pp.pspri % prior speech probability [0.5] (18)
% pp.asnr % active SNR in dB [15] (18)
% pp.psini % initial speech probability [0.5] (23)
% pp.tavini % assumed speech absent time at start [0.064 seconds]
%
% If convenient, you can call specsub in chunks of arbitrary size. Thus the following are equivalent:
%
% (a) y=specsub(s,fs);
%
% (b) [y1,z]=specsub(s(1:1000),fs);
% [y2,z]=specsub(s(1001:2000),z);
% y3=specsub(s(2001:end),z);
% y=[y1; y2; y3];
%
% If the number of output arguments is either 2 or 5, the last partial frame of samples will
% be retained for overlap adding with the output from the next call to specsub().
%
% See also ssubmmse() for an alternative gain function
%
% Refs:
% [1] M. Berouti, R. Schwartz and J. Makhoul
% Enhancement of speech corrupted by acoustic noise
% Proc IEEE ICASSP, 1979, 4, 208-211
% [2] Rainer Martin.
% Noise power spectral density estimation based on optimal smoothing and minimum statistics.
% IEEE Trans. Speech and Audio Processing, 9(5):504-512, July 2001.
% [3] Gerkmann, T. & Hendriks, R. C.
% Unbiased MMSE-Based Noise Power Estimation With Low Complexity and Low Tracking Delay
% IEEE Trans Audio, Speech, Language Processing, 2012, 20, 1383-1393
% Copyright (C) Mike Brookes 2004
% Version: $Id: specsub.m 1720 2012-03-31 17:17:31Z dmb $
%
% VOICEBOX is a MATLAB toolbox for speech processing.
% Home page: http://www.ee.ic.ac.uk/hp/staff/dmb/voicebox/voicebox.html
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% This program is free software; you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation; either version 2 of the License, or
% (at your option) any later version.
%
% This program is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You can obtain a copy of the GNU General Public License from
% http://www.gnu.org/copyleft/gpl.html or by writing to
% Free Software Foundation, Inc.,675 Mass Ave, Cambridge, MA 02139, USA.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
if numel(si)>length(si)
error('Input speech signal must be a vector not a matrix');
end
if isstruct(fsz)
fs=fsz.fs;
qq=fsz.qq;
qp=fsz.qp;
ze=fsz.ze;
s=zeros(length(fsz.si)+length(si(:)),1); % allocate space for speech
s(1:length(fsz.si))=fsz.si;
s(length(fsz.si)+1:end)=si(:);
else
fs=fsz; % sample frequency
s=si(:);
% default algorithm constants
qq.of=2; % overlap factor = (fft length)/(frame increment)
qq.ti=16e-3; % desired frame increment (16 ms)
qq.ri=0; % round ni to the nearest power of 2
qq.g=1; % subtraction domain: 1=magnitude, 2=power
qq.e=1; % gain exponent
qq.am=3; % max oversubtraction factor
qq.b=0.01; % noise floor
qq.al=-5; % SNR for maximum a (set to Inf for fixed a)
qq.ah=20; % SNR for minimum a
qq.bt=-1; % suppress binary masking
qq.ne=0; % noise estimation: 0=min statistics, 1=MMSE [0]
qq.mx=0; % no input mixing
qq.gh=1; % maximum gain
qq.tf='g'; % output the gain time-frequency plane by default
qq.rf=0;
if nargin>=3 && ~isempty(pp)
qp=pp; % save for estnoisem call
qqn=fieldnames(qq);
for i=1:length(qqn)
if isfield(pp,qqn{i})
qq.(qqn{i})=pp.(qqn{i});
end
end
else
qp=struct; % make an empty structure
end
end
% derived algorithm constants
if qq.ri
ni=pow2(nextpow2(qq.ti*fs*sqrt(0.5)));
else
ni=round(qq.ti*fs); % frame increment in samples
end
tinc=ni/fs; % true frame increment time
tf=qq.tf;
rf=qq.rf || nargout==2 || nargout==5; % round down to an exact number of frames
ne=qq.ne; % noise estimation: 0=min statistics, 1=MMSE [0]
% calculate power spectrum in frames
no=round(qq.of); % integer overlap factor
nf=ni*no; % fft length
w=sqrt(hamming(nf+1))'; w(end)=[]; % for now always use sqrt hamming window
w=w/sqrt(sum(w(1:ni:nf).^2)); % normalize to give overall gain of 1
if rf>0
rfm=''; % truncated input to an exact number of frames
else
rfm='r';
end
[y,tt]=enframe(s,w,ni,rfm);
tt=tt/fs; % frame times
yf=rfft(y,nf,2);
yp=yf.*conj(yf); % power spectrum of input speech
[nr,nf2]=size(yp); % number of frames
ff=(0:nf2-1)*fs/nf;
if isstruct(fsz)
if ne>0
[dp,ze]=estnoiseg(yp,ze); % estimate the noise using MMSE
else
[dp,ze]=estnoisem(yp,ze); % estimate the noise using minimum statistics
end
ssv=fsz.ssv;
else
if ne>0
[dp,ze]=estnoiseg(yp,tinc,qp); % estimate the noise using MMSE
else
[dp,ze]=estnoisem(yp,tinc,qp); % estimate the noise using minimum statistics
end
ssv=zeros(ni*(no-1),1); % dummy saved overlap
end
if ~nr % no data frames
ss=[];
gg=[];
else
mz=yp==0; % mask for zero power time-frequency bins (unlikely)
if qq.al<Inf
ypf=sum(yp,2);
dpf=sum(dp,2);
mzf=dpf==0; % zero noise frames = very high SNR
af=1+(qq.am-1)*(min(max(10*log10(ypf./(dpf+mzf)),qq.al),qq.ah)-qq.ah)/(qq.al-qq.ah);
af(mzf)=1; % fix the zero noise frames
else
af=repmat(qq.am,nr,1);
end
switch qq.g
case 1 % magnitude domain subtraction
v=sqrt(dp./(yp+mz));
af=sqrt(af);
bf=sqrt(qq.b);
case 2 % power domain subtraction
v=dp./(yp+mz);
bf=qq.b;
otherwise % arbitrary subtraction domain
v=(dp./(yp+mz)).^(0.5*qq.g);
af=af.^(0.5*qq.g);
bf=qq.b^(0.5*qq.g);
end
af =repmat(af,1,nf2); % replicate frame oversubtraction factors for each frequency
mf=v>=(af+bf).^(-1); % mask for noise floor limiting
g=zeros(size(v)); % reserve space for gain matrix
eg=qq.e/qq.g; % gain exponent relative to subtraction domain
gh=qq.gh;
switch eg
case 1 % Normal case
g(mf)=min(bf*v(mf),gh); % never give a gain > 1
g(~mf)=1-af(~mf).*v(~mf);
case 0.5
g(mf)=min(sqrt(bf*v(mf)),gh);
g(~mf)=sqrt(1-af(~mf).*v(~mf));
otherwise
g(mf)=min((bf*v(mf)).^eg,gh);
g(~mf)=(1-af(~mf).*v(~mf)).^eg;
end
if qq.bt>=0
g=g>qq.bt;
end
g=qq.mx+(1-qq.mx)*g; % mix in some of the input
se=(irfft((yf.*g).',nf).').*repmat(w,nr,1); % inverse dft and apply output window
ss=zeros(ni*(nr+no-1),no); % space for overlapped output speech
ss(1:ni*(no-1),end)=ssv;
for i=1:no
nm=nf*(1+floor((nr-i)/no)); % number of samples in this set
ss(1+(i-1)*ni:nm+(i-1)*ni,i)=reshape(se(i:no:nr,:)',nm,1);
end
ss=sum(ss,2);
if nargout>2 && ~isempty(tf)
gg=zeros(nr,nf2,length(tf)); % make space
for i=1:length(tf)
switch tf(i)
case 'i' % 'i' = input power spectrum
gg(:,:,i)=yp;
case 'I' % 'i' = input power spectrum
gg(:,:,i)=yf;
case 'n' % 'n' = noise power spectrum
gg(:,:,i)=dp;
case 'g' % 'g' = gain
gg(:,:,i)=g;
case 'o' % 'o' = output power spectrum
gg(:,:,i)=yp.*g.^2;
case 'O' % 'o' = output power spectrum
gg(:,:,i)=yf.*g;
end
end
end
end
if nargout==2 || nargout==5
if nr
zo.ssv=ss(end-ni*(no-1)+1:end); % save the output tail for next time
ss(end-ni*(no-1)+1:end)=[];
else
zo.ssv=ssv; %
end
zo.si=s(length(ss)+1:end); % save the tail end of the input speech signal
zo.fs=fs; % save sample frequency
zo.qq=qq; % save local parameters
zo.qp=qp; % save estnoisem parameters
zo.ze=ze; % save state of noise estimation
if nargout==2
gg=zo; % 2nd of two arguments is zo
end
elseif rf==0
ss=ss(1:length(s)); % trim to the correct length if not an exact number of frames
end
if ~nargout && nr>0
ffax=ff/1000; ax=zeros(4,1);
ax(1)=subplot(223);
imagesc(tt,ffax,20*log10(g)');
colorbar;
axis('xy');
if qq.al==Inf
title(sprintf('Filter Gain (dB): a=%.2g, b=%.3g',qq.am,qq.b));
else
title(sprintf('Filter Gain (dB): a=%.2g (%.0f to %.0fdB), b=%.3g',qq.am,qq.al,qq.ah,qq.b));
end
xlabel('Time (s)');
ylabel('Frequency (kHz)');
ax(2)=subplot(222);
imagesc(tt,ffax,10*log10(yp)');
colorbar;
axis('xy');
title('Noisy Speech (dB)');
xlabel('Time (s)');
ylabel('Frequency (kHz)');
ax(3)=subplot(224);
imagesc(tt,ffax,10*log10(yp.*g.^2)');
colorbar;
axis('xy');
title(sprintf('Enhanced Speech (dB): g=%.2g, e=%.3g',qq.g,qq.e));
xlabel('Time (s)');
ylabel('Frequency (kHz)');
ax(4)=subplot(221);
imagesc(tt,ffax,10*log10(dp)');
colorbar;
axis('xy');
title('Noise Estimate (dB)');
xlabel('Time (s)');
ylabel('Frequency (kHz)');
linkaxes(ax);
end | {
"pile_set_name": "Github"
} |
import * as socket from "socket.io"
import * as http from "http";
import * as https from "https";
import * as fs from "fs";
console.log('socket.io-server');
const server = http.createServer();
//const server = https.createServer({
// key: fs.readFileSync("cert/server-key.pem").toString(),
// cert: fs.readFileSync("cert/server-crt.pem").toString(),
// ca: fs.readFileSync("cert/ca-crt.pem").toString(),
// requestCert: true,
// rejectUnauthorized: true
//});
const io = socket(server, {
pingInterval: 10000,
pingTimeout: 5000,
transports: ["websocket"],
//path: "/path"
});
io.use((socket, next) => {
if (socket.handshake.query.token === "io") {
next();
} else {
next(new Error("Authentication error"));
}
})
io.on("connection", socket => {
console.log(`connect: ${socket.id}`);
//console.log(`cert: ${socket.client.request.client.getPeerCertificate().toString()}`)
socket.on("disconnect", reason => {
console.log(`disconnect: ${reason}`);
});
socket.on("hi", name => {
socket.emit("hi", `hi ${name}, You are connected to the server`);
});
socket.on("ack", (name, fn) => {
fn({
result: true,
message: `ack(${name})`
});
});
socket.on("bytes", (name, data) => {
const bytes = Buffer.from(data.bytes.toString() + " - server - " + name, "utf-8");
socket.emit("bytes", {
clientSource: data.source,
source: "server",
bytes
});
});
socket.on("sever disconnect", close => {
socket.disconnect(close)
});
socket.on("binary ack", (name, data, fn) => {
const bytes = Buffer.from(data.bytes.toString() + " - server - " + name, "utf-8");
fn({
clientSource: data.source,
source: "server",
bytes
});
});
socket.on("change", (val1, val2) => {
socket.emit("change", val2, val1);
});
socket.on("client message callback", (msg) => {
socket.emit("client message callback", msg + " - server", clientMsg => {
console.log(clientMsg);
socket.emit("server message callback called");
});
});
socket.on("client binary callback", (msg) => {
const binaryMessage = Buffer.from(msg.toString() + " - server", "utf-8");
socket.emit("client binary callback", binaryMessage, clientMsg => {
console.log(clientMsg);
socket.emit("server binary callback called");
});
});
});
const nsp = io.of("/nsp");
nsp.on("connection", socket => {
console.log(`connect: ${socket.id}`);
socket.on("disconnect", reason => {
console.log(`disconnect: ${reason}`);
if (reason === 'io server disconnect') {
// the disconnection was initiated by the server, you need to reconnect manually
}
});
socket.on("hi", name => {
socket.emit("hi", `hi ${name}, You are connected to the server - nsp`);
});
socket.on("ack", (name, fn) => {
fn({
result: true,
message: `ack(${name})`
});
});
socket.on("bytes", (name, data) => {
const bytes = Buffer.from(data.bytes.toString() + " - server - " + name, "utf-8");
socket.emit("bytes", {
clientSource: data.source,
source: "server",
bytes
});
});
socket.on("sever disconnect", close => {
socket.disconnect(close)
});
socket.on("binary ack", (name, data, fn) => {
const bytes = Buffer.from(data.bytes.toString() + " - server - " + name, "utf-8");
fn({
clientSource: data.source,
source: "server",
bytes
});
});
socket.on("change", (val1, val2) => {
socket.emit("change", val2, val1);
});
socket.on("change", (val1, val2) => {
socket.emit("change", val2, val1);
});
socket.on("client message callback", (msg) => {
socket.emit("client message callback", msg + " - server", clientMsg => {
console.log(clientMsg);
socket.emit("server message callback called");
});
});
socket.on("client binary callback", (msg) => {
const binaryMessage = Buffer.from(msg.toString() + " - server", "utf-8");
socket.emit("client binary callback", binaryMessage, clientMsg => {
console.log(clientMsg);
socket.emit("server binary callback called");
});
});
});
server.listen(11000, () => {
console.log(`Listening HTTPS on 11000`);
}); | {
"pile_set_name": "Github"
} |
{
"name": "flot-spline",
"description": "Flot plugin that provides spline interpolation for line graphs",
"version": "0.0.1",
"homepage": "https://github.com/johnpozy/flot-spline",
"repository": "https://github.com/johnpozy/flot-spline",
"bugs": {
"url": "https://github.com/johnpozy/flot-spline/issues"
},
"main": "js/jquery.flot.spline.js",
"license" : "SEE LICENSE IN LICENSE",
"devDependencies": {}
}
| {
"pile_set_name": "Github"
} |
<!-- start footer part -->
<!--BEGIN GENERATE_TREEVIEW-->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
$navpath
<li class="footer">$generatedby
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="$relpath$doxygen.png" alt="doxygen"/></a> $doxygenversion </li>
</ul>
</div>
<!--END GENERATE_TREEVIEW-->
<!--BEGIN !GENERATE_TREEVIEW-->
<hr class="footer"/><address class="footer"><small>
$generatedby  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="$relpath$doxygen.png" alt="doxygen"/>
</a> $doxygenversion
</small></address>
<!--END !GENERATE_TREEVIEW-->
<!-- Piwik -->
<script type="text/javascript">
var pkBaseURL = (("https:" == document.location.protocol) ? "https://stats.sylphide-consulting.com/piwik/" : "http://stats.sylphide-consulting.com/piwik/");
document.write(unescape("%3Cscript src='" + pkBaseURL + "piwik.js' type='text/javascript'%3E%3C/script%3E"));
</script><script type="text/javascript">
try {
var piwikTracker = Piwik.getTracker(pkBaseURL + "piwik.php", 20);
piwikTracker.trackPageView();
piwikTracker.enableLinkTracking();
} catch( err ) {}
</script><noscript><p><img src="http://stats.sylphide-consulting.com/piwik/piwik.php?idsite=20" style="border:0" alt="" /></p></noscript>
<!-- End Piwik Tracking Code -->
</body>
</html>
| {
"pile_set_name": "Github"
} |
Xft.dpi: 96
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="iso-8859-15"?>
<sigfile>
<binary>
<id>91D3629D74B9EF90</id>
<version>
<size>0x0000000000000052</size>
<userdefinedname>main.obj</userdefinedname>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000000">B6AAF36DF8CB730C</xref>
<xref offset="0x000000000000000C">C740C6DE5E974350</xref>
</xrefs>
</version>
</binary>
<segment>
<id>B6AAF36DF8CB730C</id>
<version>
<size>0x000000000000000C</size>
<userdefinedname>.data</userdefinedname>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000000">FFA7A3860F945C51</xref>
</xrefs>
<attribute key="align">5</attribute>
<attribute key="bitness">1</attribute>
<attribute key="color">4294967295</attribute>
<attribute key="comb">2</attribute>
<attribute key="end_ea">12</attribute>
<attribute key="flags">16</attribute>
<attribute key="org_base">0</attribute>
<attribute key="perm">6</attribute>
<attribute key="sel">3</attribute>
<attribute key="start_ea">0</attribute>
<attribute key="type">3</attribute>
</version>
</segment>
<segment>
<id>C740C6DE5E974350</id>
<version>
<size>0x0000000000000046</size>
<userdefinedname>.text$mn</userdefinedname>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000000">9C739B28F5851293</xref>
</xrefs>
<attribute key="align">3</attribute>
<attribute key="bitness">1</attribute>
<attribute key="color">4294967295</attribute>
<attribute key="comb">2</attribute>
<attribute key="end_ea">82</attribute>
<attribute key="flags">16</attribute>
<attribute key="org_base">0</attribute>
<attribute key="perm">5</attribute>
<attribute key="sel">4</attribute>
<attribute key="start_ea">12</attribute>
<attribute key="type">2</attribute>
</version>
</segment>
<segment_chunk>
<id>9C739B28F5851293</id>
<version>
<size>0x0000000000000046</size>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000000">85E954B916FE44A9</xref>
<xref offset="0x0000000000000007">BC3F4BD752C568DC</xref>
<xref offset="0x0000000000000010">FF6DE5E9532D9AC4</xref>
<xref offset="0x0000000000000024">834831F9EFA6EAE7</xref>
<xref offset="0x0000000000000030">E342643E854F05D7</xref>
</xrefs>
<blob offset="0000000000000000">558BEC33C05DC3CCCCCCCCCCCCCCCCCC558BEC51C745FC00000000E8E0FFFFFF8BE55DC3CCCCCCCCCCCCCCCCCCCCCCCC558BEC837D08057E09B801000000EB04EB0233C05DC3</blob>
</version>
</segment_chunk>
<segment_chunk>
<id>FFA7A3860F945C51</id>
<version>
<size>0x000000000000000C</size>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000000">34857FCE5B626E6C</xref>
<xref offset="0x0000000000000009">C5409031C30FB279</xref>
</xrefs>
<blob offset="0000000000000000">6120737472696E6700</blob>
</version>
</segment_chunk>
<function>
<id>85E954B916FE44A9</id>
<version>
<size>0x0000000000000007</size>
<proto>int __cdecl _main(int argc, const char **argv, const char **envp)</proto>
<flags>0x4410</flags>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">-3D029CDC</signature>
</signatures>
<xrefs>
<xref offset="0x0000000000000000" size="0x00000007">F8A91A21E1671FE0</xref>
<xref offset="0x00000000FFFFFFFF">D0801388A2D93BEB</xref>
</xrefs>
</version>
</function>
<function>
<id>E342643E854F05D7</id>
<version>
<size>0x0000000000000016</size>
<flags>0x4010</flags>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">02C99E73</signature>
</signatures>
<xrefs>
<xref offset="0x0000000000000000" size="0x00000009">EF87DDDED00A6C05</xref>
<xref offset="0x0000000000000009" size="0x00000007">72E9EDF026A8712A</xref>
<xref offset="0x0000000000000010" size="0x00000002">74E225877C7E8A94</xref>
<xref offset="0x0000000000000012" size="0x00000002">6993F8DEF74AE0AA</xref>
<xref offset="0x0000000000000014" size="0x00000002">FCB710DE5B375CB8</xref>
<xref offset="0x00000000FFFFFFFF">AC80B4C11CADF432</xref>
</xrefs>
</version>
</function>
<function>
<id>FF6DE5E9532D9AC4</id>
<version>
<size>0x0000000000000014</size>
<proto>int __thiscall _functionCaller(int argc)</proto>
<flags>0x4410</flags>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">06F529F0</signature>
</signatures>
<xrefs>
<xref offset="0x0000000000000000" size="0x00000014">1D82B5807E1DCB77</xref>
<xref offset="0x00000000FFFFFFFF">E0204CA3888AFF32</xref>
</xrefs>
</version>
</function>
<stackframe>
<id>AC80B4C11CADF432</id>
<version>
<size>0x000000000000000C</size>
<userdefinedname>$ F3C</userdefinedname>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000008">37878B07D927278C</xref>
</xrefs>
<attribute key="stack_args">0x00000000</attribute>
<attribute key="stack_lvars">0x00000000</attribute>
<attribute key="stack_regvars">0x00000004</attribute>
</version>
</stackframe>
<stackframe>
<id>D0801388A2D93BEB</id>
<version>
<size>0x0000000000000014</size>
<userdefinedname>$ FC</userdefinedname>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000008">3EDDC8E0871E2D13</xref>
<xref offset="0x000000000000000C">3E842D1749F4006C</xref>
<xref offset="0x0000000000000010">28CA12EEB6554061</xref>
</xrefs>
<attribute key="stack_args">0x00000000</attribute>
<attribute key="stack_lvars">0x00000000</attribute>
<attribute key="stack_regvars">0x00000004</attribute>
</version>
</stackframe>
<stackframe>
<id>E0204CA3888AFF32</id>
<version>
<size>0x000000000000000C</size>
<userdefinedname>$ F1C</userdefinedname>
<str_type>0</str_type>
<xrefs>
<xref offset="0x0000000000000000">BD3FBD1C3CF27FCF</xref>
</xrefs>
<attribute key="stack_args">0x00000000</attribute>
<attribute key="stack_lvars">0x00000004</attribute>
<attribute key="stack_regvars">0x00000004</attribute>
</version>
</stackframe>
<stackframe_member>
<id>28CA12EEB6554061</id>
<version>
<size>0x0000000000000004</size>
<userdefinedname>envp</userdefinedname>
<proto>const char **</proto>
<flags>0x25500400</flags>
<str_type>0</str_type>
</version>
</stackframe_member>
<stackframe_member>
<id>37878B07D927278C</id>
<version>
<size>0x0000000000000004</size>
<userdefinedname>arg_0</userdefinedname>
<flags>0x20000400</flags>
<str_type>0</str_type>
</version>
</stackframe_member>
<stackframe_member>
<id>3E842D1749F4006C</id>
<version>
<size>0x0000000000000004</size>
<userdefinedname>argv</userdefinedname>
<proto>const char **</proto>
<flags>0x25500400</flags>
<str_type>0</str_type>
</version>
</stackframe_member>
<stackframe_member>
<id>3EDDC8E0871E2D13</id>
<version>
<size>0x0000000000000004</size>
<userdefinedname>argc</userdefinedname>
<proto>int</proto>
<flags>0x20000400</flags>
<str_type>0</str_type>
</version>
</stackframe_member>
<stackframe_member>
<id>BD3FBD1C3CF27FCF</id>
<version>
<size>0x0000000000000004</size>
<userdefinedname>var_4</userdefinedname>
<flags>0x20000400</flags>
<str_type>0</str_type>
</version>
</stackframe_member>
<basic_block>
<id>1D82B5807E1DCB77</id>
<version>
<size>0x0000000000000014</size>
<userdefinedname flags="0x00000052">_functionCaller</userdefinedname>
<flags>0x2</flags>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">06F529F0</signature>
</signatures>
<offsets>
<comments offset="0000000000000000" type="nonrepeatable_comment">argv</comments>
<comments offset="0000000000000003" type="nonrepeatable_comment">argc</comments>
<valueview offset="0000000000000004" operand="0000000000000001">offset-off32</valueview>
<valueview offset="000000000000000B" operand="0000000000000000">offset-off32</valueview>
</offsets>
<xrefs>
<xref offset="0x0000000000000004">BD3FBD1C3CF27FCF</xref>
<xref offset="0x000000000000000B">85E954B916FE44A9</xref>
</xrefs>
</version>
</basic_block>
<basic_block>
<id>6993F8DEF74AE0AA</id>
<version>
<size>0x0000000000000002</size>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">28FB0CE8</signature>
</signatures>
<xrefs>
<xref offset="0x0000000000000050">FCB710DE5B375CB8</xref>
</xrefs>
</version>
</basic_block>
<basic_block>
<id>72E9EDF026A8712A</id>
<version>
<size>0x0000000000000007</size>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">7BB512F9</signature>
</signatures>
<xrefs>
<xref offset="0x0000000000000050">FCB710DE5B375CB8</xref>
</xrefs>
</version>
</basic_block>
<basic_block>
<id>74E225877C7E8A94</id>
<version>
<size>0x0000000000000002</size>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">-13BF35ED</signature>
</signatures>
<xrefs>
<xref offset="0x0000000000000050">FCB710DE5B375CB8</xref>
</xrefs>
</version>
</basic_block>
<basic_block>
<id>EF87DDDED00A6C05</id>
<version>
<size>0x0000000000000009</size>
<userdefinedname flags="0x00000052">_functionIf</userdefinedname>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">-5DF6EC63</signature>
</signatures>
<xrefs>
<xref offset="0x0000000000000003">37878B07D927278C</xref>
<xref offset="0x0000000000000045">72E9EDF026A8712A</xref>
<xref offset="0x000000000000004E">6993F8DEF74AE0AA</xref>
</xrefs>
</version>
</basic_block>
<basic_block>
<id>F8A91A21E1671FE0</id>
<version>
<size>0x0000000000000007</size>
<userdefinedname flags="0x00000052">_main</userdefinedname>
<flags>0x2</flags>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">-3D029CDC</signature>
</signatures>
</version>
</basic_block>
<basic_block>
<id>FCB710DE5B375CB8</id>
<version>
<size>0x0000000000000002</size>
<flags>0x2</flags>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">0DCF6557</signature>
</signatures>
</version>
</basic_block>
<data>
<id>34857FCE5B626E6C</id>
<version>
<size>0x0000000000000009</size>
<userdefinedname flags="0x00000054">$SG1373</userdefinedname>
<flags>0x50007561</flags>
<str_type>0</str_type>
<signatures>
<signature algo="crc32" method="firstbyte">-5F6B8AC1</signature>
</signatures>
<offsets>
<comments offset="0000000000000000" type="anterior_comment">; File Name : H:\Software\C\VerySmallCNoImport\Ya\main.obj
; Format : COFF (X386MAGIC)
; includelib "LIBCMT"
; includelib "OLDNAMES"
</comments>
</offsets>
</version>
</data>
<data>
<id>834831F9EFA6EAE7</id>
<version>
<size>0x000000000000000C</size>
<flags>0x5CC</flags>
<str_type>0</str_type>
</version>
</data>
<data>
<id>BC3F4BD752C568DC</id>
<version>
<size>0x0000000000000009</size>
<flags>0x5CC</flags>
<str_type>0</str_type>
</version>
</data>
<data>
<id>C5409031C30FB279</id>
<version>
<size>0x0000000000000003</size>
<flags>0xB0000400</flags>
<str_type>0</str_type>
</version>
</data>
</sigfile> | {
"pile_set_name": "Github"
} |
/*
This file is part of cpp-ethereum.
cpp-ethereum is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
cpp-ethereum is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <condition_variable>
#include <libethash/ethash.h>
#include <libdevcore/Log.h>
#include <libdevcore/Worker.h>
#include "BlockHeader.h"
namespace dev
{
namespace eth
{
struct Result
{
h256 value;
h256 mixHash;
};
class EthashAux
{
public:
struct LightAllocation
{
explicit LightAllocation(int epoch);
~LightAllocation();
bytesConstRef data() const;
Result compute(h256 const& _headerHash, uint64_t _nonce) const;
ethash_light_t light;
uint64_t size;
};
using LightType = std::shared_ptr<LightAllocation>;
static int toEpoch(h256 const& _seedHash);
static LightType light(int epoch);
static Result eval(int epoch, h256 const& _headerHash, uint64_t _nonce) noexcept;
private:
EthashAux() = default;
static EthashAux& get();
Mutex x_lights;
std::unordered_map<int, LightType> m_lights;
int m_cached_epoch = 0;
h256 m_cached_seed; // Seed for epoch 0 is the null hash.
};
struct WorkPackage
{
WorkPackage() = default;
explicit WorkPackage(BlockHeader const& _bh)
: boundary(_bh.boundary()),
header(_bh.hashWithout()),
epoch(static_cast<int>(_bh.number()) / ETHASH_EPOCH_LENGTH),
height(static_cast<uint64_t>(_bh.number()))
{}
explicit operator bool() const { return header != h256(); }
h256 boundary;
h256 header; ///< When h256() means "pause until notified a new work package is available".
h256 job;
int epoch = -1;
uint64_t startNonce = 0;
uint64_t height = 0;
int exSizeBits = -1;
int job_len = 8;
};
struct Solution
{
uint64_t nonce;
h256 mixHash;
WorkPackage work;
bool stale;
};
}
}
| {
"pile_set_name": "Github"
} |
metadata_confDeps := ../EmuFramework/metadata/conf.mk
include $(IMAGINE_PATH)/make/shortcut/meta-builds/android.mk
| {
"pile_set_name": "Github"
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.iot.transform.v20180120;
import java.util.ArrayList;
import java.util.List;
import com.aliyuncs.iot.model.v20180120.QueryDetailSceneRuleLogResponse;
import com.aliyuncs.iot.model.v20180120.QueryDetailSceneRuleLogResponse.Data;
import com.aliyuncs.iot.model.v20180120.QueryDetailSceneRuleLogResponse.Data.LogInfo;
import com.aliyuncs.transform.UnmarshallerContext;
public class QueryDetailSceneRuleLogResponseUnmarshaller {
public static QueryDetailSceneRuleLogResponse unmarshall(QueryDetailSceneRuleLogResponse queryDetailSceneRuleLogResponse, UnmarshallerContext _ctx) {
queryDetailSceneRuleLogResponse.setRequestId(_ctx.stringValue("QueryDetailSceneRuleLogResponse.RequestId"));
queryDetailSceneRuleLogResponse.setSuccess(_ctx.booleanValue("QueryDetailSceneRuleLogResponse.Success"));
queryDetailSceneRuleLogResponse.setErrorMessage(_ctx.stringValue("QueryDetailSceneRuleLogResponse.ErrorMessage"));
queryDetailSceneRuleLogResponse.setCode(_ctx.stringValue("QueryDetailSceneRuleLogResponse.Code"));
Data data = new Data();
data.setTotal(_ctx.integerValue("QueryDetailSceneRuleLogResponse.Data.Total"));
data.setPageSize(_ctx.integerValue("QueryDetailSceneRuleLogResponse.Data.PageSize"));
data.setCurrentPage(_ctx.integerValue("QueryDetailSceneRuleLogResponse.Data.CurrentPage"));
List<LogInfo> logList = new ArrayList<LogInfo>();
for (int i = 0; i < _ctx.lengthValue("QueryDetailSceneRuleLogResponse.Data.LogList.Length"); i++) {
LogInfo logInfo = new LogInfo();
logInfo.setCode(_ctx.stringValue("QueryDetailSceneRuleLogResponse.Data.LogList["+ i +"].Code"));
logInfo.setMessage(_ctx.stringValue("QueryDetailSceneRuleLogResponse.Data.LogList["+ i +"].Message"));
logInfo.setPkDn(_ctx.stringValue("QueryDetailSceneRuleLogResponse.Data.LogList["+ i +"].PkDn"));
logList.add(logInfo);
}
data.setLogList(logList);
queryDetailSceneRuleLogResponse.setData(data);
return queryDetailSceneRuleLogResponse;
}
} | {
"pile_set_name": "Github"
} |
# Documentation on Writing Documentation {#InternalMetadocs}
## Organization
There are two sets of documentation built:
- "Internal" - Also known as "implementation" docs, the target audience for this is someone developing the implementation of OSVR: someone working on this actual codebase, not just using it. If you're seeing this in Doxygen docs (rather than in a Git repo), you're looking at the internal docs.
- Indexes `include` and `src`, as well as `examples`
- Includes undocumented members and internal classes, and issues warnings for incomplete docs, which will show up on Jenkins.
- Defines the preprocessor macro `OSVR_DOXYGEN`
- "External" - The target audience for this is for a _user_ of the OSVR framework - someone consuming presumably either ClientKit or PluginKit. As such, this documentation set only indexes the examples and the `inc/` directory, and excludes undocumented members (!). It also excludes a handful of documentation pages, including this one.
- It defines the preprocessor symbol `OSVR_DOXYGEN_EXTERNAL` as well as `OSVR_DOXYGEN`
## Tools
- MSC (Message Sequence Chart) Diagrams
- Canonical tool and syntax reference: [mscgen](http://www.mcternan.me.uk/mscgen/)
- IDE-like tool for creating MSC diagrams: <http://sourceforge.net/projects/msc-generator/>
- Note that not all constructs supported by `mscgen` are supported in this app
- Markdown (`.md`) text files
- Can just use a text editor, many have support for syntax highlighting Markdown.
- Windows app with good preview (except of course for Doxygen-specific extensions): [MarkPad](http://code52.org/DownmarkerWPF/)
## Tips
- To build a nicely structured doc tree, end the first (heading) line of any Markdown file with `{#` followed by a slug (the file basename works well) and a closing `}`. Then, go into `doc/Manual.dox` and add it as a subpage in the appropriate place.
## Source Code
You can use `#ifndef OSVR_DOXYGEN_EXTERNAL` and `#endif` to hide chunks of implementation details in headers from Doxygen's external build. This is particularly useful for the C++ wrappers for ClientKit and PluginKit, since they by definition have to have all their innards exposed in the headers.
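A minimal sketch of such a guard, using a hypothetical header that is not part of the OSVR codebase:

```cpp
/// @brief Example wrapper class; the brief appears in both documentation sets.
class ExampleWrapper {
  public:
    /// Documented public API - indexed by both the internal and external builds.
    void sendData(int value);

#ifndef OSVR_DOXYGEN_EXTERNAL
  private:
    /// Implementation detail - indexed only by the internal build.
    int m_lastValue = 0;
#endif // OSVR_DOXYGEN_EXTERNAL
};
```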
For public headers, it is useful to actually fill out the brief description at the top of the file (not just leave it saying `Header` and nothing more), since it will show up in the file list in Doxygen. Don't repeat the whole contents of the file, but give an idea of why someone might care about that file.
If you have a preprocessor macro showing up in your headers that you don't really need in the documentation (think of the export macros, for instance), you can add it to the `PREDEFINED` list in `Doxyfile-common` to basically banish it from the docs. | {
"pile_set_name": "Github"
} |
{
"type": "mekanism:crushing",
"input": {
"ingredient": {
"item": "minecraft:oxeye_daisy"
}
},
"output": {
"item": "mekanism:bio_fuel",
"count": 5
}
} | {
"pile_set_name": "Github"
} |
// Copyright 2013 Matthew Baird
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package elastigo
import (
"encoding/json"
"fmt"
)
// Explain computes a score explanation for a query and a specific document.
// This can give useful feedback on whether a document matched or didn’t match a specific query.
// This feature is available from version 0.19.9 and up.
// see http://www.elasticsearch.org/guide/reference/api/explain.html
func (c *Conn) Explain(index string, _type string, id string, args map[string]interface{}, query string) (Match, error) {
var url string
var retval Match
if len(_type) > 0 {
url = fmt.Sprintf("/%s/%s/_explain", index, _type)
} else {
url = fmt.Sprintf("/%s/_explain", index)
}
body, err := c.DoCommand("GET", url, args, query)
if err != nil {
return retval, err
}
if err == nil {
// marshall into json
jsonErr := json.Unmarshal(body, &retval)
if jsonErr != nil {
return retval, jsonErr
}
}
return retval, err
}
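// Illustrative usage sketch from a caller's perspective; it is not part of the
// original library source. The index, type, id, and query body below are made-up
// values, and the example assumes the package's NewConn() constructor:
//
//	c := elastigo.NewConn()
//	match, err := c.Explain("twitter", "tweet", "1", nil,
//		`{"query": {"term": {"user": "kimchy"}}}`)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Printf("explanation: %+v\n", match)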
| {
"pile_set_name": "Github"
} |
# Project: zlib_1_03
# Patched for zlib 1.1.2 [email protected] 19980430
# test works out-of-the-box, installs `somewhere' on demand
# Toolflags:
CCflags = -c -depend !Depend -IC: -g -throwback -DRISCOS -fah
C++flags = -c -depend !Depend -IC: -throwback
Linkflags = -aif -c++ -o $@
ObjAsmflags = -throwback -NoCache -depend !Depend
CMHGflags =
LibFileflags = -c -l -o $@
Squeezeflags = -o $@
# change the line below to where _you_ want the library installed.
libdest = lib:zlib
# Final targets:
@.lib: @.o.adler32 @.o.compress @.o.crc32 @.o.deflate @.o.gzio \
@.o.infblock @.o.infcodes @.o.inffast @.o.inflate @.o.inftrees @.o.infutil @.o.trees \
@.o.uncompr @.o.zutil
LibFile $(LibFileflags) @.o.adler32 @.o.compress @.o.crc32 @.o.deflate \
@.o.gzio @.o.infblock @.o.infcodes @.o.inffast @.o.inflate @.o.inftrees @.o.infutil \
@.o.trees @.o.uncompr @.o.zutil
test: @.minigzip @.example @.lib
@copy @.lib @.libc A~C~DF~L~N~P~Q~RS~TV
@echo running tests: hang on.
@/@.minigzip -f -9 libc
@/@.minigzip -d libc-gz
@/@.minigzip -f -1 libc
@/@.minigzip -d libc-gz
@/@.minigzip -h -9 libc
@/@.minigzip -d libc-gz
@/@.minigzip -h -1 libc
@/@.minigzip -d libc-gz
@/@.minigzip -9 libc
@/@.minigzip -d libc-gz
@/@.minigzip -1 libc
@/@.minigzip -d libc-gz
@diff @.lib @.libc
@echo that should have reported '@.lib and @.libc identical' if you have diff.
@/@.example @.fred @.fred
@echo that will have given lots of hello!'s.
@.minigzip: @.o.minigzip @.lib C:o.Stubs
Link $(Linkflags) @.o.minigzip @.lib C:o.Stubs
@.example: @.o.example @.lib C:o.Stubs
Link $(Linkflags) @.o.example @.lib C:o.Stubs
install: @.lib
cdir $(libdest)
cdir $(libdest).h
@copy @.h.zlib $(libdest).h.zlib A~C~DF~L~N~P~Q~RS~TV
@copy @.h.zconf $(libdest).h.zconf A~C~DF~L~N~P~Q~RS~TV
@copy @.lib $(libdest).lib A~C~DF~L~N~P~Q~RS~TV
@echo okay, installed zlib in $(libdest)
clean:; remove @.minigzip
remove @.example
remove @.libc
-wipe @.o.* F~r~cV
remove @.fred
# User-editable dependencies:
.c.o:
cc $(ccflags) -o $@ $<
# Static dependencies:
# Dynamic dependencies:
o.example: c.example
o.example: h.zlib
o.example: h.zconf
o.minigzip: c.minigzip
o.minigzip: h.zlib
o.minigzip: h.zconf
o.adler32: c.adler32
o.adler32: h.zlib
o.adler32: h.zconf
o.compress: c.compress
o.compress: h.zlib
o.compress: h.zconf
o.crc32: c.crc32
o.crc32: h.zlib
o.crc32: h.zconf
o.deflate: c.deflate
o.deflate: h.deflate
o.deflate: h.zutil
o.deflate: h.zlib
o.deflate: h.zconf
o.gzio: c.gzio
o.gzio: h.zutil
o.gzio: h.zlib
o.gzio: h.zconf
o.infblock: c.infblock
o.infblock: h.zutil
o.infblock: h.zlib
o.infblock: h.zconf
o.infblock: h.infblock
o.infblock: h.inftrees
o.infblock: h.infcodes
o.infblock: h.infutil
o.infcodes: c.infcodes
o.infcodes: h.zutil
o.infcodes: h.zlib
o.infcodes: h.zconf
o.infcodes: h.inftrees
o.infcodes: h.infblock
o.infcodes: h.infcodes
o.infcodes: h.infutil
o.infcodes: h.inffast
o.inffast: c.inffast
o.inffast: h.zutil
o.inffast: h.zlib
o.inffast: h.zconf
o.inffast: h.inftrees
o.inffast: h.infblock
o.inffast: h.infcodes
o.inffast: h.infutil
o.inffast: h.inffast
o.inflate: c.inflate
o.inflate: h.zutil
o.inflate: h.zlib
o.inflate: h.zconf
o.inflate: h.infblock
o.inftrees: c.inftrees
o.inftrees: h.zutil
o.inftrees: h.zlib
o.inftrees: h.zconf
o.inftrees: h.inftrees
o.inftrees: h.inffixed
o.infutil: c.infutil
o.infutil: h.zutil
o.infutil: h.zlib
o.infutil: h.zconf
o.infutil: h.infblock
o.infutil: h.inftrees
o.infutil: h.infcodes
o.infutil: h.infutil
o.trees: c.trees
o.trees: h.deflate
o.trees: h.zutil
o.trees: h.zlib
o.trees: h.zconf
o.trees: h.trees
o.uncompr: c.uncompr
o.uncompr: h.zlib
o.uncompr: h.zconf
o.zutil: c.zutil
o.zutil: h.zutil
o.zutil: h.zlib
o.zutil: h.zconf
| {
"pile_set_name": "Github"
} |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/iotsitewise/model/UpdateAssetResult.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/UnreferencedParam.h>
#include <utility>
using namespace Aws::IoTSiteWise::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws;
UpdateAssetResult::UpdateAssetResult()
{
}
UpdateAssetResult::UpdateAssetResult(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
*this = result;
}
UpdateAssetResult& UpdateAssetResult::operator =(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
JsonView jsonValue = result.GetPayload().View();
if(jsonValue.ValueExists("assetStatus"))
{
m_assetStatus = jsonValue.GetObject("assetStatus");
}
return *this;
}
| {
"pile_set_name": "Github"
} |
/*Copyright (c) 2017 The Paradox Game Converters Project
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.*/
#include "ProvinceMapper.h"
#include "Log.h"
#include "Object.h"
#include "ParadoxParserUTF8.h"
#include "../Configuration.h"
#include "../EU4World/EU4Version.h"
provinceMapper* provinceMapper::instance = NULL;
provinceMapper::provinceMapper()
{
LOG(LogLevel::Info) << "Parsing province mappings";
shared_ptr<Object> provinceMappingObj = parser_UTF8::doParseFile("province_mappings.txt");
if (provinceMappingObj == NULL)
{
LOG(LogLevel::Error) << "Could not parse file province_mappings.txt";
exit(-1);
}
initProvinceMap(provinceMappingObj);
}
void provinceMapper::initProvinceMap(shared_ptr<Object> obj)
{
vector<shared_ptr<Object>> versions = obj->getLeaves();
if (versions.size() < 1)
{
LOG(LogLevel::Error) << "No province mapping definitions loaded";
return;
}
unsigned int mappingsIdx = getMappingsIndex(versions);
auto mappings = versions[mappingsIdx]->getLeaves();
for (auto mapping: mappings)
{
createMappings(mapping);
}
}
int provinceMapper::getMappingsIndex(vector<shared_ptr<Object>> versions)
{
unsigned int mappingsIdx;
auto saveVersion = Configuration::getEU4Version();
for (mappingsIdx = 0; mappingsIdx < versions.size(); mappingsIdx++)
{
auto mappingVersion = EU4Version(versions[mappingsIdx]->getKey());
if (saveVersion >= mappingVersion)
{
break;
}
}
LOG(LogLevel::Debug) << "Using version " << versions[mappingsIdx]->getKey() << " mappings";
return mappingsIdx;
}
void provinceMapper::createMappings(shared_ptr<Object> mapping)
{
vector<int> EU4Numbers;
vector<int> Vic2Numbers;
bool resettable = false;
for (auto mappingItem: mapping->getLeaves())
{
if (mappingItem->getKey() == "eu4")
{
EU4Numbers.push_back(stoi(mappingItem->getLeaf()));
}
else if (mappingItem->getKey() == "v2")
{
Vic2Numbers.push_back(stoi(mappingItem->getLeaf()));
}
else if (mappingItem->getKey() == "resettable")
{
resettable = true;
}
else
{
LOG(LogLevel::Warning) << "Unknown data while mapping provinces";
}
}
if (EU4Numbers.size() == 0)
{
EU4Numbers.push_back(0);
}
if (Vic2Numbers.size() == 0)
{
Vic2Numbers.push_back(0);
}
for (auto Vic2Number: Vic2Numbers)
{
if (Vic2Number != 0)
{
Vic2ToEU4ProvinceMap.insert(make_pair(Vic2Number, EU4Numbers));
if (resettable)
{
resettableProvinces.insert(Vic2Number);
}
}
}
for (auto EU4Number: EU4Numbers)
{
if (EU4Number != 0)
{
EU4ToVic2ProvinceMap.insert(make_pair(EU4Number, Vic2Numbers));
}
}
}
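// Illustrative sketch of the province_mappings.txt layout that initProvinceMap()
// and createMappings() expect: a top-level version key wraps mapping entries, each
// of which may carry several "eu4"/"v2" numbers plus an optional "resettable" flag.
// The numbers below are hypothetical, and the entry key name (written here as
// "link") is an assumption - the parser never inspects it.
//
//	0.1.0 = {
//		link = { eu4 = 1 v2 = 100 v2 = 101 }
//		link = { eu4 = 2 resettable = yes }
//	}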
const vector<int> provinceMapper::GetVic2ProvinceNumbers(const int EU4ProvinceNumber)
{
auto mapping = EU4ToVic2ProvinceMap.find(EU4ProvinceNumber);
if (mapping != EU4ToVic2ProvinceMap.end())
{
return mapping->second;
}
else
{
vector<int> empty;
return empty;
}
}
const vector<int> provinceMapper::GetEU4ProvinceNumbers(int Vic2ProvinceNumber)
{
auto mapping = Vic2ToEU4ProvinceMap.find(Vic2ProvinceNumber);
if (mapping != Vic2ToEU4ProvinceMap.end())
{
return mapping->second;
}
else
{
vector<int> empty;
return empty;
}
}
bool provinceMapper::IsProvinceResettable(int Vic2ProvinceNumber)
{
if (resettableProvinces.count(Vic2ProvinceNumber) > 0)
{
return true;
}
else
{
return false;
}
} | {
"pile_set_name": "Github"
} |
/*
All modification made by Intel Corporation: © 2016 Intel Corporation
All contributions by the University of California:
Copyright (c) 2014, 2015, The Regents of the University of California (Regents)
All rights reserved.
All other contributions:
Copyright (c) 2014, 2015, the respective contributors
All rights reserved.
For the list of contributors go to https://github.com/BVLC/caffe/blob/master/CONTRIBUTORS.md
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef MKL2017_SUPPORTED
#include <algorithm>
#include <vector>
#include "gtest/gtest.h"
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/filler.hpp"
#include "caffe/layers/mkl_layers.hpp"
#include "caffe/test/test_caffe_main.hpp"
#include "caffe/test/test_gradient_check_util.hpp"
using std::min;
using std::max;
namespace caffe {
template <typename TypeParam>
class MKLLRNLayerTest : public MultiDeviceTest<TypeParam> {
typedef typename TypeParam::Dtype Dtype;
protected:
MKLLRNLayerTest()
: epsilon_(Dtype(1e-5)),
blob_bottom_(new Blob<Dtype>()),
blob_top_(new Blob<Dtype>()) {}
virtual void SetUp() {
Caffe::set_random_seed(1701);
blob_bottom_->Reshape(2, 7, 3, 3);
// fill the values
FillerParameter filler_param;
GaussianFiller<Dtype> filler(filler_param);
filler.Fill(this->blob_bottom_);
blob_bottom_vec_.push_back(blob_bottom_);
blob_top_vec_.push_back(blob_top_);
}
virtual ~MKLLRNLayerTest() { delete blob_bottom_; delete blob_top_; }
void ReferenceLRNForward(const Blob<Dtype>& blob_bottom,
const LayerParameter& layer_param, Blob<Dtype>* blob_top);
Dtype epsilon_;
Blob<Dtype>* const blob_bottom_;
Blob<Dtype>* const blob_top_;
vector<Blob<Dtype>*> blob_bottom_vec_;
vector<Blob<Dtype>*> blob_top_vec_;
};
template <typename TypeParam>
void MKLLRNLayerTest<TypeParam>::ReferenceLRNForward(
const Blob<Dtype>& blob_bottom, const LayerParameter& layer_param,
Blob<Dtype>* blob_top) {
typedef typename TypeParam::Dtype Dtype;
blob_top->Reshape(blob_bottom.num(), blob_bottom.channels(),
blob_bottom.height(), blob_bottom.width());
Dtype* top_data = blob_top->mutable_cpu_data();
LRNParameter lrn_param = layer_param.lrn_param();
Dtype alpha = lrn_param.alpha();
Dtype beta = lrn_param.beta();
int size = lrn_param.local_size();
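  // The reference computation below follows the layer's normalization formula:
  // top = bottom / (1 + (alpha / n) * sum(bottom_i^2))^beta, where the sum runs
  // over a window of n = local_size neighbouring channels (ACROSS_CHANNELS) or
  // over a local_size x local_size spatial window within one channel
  // (WITHIN_CHANNEL, where the divisor becomes local_size * local_size).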
switch (lrn_param.norm_region()) {
case LRNParameter_NormRegion_ACROSS_CHANNELS:
for (int n = 0; n < blob_bottom.num(); ++n) {
for (int c = 0; c < blob_bottom.channels(); ++c) {
for (int h = 0; h < blob_bottom.height(); ++h) {
for (int w = 0; w < blob_bottom.width(); ++w) {
int c_start = c - (size - 1) / 2;
int c_end = min(c_start + size, blob_bottom.channels());
c_start = max(c_start, 0);
Dtype scale = 1.;
for (int i = c_start; i < c_end; ++i) {
Dtype value = blob_bottom.data_at(n, i, h, w);
scale += value * value * alpha / size;
}
*(top_data + blob_top->offset(n, c, h, w)) =
blob_bottom.data_at(n, c, h, w) / pow(scale, beta);
}
}
}
}
break;
case LRNParameter_NormRegion_WITHIN_CHANNEL:
for (int n = 0; n < blob_bottom.num(); ++n) {
for (int c = 0; c < blob_bottom.channels(); ++c) {
for (int h = 0; h < blob_bottom.height(); ++h) {
int h_start = h - (size - 1) / 2;
int h_end = min(h_start + size, blob_bottom.height());
h_start = max(h_start, 0);
for (int w = 0; w < blob_bottom.width(); ++w) {
Dtype scale = 1.;
int w_start = w - (size - 1) / 2;
int w_end = min(w_start + size, blob_bottom.width());
w_start = max(w_start, 0);
for (int nh = h_start; nh < h_end; ++nh) {
for (int nw = w_start; nw < w_end; ++nw) {
Dtype value = blob_bottom.data_at(n, c, nh, nw);
scale += value * value * alpha / (size * size);
}
}
*(top_data + blob_top->offset(n, c, h, w)) =
blob_bottom.data_at(n, c, h, w) / pow(scale, beta);
}
}
}
}
break;
default:
LOG(FATAL) << "Unknown normalization region.";
}
}
typedef ::testing::Types<CPUDevice<float>,
CPUDevice<double> > TestDtypesCPU;
TYPED_TEST_CASE(MKLLRNLayerTest, TestDtypesCPU);
TYPED_TEST(MKLLRNLayerTest, TestSetupAcrossChannels) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
MKLLRNLayer<Dtype> layer(layer_param);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
EXPECT_EQ(this->blob_top_->num(), 2);
EXPECT_EQ(this->blob_top_->channels(), 7);
EXPECT_EQ(this->blob_top_->height(), 3);
EXPECT_EQ(this->blob_top_->width(), 3);
}
#if 0
TYPED_TEST(MKLLRNLayerTest, TestForwardAcrossChannels) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
MKLLRNLayer<Dtype> layer(layer_param);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
Blob<Dtype> top_reference;
this->ReferenceLRNForward(*(this->blob_bottom_), layer_param,
&top_reference);
for (int i = 0; i < this->blob_bottom_->count(); ++i) {
EXPECT_NEAR(this->blob_top_->cpu_data()[i], top_reference.cpu_data()[i],
this->epsilon_);
}
}
#endif
TYPED_TEST(MKLLRNLayerTest, TestForwardAcrossChannelsLargeRegion) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
layer_param.mutable_lrn_param()->set_local_size(15);
MKLLRNLayer<Dtype> layer(layer_param);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
Blob<Dtype> top_reference;
this->ReferenceLRNForward(*(this->blob_bottom_), layer_param,
&top_reference);
for (int i = 0; i < this->blob_bottom_->count(); ++i) {
EXPECT_NEAR(this->blob_top_->cpu_data()[i], top_reference.cpu_data()[i],
this->epsilon_);
}
}
#if 0
TYPED_TEST(MKLLRNLayerTest, TestGradientAcrossChannels) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
MKLLRNLayer<Dtype> layer(layer_param);
GradientChecker<Dtype> checker(1e-2, 1e-2);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
for (int i = 0; i < this->blob_top_->count(); ++i) {
this->blob_top_->mutable_cpu_diff()[i] = 1.;
}
vector<bool> propagate_down(this->blob_bottom_vec_.size(), true);
layer.Backward(this->blob_top_vec_, propagate_down,
this->blob_bottom_vec_);
// for (int i = 0; i < this->blob_bottom_->count(); ++i) {
// std::cout << "CPU diff " << this->blob_bottom_->cpu_diff()[i]
// << std::endl;
// }
checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
this->blob_top_vec_);
}
#endif
TYPED_TEST(MKLLRNLayerTest, TestGradientAcrossChannelsLargeRegion) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
layer_param.mutable_lrn_param()->set_local_size(15);
MKLLRNLayer<Dtype> layer(layer_param);
GradientChecker<Dtype> checker(1e-2, 1e-2);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
for (int i = 0; i < this->blob_top_->count(); ++i) {
this->blob_top_->mutable_cpu_diff()[i] = 1.;
}
vector<bool> propagate_down(this->blob_bottom_vec_.size(), true);
layer.Backward(this->blob_top_vec_, propagate_down,
this->blob_bottom_vec_);
// for (int i = 0; i < this->blob_bottom_->count(); ++i) {
// std::cout << "CPU diff " << this->blob_bottom_->cpu_diff()[i]
// << std::endl;
// }
checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
this->blob_top_vec_);
}
#if 0
TYPED_TEST(MKLLRNLayerTest, TestSetupWithinChannel) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
layer_param.mutable_lrn_param()->set_norm_region(
LRNParameter_NormRegion_WITHIN_CHANNEL);
layer_param.mutable_lrn_param()->set_local_size(3);
MKLLRNLayer<Dtype> layer(layer_param);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
EXPECT_EQ(this->blob_top_->num(), 2);
EXPECT_EQ(this->blob_top_->channels(), 7);
EXPECT_EQ(this->blob_top_->height(), 3);
EXPECT_EQ(this->blob_top_->width(), 3);
}
TYPED_TEST(MKLLRNLayerTest, TestForwardWithinChannel) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
layer_param.mutable_lrn_param()->set_norm_region(
LRNParameter_NormRegion_WITHIN_CHANNEL);
layer_param.mutable_lrn_param()->set_local_size(3);
MKLLRNLayer<Dtype> layer(layer_param);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
Blob<Dtype> top_reference;
this->ReferenceLRNForward(*(this->blob_bottom_), layer_param,
&top_reference);
for (int i = 0; i < this->blob_bottom_->count(); ++i) {
EXPECT_NEAR(this->blob_top_->cpu_data()[i], top_reference.cpu_data()[i],
this->epsilon_);
}
}
TYPED_TEST(MKLLRNLayerTest, TestGradientWithinChannel) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
layer_param.mutable_lrn_param()->set_norm_region(
LRNParameter_NormRegion_WITHIN_CHANNEL);
layer_param.mutable_lrn_param()->set_local_size(3);
MKLLRNLayer<Dtype> layer(layer_param);
GradientChecker<Dtype> checker(1e-2, 1e-2);
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
for (int i = 0; i < this->blob_top_->count(); ++i) {
this->blob_top_->mutable_cpu_diff()[i] = 1.;
}
checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
this->blob_top_vec_);
}
#endif
} // namespace caffe
#endif // #ifdef MKL2017_SUPPORTED
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2013-2015 RoboVM AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robovm.apple.intents;
/*<imports>*/
import java.io.*;
import java.nio.*;
import java.util.*;
import org.robovm.objc.*;
import org.robovm.objc.annotation.*;
import org.robovm.objc.block.*;
import org.robovm.rt.*;
import org.robovm.rt.annotation.*;
import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
import org.robovm.apple.foundation.*;
import org.robovm.apple.eventkit.*;
import org.robovm.apple.corelocation.*;
/*</imports>*/
/*<javadoc>*/
/**
* @since Available in iOS 10.0 and later.
*/
/*</javadoc>*/
/*<annotations>*/@Library("Intents") @NativeClass/*</annotations>*/
/*<visibility>*/public/*</visibility>*/ class /*<name>*/INPriceRange/*</name>*/
extends /*<extends>*/NSObject/*</extends>*/
/*<implements>*/implements NSSecureCoding/*</implements>*/ {
/*<ptr>*/public static class INPriceRangePtr extends Ptr<INPriceRange, INPriceRangePtr> {}/*</ptr>*/
/*<bind>*/static { ObjCRuntime.bind(INPriceRange.class); }/*</bind>*/
/*<constants>*//*</constants>*/
/*<constructors>*/
protected INPriceRange() {}
protected INPriceRange(Handle h, long handle) { super(h, handle); }
protected INPriceRange(SkipInit skipInit) { super(skipInit); }
@Method(selector = "initWithRangeBetweenPrice:andPrice:currencyCode:")
public INPriceRange(NSDecimalNumber firstPrice, NSDecimalNumber secondPrice, String currencyCode) { super((SkipInit) null); initObject(init(firstPrice, secondPrice, currencyCode)); }
@Method(selector = "initWithMaximumPrice:currencyCode:")
public static INPriceRange createWithMaximumPrice(NSDecimalNumber maximumPrice, String currencyCode) {
INPriceRange res = new INPriceRange((SkipInit) null);
res.initObject(res.initWithMaximumPrice(maximumPrice, currencyCode));
return res;
}
@Method(selector = "initWithMinimumPrice:currencyCode:")
public static INPriceRange createWithMinimumPrice(NSDecimalNumber minimumPrice, String currencyCode) {
INPriceRange res = new INPriceRange((SkipInit) null);
res.initObject(res.initWithMinimumPrice(minimumPrice, currencyCode));
return res;
}
@Method(selector = "initWithPrice:currencyCode:")
public INPriceRange(NSDecimalNumber price, String currencyCode) { super((SkipInit) null); initObject(init(price, currencyCode)); }
@Method(selector = "initWithCoder:")
public INPriceRange(NSCoder coder) { super((SkipInit) null); initObject(init(coder)); }
/*</constructors>*/
/*<properties>*/
@Property(selector = "minimumPrice")
public native NSDecimalNumber getMinimumPrice();
@Property(selector = "maximumPrice")
public native NSDecimalNumber getMaximumPrice();
@Property(selector = "currencyCode")
public native String getCurrencyCode();
@Property(selector = "supportsSecureCoding")
public static native boolean supportsSecureCoding();
/*</properties>*/
/*<members>*//*</members>*/
/*<methods>*/
@Method(selector = "initWithRangeBetweenPrice:andPrice:currencyCode:")
protected native @Pointer long init(NSDecimalNumber firstPrice, NSDecimalNumber secondPrice, String currencyCode);
@Method(selector = "initWithMaximumPrice:currencyCode:")
protected native @Pointer long initWithMaximumPrice(NSDecimalNumber maximumPrice, String currencyCode);
@Method(selector = "initWithMinimumPrice:currencyCode:")
protected native @Pointer long initWithMinimumPrice(NSDecimalNumber minimumPrice, String currencyCode);
@Method(selector = "initWithPrice:currencyCode:")
protected native @Pointer long init(NSDecimalNumber price, String currencyCode);
@Method(selector = "encodeWithCoder:")
public native void encode(NSCoder coder);
@Method(selector = "initWithCoder:")
protected native @Pointer long init(NSCoder coder);
/*</methods>*/
}
| {
"pile_set_name": "Github"
} |
/* ------------------------------------------------------------------
* Copyright (C) 2008 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/****************************************************************************************
Portions of this file are derived from the following 3GPP standard:
3GPP TS 26.073
ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
Available from http://www.3gpp.org
(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
Permission to distribute, modify and use this file under the standard license
terms listed above has been obtained from the copyright holder.
****************************************************************************************/
/*
------------------------------------------------------------------------------
Pathname: ./audio/gsm-amr/c/src/dec_amr.c
 Functions: Decoder_amr_init
Decoder_amr_reset
Decoder_amr
Date: 04/11/2000
------------------------------------------------------------------------------
REVISION HISTORY
 Description: Updated the template used to the PV coding template. First attempt at
optimizing C code.
Description: Synchronized file with UMTS version 3.2.0. Updated coding
template. Removed unnecessary include files.
Description: Made the following changes per comments from Phase 2/3 review:
1. Fixed Input/Output descriptions by adding typedefs and
clarifying definitions.
2. Used #define instead of hard-coded loop counts.
3. Modified FOR loops to count down.
4. Copied function descriptions from dec_amr.h header file.
5. Defined one local variable per line.
Description: Copied function descriptions from header file (forgot to do it
prior to checking-in -- oops!).
Description: Removed the function decoder_amr_exit.
The decoder_amr related structure is no longer dynamically allocated.
Also, modified function calls throughout to reflect the fact that the members
of the structure Decoder_amrState are no longer pointers to be set via
malloc, but full-blown structures. (Changes of the type D_plsfState *lsfState
to D_plsfState lsfState)
 Description: Fixed a bug in Decoder_amr() that caused non-bit exactness. The
Overflow flag was inadvertently taken out of the IF statement
that calls agc2 and Syn_filt. It was restored to the original
form. Updated copyright year.
Description: Adding changes for EPOC regarding pOverflow being passed in
rather than being a global variable.
Description: Initialize overflow flag in Decoder_amr_init() and
Decoder_amr_reset(). Initialize pointer to overflow flag in
Decoder_amr().
Description: Changed round function name to pv_round to avoid conflict with
round function in C standard library.
Description: Replaced OSCL mem type functions and eliminated include
files that now are chosen by OSCL definitions
Description: Replaced "int" and/or "char" with defined types.
Added proper casting (Word32) to some left shifting operations
Description:
------------------------------------------------------------------------------
MODULE DESCRIPTION
This file contains the function used to decode one speech frame using a given
codec mode. The functions used to initialize, reset, and exit are also
included in this file.
------------------------------------------------------------------------------
*/
/*----------------------------------------------------------------------------
; INCLUDES
----------------------------------------------------------------------------*/
#include "dec_amr.h"
#include "typedef.h"
#include "cnst.h"
#include "copy.h"
#include "set_zero.h"
#include "syn_filt.h"
#include "d_plsf.h"
#include "agc.h"
#include "int_lpc.h"
#include "dec_gain.h"
#include "dec_lag3.h"
#include "dec_lag6.h"
#include "d2_9pf.h"
#include "d2_11pf.h"
#include "d3_14pf.h"
#include "d4_17pf.h"
#include "d8_31pf.h"
#include "d1035pf.h"
#include "pred_lt.h"
#include "d_gain_p.h"
#include "d_gain_c.h"
#include "dec_gain.h"
#include "ec_gains.h"
#include "ph_disp.h"
#include "c_g_aver.h"
#include "int_lsf.h"
#include "lsp_lsf.h"
#include "lsp_avg.h"
#include "bgnscd.h"
#include "ex_ctrl.h"
#include "sqrt_l.h"
#include "frame.h"
#include "bitno_tab.h"
#include "b_cn_cod.h"
#include "basic_op.h"
#include "oscl_mem.h"
/*----------------------------------------------------------------------------
; MACROS
; Define module specific macros here
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; DEFINES
; Include all pre-processor statements here. Include conditional
; compile variables also.
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; LOCAL FUNCTION DEFINITIONS
; Function Prototype declaration
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; LOCAL VARIABLE DEFINITIONS
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
/*
------------------------------------------------------------------------------
FUNCTION NAME: Decoder_amr_init
------------------------------------------------------------------------------
INPUT AND OUTPUT DEFINITIONS
Inputs:
state = pointer to a pointer to structures of type Decoder_amrState
Outputs:
structure pointed to by the pointer which is pointed to by state is
initialized to each field's initial values
state pointer points to the address of the memory allocated by
Decoder_amr_init function
Returns:
return_value = 0, if the initialization was successful; -1, otherwise (int)
Global Variables Used:
None
Local Variables Needed:
None
------------------------------------------------------------------------------
FUNCTION DESCRIPTION
This function allocates and initializes state memory used by the Decoder_amr
function. It stores the pointer to the filter status structure in state. This
pointer has to be passed to Decoder_amr in each call. The function returns
0, if initialization was successful and -1, otherwise.
------------------------------------------------------------------------------
REQUIREMENTS
None
------------------------------------------------------------------------------
REFERENCES
dec_amr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
------------------------------------------------------------------------------
PSEUDO-CODE
int Decoder_amr_init (Decoder_amrState **state)
{
Decoder_amrState* s;
Word16 i;
if (state == (Decoder_amrState **) NULL){
fprintf(stderr, "Decoder_amr_init: invalid parameter\n");
return -1;
}
*state = NULL;
// allocate memory
if ((s= (Decoder_amrState *) malloc(sizeof(Decoder_amrState))) == NULL){
fprintf(stderr, "Decoder_amr_init: can not malloc state structure\n");
return -1;
}
s->T0_lagBuff = 40;
s->inBackgroundNoise = 0;
s->voicedHangover = 0;
for (i = 0; i < 9; i++)
s->ltpGainHistory[i] = 0;
s->lsfState = NULL;
s->ec_gain_p_st = NULL;
s->ec_gain_c_st = NULL;
s->pred_state = NULL;
s->ph_disp_st = NULL;
s->dtxDecoderState = NULL;
if (D_plsf_init(&s->lsfState) ||
ec_gain_pitch_init(&s->ec_gain_p_st) ||
ec_gain_code_init(&s->ec_gain_c_st) ||
gc_pred_init(&s->pred_state) ||
Cb_gain_average_init(&s->Cb_gain_averState) ||
lsp_avg_init(&s->lsp_avg_st) ||
Bgn_scd_init(&s->background_state) ||
ph_disp_init(&s->ph_disp_st) ||
dtx_dec_init(&s->dtxDecoderState)) {
Decoder_amr_exit(&s);
return -1;
}
Decoder_amr_reset(s, (enum Mode)0);
*state = s;
return 0;
}
------------------------------------------------------------------------------
RESOURCES USED [optional]
When the code is written for a specific target processor the
the resources used should be documented below.
HEAP MEMORY USED: x bytes
STACK MEMORY USED: x bytes
CLOCK CYCLES: (cycle count equation for this function) + (variable
used to represent cycle count for each subroutine
called)
where: (cycle count variable) = cycle count for [subroutine
name]
------------------------------------------------------------------------------
CAUTION [optional]
[State any special notes, constraints or cautions for users of this function]
------------------------------------------------------------------------------
*/
Word16 Decoder_amr_init(Decoder_amrState *s)
{
Word16 i;
if (s == (Decoder_amrState *) NULL)
{
/* fprint(stderr, "Decoder_amr_init: invalid parameter\n"); */
return(-1);
}
s->T0_lagBuff = 40;
s->inBackgroundNoise = 0;
s->voicedHangover = 0;
/* Initialize overflow Flag */
s->overflow = 0;
for (i = 0; i < LTP_GAIN_HISTORY_LEN; i++)
{
s->ltpGainHistory[i] = 0;
}
D_plsf_reset(&s->lsfState);
ec_gain_pitch_reset(&s->ec_gain_p_st);
ec_gain_code_reset(&s->ec_gain_c_st);
Cb_gain_average_reset(&s->Cb_gain_averState);
lsp_avg_reset(&s->lsp_avg_st);
Bgn_scd_reset(&s->background_state);
ph_disp_reset(&s->ph_disp_st);
dtx_dec_reset(&s->dtxDecoderState);
gc_pred_reset(&s->pred_state);
Decoder_amr_reset(s, MR475);
return(0);
}
/****************************************************************************/
/*
------------------------------------------------------------------------------
FUNCTION NAME: Decoder_amr_reset
------------------------------------------------------------------------------
INPUT AND OUTPUT DEFINITIONS
Inputs:
state = pointer to a structure of type Decoder_amrState
mode = codec mode (enum Mode)
Outputs:
structure pointed to by state is initialized to its reset value
Returns:
return_value = 0, if reset was successful; -1, otherwise (int)
Global Variables Used:
None
Local Variables Needed:
None
------------------------------------------------------------------------------
FUNCTION DESCRIPTION
This function resets the state memory used by the Decoder_amr function. It
returns a 0, if reset was successful and -1, otherwise.
------------------------------------------------------------------------------
REQUIREMENTS
None
------------------------------------------------------------------------------
REFERENCES
dec_amr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
------------------------------------------------------------------------------
PSEUDO-CODE
int Decoder_amr_reset (Decoder_amrState *state, enum Mode mode)
{
Word16 i;
if (state == (Decoder_amrState *) NULL){
fprintf(stderr, "Decoder_amr_reset: invalid parameter\n");
return -1;
}
// Initialize static pointer
state->exc = state->old_exc + PIT_MAX + L_INTERPOL;
// Static vectors to zero
Set_zero (state->old_exc, PIT_MAX + L_INTERPOL);
if (mode != MRDTX)
Set_zero (state->mem_syn, M);
// initialize pitch sharpening
state->sharp = SHARPMIN;
state->old_T0 = 40;
// Initialize state->lsp_old []
if (mode != MRDTX) {
Copy(lsp_init_data, &state->lsp_old[0], M);
}
// Initialize memories of bad frame handling
state->prev_bf = 0;
state->prev_pdf = 0;
state->state = 0;
state->T0_lagBuff = 40;
state->inBackgroundNoise = 0;
state->voicedHangover = 0;
if (mode != MRDTX) {
for (i=0;i<9;i++)
state->excEnergyHist[i] = 0;
}
for (i = 0; i < 9; i++)
state->ltpGainHistory[i] = 0;
Cb_gain_average_reset(state->Cb_gain_averState);
if (mode != MRDTX)
lsp_avg_reset(state->lsp_avg_st);
D_plsf_reset(state->lsfState);
ec_gain_pitch_reset(state->ec_gain_p_st);
ec_gain_code_reset(state->ec_gain_c_st);
if (mode != MRDTX)
gc_pred_reset(state->pred_state);
Bgn_scd_reset(state->background_state);
state->nodataSeed = 21845;
ph_disp_reset(state->ph_disp_st);
if (mode != MRDTX)
dtx_dec_reset(state->dtxDecoderState);
return 0;
}
------------------------------------------------------------------------------
RESOURCES USED [optional]
When the code is written for a specific target processor the
the resources used should be documented below.
HEAP MEMORY USED: x bytes
STACK MEMORY USED: x bytes
CLOCK CYCLES: (cycle count equation for this function) + (variable
used to represent cycle count for each subroutine
called)
where: (cycle count variable) = cycle count for [subroutine
name]
------------------------------------------------------------------------------
CAUTION [optional]
[State any special notes, constraints or cautions for users of this function]
------------------------------------------------------------------------------
*/
Word16 Decoder_amr_reset(Decoder_amrState *state, enum Mode mode)
{
Word16 i;
if (state == (Decoder_amrState *) NULL)
{
/* fprint(stderr, "Decoder_amr_reset: invalid parameter\n"); */
return(-1);
}
/* Initialize static pointer */
state->exc = state->old_exc + PIT_MAX + L_INTERPOL;
/* Static vectors to zero */
oscl_memset(state->old_exc, 0, sizeof(Word16)*(PIT_MAX + L_INTERPOL));
if (mode != MRDTX)
{
oscl_memset(state->mem_syn, 0, sizeof(Word16)*M);
}
/* initialize pitch sharpening */
state->sharp = SHARPMIN;
state->old_T0 = 40;
/* Initialize overflow Flag */
state->overflow = 0;
/* Initialize state->lsp_old [] */
if (mode != MRDTX)
{
state->lsp_old[0] = 30000;
state->lsp_old[1] = 26000;
state->lsp_old[2] = 21000;
state->lsp_old[3] = 15000;
state->lsp_old[4] = 8000;
state->lsp_old[5] = 0;
state->lsp_old[6] = -8000;
state->lsp_old[7] = -15000;
state->lsp_old[8] = -21000;
state->lsp_old[9] = -26000;
}
/* Initialize memories of bad frame handling */
state->prev_bf = 0;
state->prev_pdf = 0;
state->state = 0;
state->T0_lagBuff = 40;
state->inBackgroundNoise = 0;
state->voicedHangover = 0;
if (mode != MRDTX)
{
for (i = 0;i < EXC_ENERGY_HIST_LEN;i++)
{
state->excEnergyHist[i] = 0;
}
}
for (i = 0; i < LTP_GAIN_HISTORY_LEN; i++)
{
state->ltpGainHistory[i] = 0;
}
Cb_gain_average_reset(&(state->Cb_gain_averState));
if (mode != MRDTX)
{
lsp_avg_reset(&(state->lsp_avg_st));
}
D_plsf_reset(&(state->lsfState));
ec_gain_pitch_reset(&(state->ec_gain_p_st));
ec_gain_code_reset(&(state->ec_gain_c_st));
if (mode != MRDTX)
{
gc_pred_reset(&(state->pred_state));
}
Bgn_scd_reset(&(state->background_state));
state->nodataSeed = 21845;
ph_disp_reset(&(state->ph_disp_st));
if (mode != MRDTX)
{
dtx_dec_reset(&(state->dtxDecoderState));
}
return(0);
}
/****************************************************************************/
/*
------------------------------------------------------------------------------
FUNCTION NAME: Decoder_amr
------------------------------------------------------------------------------
INPUT AND OUTPUT DEFINITIONS
Inputs:
st = pointer to a structure of type Decoder_amrState
mode = codec mode (enum Mode)
parm = buffer of synthesis parameters (Word16)
frame_type = received frame type (enum RXFrameType)
synth = buffer containing synthetic speech (Word16)
A_t = buffer containing decoded LP filter in 4 subframes (Word16)
Outputs:
structure pointed to by st contains the newly calculated decoder
parameters
synth buffer contains the decoded speech samples
A_t buffer contains the decoded LP filter parameters
Returns:
return_value = 0 (int)
Global Variables Used:
None
Local Variables Needed:
None
------------------------------------------------------------------------------
FUNCTION DESCRIPTION
This function performs the decoding of one speech frame for a given codec
mode.
------------------------------------------------------------------------------
REQUIREMENTS
None
------------------------------------------------------------------------------
REFERENCES
dec_amr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
------------------------------------------------------------------------------
PSEUDO-CODE
int Decoder_amr (
Decoder_amrState *st, // i/o : State variables
enum Mode mode, // i : AMR mode
Word16 parm[], // i : vector of synthesis parameters
(PRM_SIZE)
enum RXFrameType frame_type, // i : received frame type
Word16 synth[], // o : synthesis speech (L_FRAME)
Word16 A_t[] // o : decoded LP filter in 4 subframes
(AZ_SIZE)
)
{
// LPC coefficients
Word16 *Az; // Pointer on A_t
// LSPs
Word16 lsp_new[M];
Word16 lsp_mid[M];
// LSFs
Word16 prev_lsf[M];
Word16 lsf_i[M];
// Algebraic codevector
Word16 code[L_SUBFR];
// excitation
Word16 excp[L_SUBFR];
Word16 exc_enhanced[L_SUBFR];
// Scalars
Word16 i, i_subfr;
Word16 T0, T0_frac, index, index_mr475 = 0;
Word16 gain_pit, gain_code, gain_code_mix, pit_sharp, pit_flag, pitch_fac;
Word16 t0_min, t0_max;
Word16 delta_frc_low, delta_frc_range;
Word16 tmp_shift;
Word16 temp;
Word32 L_temp;
Word16 flag4;
Word16 carefulFlag;
Word16 excEnergy;
Word16 subfrNr;
Word16 evenSubfr = 0;
Word16 bfi = 0; // bad frame indication flag
Word16 pdfi = 0; // potential degraded bad frame flag
enum DTXStateType newDTXState; // SPEECH , DTX, DTX_MUTE
// find the new DTX state SPEECH OR DTX
newDTXState = rx_dtx_handler(st->dtxDecoderState, frame_type);
// DTX actions
if (sub(newDTXState, SPEECH) != 0 )
{
Decoder_amr_reset (st, MRDTX);
dtx_dec(st->dtxDecoderState,
st->mem_syn,
st->lsfState,
st->pred_state,
st->Cb_gain_averState,
newDTXState,
mode,
parm, synth, A_t);
// update average lsp
Lsf_lsp(st->lsfState->past_lsf_q, st->lsp_old, M);
lsp_avg(st->lsp_avg_st, st->lsfState->past_lsf_q);
goto the_end;
}
// SPEECH action state machine
if ((sub(frame_type, RX_SPEECH_BAD) == 0) ||
(sub(frame_type, RX_NO_DATA) == 0) ||
(sub(frame_type, RX_ONSET) == 0))
{
bfi = 1;
if ((sub(frame_type, RX_NO_DATA) == 0) ||
(sub(frame_type, RX_ONSET) == 0))
{
build_CN_param(&st->nodataSeed,
prmno[mode],
bitno[mode],
parm);
}
}
else if (sub(frame_type, RX_SPEECH_DEGRADED) == 0)
{
pdfi = 1;
}
if (bfi != 0)
{
st->state = add (st->state, 1);
}
else if (sub (st->state, 6) == 0)
{
st->state = 5;
}
else
{
st->state = 0;
}
if (sub (st->state, 6) > 0)
{
st->state = 6;
}
// If this frame is the first speech frame after CNI period,
// set the BFH state machine to an appropriate state depending
// on whether there was DTX muting before start of speech or not
// If there was DTX muting, the first speech frame is muted.
// If there was no DTX muting, the first speech frame is not
// muted. The BFH state machine starts from state 5, however, to
// keep the audible noise resulting from a SID frame which is
// erroneously interpreted as a good speech frame as small as
// possible (the decoder output in this case is quickly muted)
if (sub(st->dtxDecoderState->dtxGlobalState, DTX) == 0)
{
st->state = 5;
st->prev_bf = 0;
}
else if (sub(st->dtxDecoderState->dtxGlobalState, DTX_MUTE) == 0)
{
st->state = 5;
st->prev_bf = 1;
}
// save old LSFs for CB gain smoothing
Copy (st->lsfState->past_lsf_q, prev_lsf, M);
// decode LSF parameters and generate interpolated lpc coefficients
for the 4 subframes
if (sub (mode, MR122) != 0)
{
D_plsf_3(st->lsfState, mode, bfi, parm, lsp_new);
// Advance synthesis parameters pointer
parm += 3;
Int_lpc_1to3(st->lsp_old, lsp_new, A_t);
}
else
{
D_plsf_5 (st->lsfState, bfi, parm, lsp_mid, lsp_new);
// Advance synthesis parameters pointer
parm += 5;
Int_lpc_1and3 (st->lsp_old, lsp_mid, lsp_new, A_t);
}
// update the LSPs for the next frame
for (i = 0; i < M; i++)
{
st->lsp_old[i] = lsp_new[i];
}
*------------------------------------------------------------------------*
* Loop for every subframe in the analysis frame *
*------------------------------------------------------------------------*
* The subframe size is L_SUBFR and the loop is repeated L_FRAME/L_SUBFR *
* times *
* - decode the pitch delay *
* - decode algebraic code *
* - decode pitch and codebook gains *
* - find the excitation and compute synthesis speech *
*------------------------------------------------------------------------*
// pointer to interpolated LPC parameters
Az = A_t;
evenSubfr = 0;
subfrNr = -1;
for (i_subfr = 0; i_subfr < L_FRAME; i_subfr += L_SUBFR)
{
subfrNr = add(subfrNr, 1);
evenSubfr = sub(1, evenSubfr);
// flag for first and 3th subframe
pit_flag = i_subfr;
if (sub (i_subfr, L_FRAME_BY2) == 0)
{
if (sub(mode, MR475) != 0 && sub(mode, MR515) != 0)
{
pit_flag = 0;
}
}
// pitch index
index = *parm++;
*-------------------------------------------------------*
* - decode pitch lag and find adaptive codebook vector. *
*-------------------------------------------------------*
if (sub(mode, MR122) != 0)
{
// flag4 indicates encoding with 4 bit resolution;
// this is needed for mode MR475, MR515, MR59 and MR67
flag4 = 0;
if ((sub (mode, MR475) == 0) ||
(sub (mode, MR515) == 0) ||
(sub (mode, MR59) == 0) ||
(sub (mode, MR67) == 0) ) {
flag4 = 1;
}
*-------------------------------------------------------*
* - get ranges for the t0_min and t0_max *
* - only needed in delta decoding *
*-------------------------------------------------------*
delta_frc_low = 5;
delta_frc_range = 9;
if ( sub(mode, MR795) == 0 )
{
delta_frc_low = 10;
delta_frc_range = 19;
}
t0_min = sub(st->old_T0, delta_frc_low);
if (sub(t0_min, PIT_MIN) < 0)
{
t0_min = PIT_MIN;
}
t0_max = add(t0_min, delta_frc_range);
if (sub(t0_max, PIT_MAX) > 0)
{
t0_max = PIT_MAX;
t0_min = sub(t0_max, delta_frc_range);
}
Dec_lag3 (index, t0_min, t0_max, pit_flag, st->old_T0,
&T0, &T0_frac, flag4);
st->T0_lagBuff = T0;
if (bfi != 0)
{
if (sub (st->old_T0, PIT_MAX) < 0)
{ // Graceful pitch
st->old_T0 = add(st->old_T0, 1); // degradation
}
T0 = st->old_T0;
T0_frac = 0;
if ( st->inBackgroundNoise != 0 &&
sub(st->voicedHangover, 4) > 0 &&
((sub(mode, MR475) == 0 ) ||
(sub(mode, MR515) == 0 ) ||
(sub(mode, MR59) == 0) )
)
{
T0 = st->T0_lagBuff;
}
}
Pred_lt_3or6 (st->exc, T0, T0_frac, L_SUBFR, 1);
}
else
{
Dec_lag6 (index, PIT_MIN_MR122,
PIT_MAX, pit_flag, &T0, &T0_frac);
if ( bfi == 0 && (pit_flag == 0 || sub (index, 61) < 0))
{
}
else
{
st->T0_lagBuff = T0;
T0 = st->old_T0;
T0_frac = 0;
}
Pred_lt_3or6 (st->exc, T0, T0_frac, L_SUBFR, 0);
}
*-------------------------------------------------------*
* - (MR122 only: Decode pitch gain.) *
* - Decode innovative codebook. *
* - set pitch sharpening factor *
*-------------------------------------------------------*
if (sub (mode, MR475) == 0 || sub (mode, MR515) == 0)
{ // MR475, MR515
index = *parm++; // index of position
i = *parm++; // signs
decode_2i40_9bits (subfrNr, i, index, code);
pit_sharp = shl (st->sharp, 1);
}
else if (sub (mode, MR59) == 0)
{ // MR59
index = *parm++; // index of position
i = *parm++; // signs
decode_2i40_11bits (i, index, code);
pit_sharp = shl (st->sharp, 1);
}
else if (sub (mode, MR67) == 0)
{ // MR67
index = *parm++; // index of position
i = *parm++; // signs
decode_3i40_14bits (i, index, code);
pit_sharp = shl (st->sharp, 1);
}
else if (sub (mode, MR795) <= 0)
{ // MR74, MR795
index = *parm++; // index of position
i = *parm++; // signs
decode_4i40_17bits (i, index, code);
pit_sharp = shl (st->sharp, 1);
}
else if (sub (mode, MR102) == 0)
{ // MR102
dec_8i40_31bits (parm, code);
parm += 7;
pit_sharp = shl (st->sharp, 1);
}
else
{ // MR122
index = *parm++;
if (bfi != 0)
{
ec_gain_pitch (st->ec_gain_p_st, st->state, &gain_pit);
}
else
{
gain_pit = d_gain_pitch (mode, index);
}
ec_gain_pitch_update (st->ec_gain_p_st, bfi, st->prev_bf,
&gain_pit);
dec_10i40_35bits (parm, code);
parm += 10;
// pit_sharp = gain_pit;
// if (pit_sharp > 1.0) pit_sharp = 1.0;
pit_sharp = shl (gain_pit, 1);
}
*-------------------------------------------------------*
* - Add the pitch contribution to code[]. *
*-------------------------------------------------------*
for (i = T0; i < L_SUBFR; i++)
{
temp = mult (code[i - T0], pit_sharp);
code[i] = add (code[i], temp);
}
*------------------------------------------------------------*
* - Decode codebook gain (MR122) or both pitch *
* gain and codebook gain (all others) *
* - Update pitch sharpening "sharp" with quantized gain_pit *
*------------------------------------------------------------*
if (sub (mode, MR475) == 0)
{
// read and decode pitch and code gain
if (evenSubfr != 0)
{
index_mr475 = *parm++; // index of gain(s)
}
if (bfi == 0)
{
Dec_gain(st->pred_state, mode, index_mr475, code,
evenSubfr, &gain_pit, &gain_code);
}
else
{
ec_gain_pitch (st->ec_gain_p_st, st->state, &gain_pit);
ec_gain_code (st->ec_gain_c_st, st->pred_state, st->state,
&gain_code);
}
ec_gain_pitch_update (st->ec_gain_p_st, bfi, st->prev_bf,
&gain_pit);
ec_gain_code_update (st->ec_gain_c_st, bfi, st->prev_bf,
&gain_code);
pit_sharp = gain_pit;
if (sub (pit_sharp, SHARPMAX) > 0)
{
pit_sharp = SHARPMAX;
}
}
else if ((sub (mode, MR74) <= 0) ||
(sub (mode, MR102) == 0))
{
// read and decode pitch and code gain
index = *parm++; // index of gain(s)
if (bfi == 0)
{
Dec_gain(st->pred_state, mode, index, code,
evenSubfr, &gain_pit, &gain_code);
}
else
{
ec_gain_pitch (st->ec_gain_p_st, st->state, &gain_pit);
ec_gain_code (st->ec_gain_c_st, st->pred_state, st->state,
&gain_code);
}
ec_gain_pitch_update (st->ec_gain_p_st, bfi, st->prev_bf,
&gain_pit);
ec_gain_code_update (st->ec_gain_c_st, bfi, st->prev_bf,
&gain_code);
pit_sharp = gain_pit;
if (sub (pit_sharp, SHARPMAX) > 0)
{
pit_sharp = SHARPMAX;
}
if (sub (mode, MR102) == 0)
{
if (sub (st->old_T0, add(L_SUBFR, 5)) > 0)
{
pit_sharp = shr(pit_sharp, 2);
}
}
}
else
{
// read and decode pitch gain
index = *parm++; // index of gain(s)
if (sub (mode, MR795) == 0)
{
// decode pitch gain
if (bfi != 0)
{
ec_gain_pitch (st->ec_gain_p_st, st->state, &gain_pit);
}
else
{
gain_pit = d_gain_pitch (mode, index);
}
ec_gain_pitch_update (st->ec_gain_p_st, bfi, st->prev_bf,
&gain_pit);
// read and decode code gain
index = *parm++;
if (bfi == 0)
{
d_gain_code (st->pred_state, mode, index, code, &gain_code);
}
else
{
ec_gain_code (st->ec_gain_c_st, st->pred_state, st->state,
&gain_code);
}
ec_gain_code_update (st->ec_gain_c_st, bfi, st->prev_bf,
&gain_code);
pit_sharp = gain_pit;
if (sub (pit_sharp, SHARPMAX) > 0)
{
pit_sharp = SHARPMAX;
}
}
else
{ // MR122
if (bfi == 0)
{
d_gain_code (st->pred_state, mode, index, code, &gain_code);
}
else
{
ec_gain_code (st->ec_gain_c_st, st->pred_state, st->state,
&gain_code);
}
ec_gain_code_update (st->ec_gain_c_st, bfi, st->prev_bf,
&gain_code);
pit_sharp = gain_pit;
}
}
// store pitch sharpening for next subframe
// (for modes which use the previous pitch gain for
// pitch sharpening in the search phase)
// do not update sharpening in even subframes for MR475
if (sub(mode, MR475) != 0 || evenSubfr == 0)
{
st->sharp = gain_pit;
if (sub (st->sharp, SHARPMAX) > 0)
{
st->sharp = SHARPMAX;
}
}
pit_sharp = shl (pit_sharp, 1);
if (sub (pit_sharp, 16384) > 0)
{
for (i = 0; i < L_SUBFR; i++)
{
temp = mult (st->exc[i], pit_sharp);
L_temp = L_mult (temp, gain_pit);
if (sub(mode, MR122)==0)
{
L_temp = L_shr (L_temp, 1);
}
excp[i] = pv_round (L_temp);
}
}
*-------------------------------------------------------*
* - Store list of LTP gains needed in the source *
* characteristic detector (SCD) *
*-------------------------------------------------------*
if ( bfi == 0 )
{
for (i = 0; i < 8; i++)
{
st->ltpGainHistory[i] = st->ltpGainHistory[i+1];
}
st->ltpGainHistory[8] = gain_pit;
}
*-------------------------------------------------------*
* - Limit gain_pit if in background noise and BFI *
* for MR475, MR515, MR59 *
*-------------------------------------------------------*
if ( (st->prev_bf != 0 || bfi != 0) && st->inBackgroundNoise != 0 &&
((sub(mode, MR475) == 0) ||
(sub(mode, MR515) == 0) ||
(sub(mode, MR59) == 0))
)
{
if ( sub (gain_pit, 12288) > 0) // if (gain_pit > 0.75) in Q14
gain_pit = add( shr( sub(gain_pit, 12288), 1 ), 12288 );
// gain_pit = (gain_pit-0.75)/2.0 + 0.75;
if ( sub (gain_pit, 14745) > 0) // if (gain_pit > 0.90) in Q14
{
gain_pit = 14745;
}
}
*-------------------------------------------------------*
* Calculate CB mixed gain *
*-------------------------------------------------------*
Int_lsf(prev_lsf, st->lsfState->past_lsf_q, i_subfr, lsf_i);
gain_code_mix = Cb_gain_average(
st->Cb_gain_averState, mode, gain_code,
lsf_i, st->lsp_avg_st->lsp_meanSave, bfi,
st->prev_bf, pdfi, st->prev_pdf,
st->inBackgroundNoise, st->voicedHangover);
// make sure that MR74, MR795, MR122 have original code_gain
if ((sub(mode, MR67) > 0) && (sub(mode, MR102) != 0) )
// MR74, MR795, MR122
{
gain_code_mix = gain_code;
}
*-------------------------------------------------------*
* - Find the total excitation. *
* - Find synthesis speech corresponding to st->exc[]. *
*-------------------------------------------------------*
if (sub(mode, MR102) <= 0) // MR475, MR515, MR59, MR67, MR74, MR795, MR102
{
pitch_fac = gain_pit;
tmp_shift = 1;
}
else // MR122
{
pitch_fac = shr (gain_pit, 1);
tmp_shift = 2;
}
// copy unscaled LTP excitation to exc_enhanced (used in phase
* dispersion below) and compute total excitation for LTP feedback
for (i = 0; i < L_SUBFR; i++)
{
exc_enhanced[i] = st->exc[i];
// st->exc[i] = gain_pit*st->exc[i] + gain_code*code[i];
L_temp = L_mult (st->exc[i], pitch_fac);
// 12.2: Q0 * Q13
// 7.4: Q0 * Q14
L_temp = L_mac (L_temp, code[i], gain_code);
// 12.2: Q12 * Q1
// 7.4: Q13 * Q1
L_temp = L_shl (L_temp, tmp_shift); // Q16
st->exc[i] = pv_round (L_temp);
}
*-------------------------------------------------------*
* - Adaptive phase dispersion *
*-------------------------------------------------------*
ph_disp_release(st->ph_disp_st); // free phase dispersion adaption
if ( ((sub(mode, MR475) == 0) ||
(sub(mode, MR515) == 0) ||
(sub(mode, MR59) == 0)) &&
sub(st->voicedHangover, 3) > 0 &&
st->inBackgroundNoise != 0 &&
bfi != 0 )
{
ph_disp_lock(st->ph_disp_st); // Always Use full Phase Disp.
} // if error in bg noise
// apply phase dispersion to innovation (if enabled) and
compute total excitation for synthesis part
ph_disp(st->ph_disp_st, mode,
exc_enhanced, gain_code_mix, gain_pit, code,
pitch_fac, tmp_shift);
*-------------------------------------------------------*
* - The Excitation control module is active during BFI. *
* - Conceal drops in signal energy if in bg noise. *
*-------------------------------------------------------*
L_temp = 0;
for (i = 0; i < L_SUBFR; i++)
{
L_temp = L_mac (L_temp, exc_enhanced[i], exc_enhanced[i] );
}
L_temp = L_shr (L_temp, 1); // excEnergy = sqrt(L_temp) in Q0
L_temp = sqrt_l_exp(L_temp, &temp); // function result
L_temp = L_shr(L_temp, add( shr(temp, 1), 15));
L_temp = L_shr(L_temp, 2); // To cope with 16-bit and
excEnergy = extract_l(L_temp); // scaling in ex_ctrl()
if ( ((sub (mode, MR475) == 0) ||
(sub (mode, MR515) == 0) ||
(sub (mode, MR59) == 0)) &&
sub(st->voicedHangover, 5) > 0 &&
st->inBackgroundNoise != 0 &&
sub(st->state, 4) < 0 &&
( (pdfi != 0 && st->prev_pdf != 0) ||
bfi != 0 ||
st->prev_bf != 0) )
{
carefulFlag = 0;
if ( pdfi != 0 && bfi == 0 )
{
carefulFlag = 1;
}
Ex_ctrl(exc_enhanced,
excEnergy,
st->excEnergyHist,
st->voicedHangover,
st->prev_bf,
carefulFlag);
}
if ( st->inBackgroundNoise != 0 &&
( bfi != 0 || st->prev_bf != 0 ) &&
sub(st->state, 4) < 0 )
{
; // do nothing!
}
else
{
// Update energy history for all modes
for (i = 0; i < 8; i++)
{
st->excEnergyHist[i] = st->excEnergyHist[i+1];
}
st->excEnergyHist[8] = excEnergy;
}
*-------------------------------------------------------*
* Excitation control module end. *
*-------------------------------------------------------*
if (sub (pit_sharp, 16384) > 0)
{
for (i = 0; i < L_SUBFR; i++)
{
excp[i] = add (excp[i], exc_enhanced[i]);
}
agc2 (exc_enhanced, excp, L_SUBFR);
Overflow = 0;
Syn_filt (Az, excp, &synth[i_subfr], L_SUBFR,
st->mem_syn, 0);
}
else
{
Overflow = 0;
Syn_filt (Az, exc_enhanced, &synth[i_subfr], L_SUBFR,
st->mem_syn, 0);
}
if (Overflow != 0) // Test for overflow
{
for (i = 0; i < PIT_MAX + L_INTERPOL + L_SUBFR; i++)
{
st->old_exc[i] = shr(st->old_exc[i], 2);
}
for (i = 0; i < L_SUBFR; i++)
{
exc_enhanced[i] = shr(exc_enhanced[i], 2);
}
Syn_filt(Az, exc_enhanced, &synth[i_subfr], L_SUBFR, st->mem_syn, 1);
}
else
{
Copy(&synth[i_subfr+L_SUBFR-M], st->mem_syn, M);
}
*--------------------------------------------------*
* Update signal for next frame. *
* -> shift to the left by L_SUBFR st->exc[] *
*--------------------------------------------------*
Copy (&st->old_exc[L_SUBFR], &st->old_exc[0], PIT_MAX + L_INTERPOL);
// interpolated LPC parameters for next subframe
Az += MP1;
// store T0 for next subframe
st->old_T0 = T0;
}
*-------------------------------------------------------*
* Call the Source Characteristic Detector which updates *
* st->inBackgroundNoise and st->voicedHangover. *
*-------------------------------------------------------*
st->inBackgroundNoise = Bgn_scd(st->background_state,
&(st->ltpGainHistory[0]),
&(synth[0]),
&(st->voicedHangover) );
dtx_dec_activity_update(st->dtxDecoderState,
st->lsfState->past_lsf_q,
synth);
// store bfi for next subframe
st->prev_bf = bfi;
st->prev_pdf = pdfi;
*--------------------------------------------------*
* Calculate the LSF averages on the eight *
* previous frames *
*--------------------------------------------------*
lsp_avg(st->lsp_avg_st, st->lsfState->past_lsf_q);
the_end:
st->dtxDecoderState->dtxGlobalState = newDTXState;
return 0;
}
------------------------------------------------------------------------------
RESOURCES USED [optional]
When the code is written for a specific target processor the
resources used should be documented below.
HEAP MEMORY USED: x bytes
STACK MEMORY USED: x bytes
CLOCK CYCLES: (cycle count equation for this function) + (variable
used to represent cycle count for each subroutine
called)
where: (cycle count variable) = cycle count for [subroutine
name]
------------------------------------------------------------------------------
CAUTION [optional]
[State any special notes, constraints or cautions for users of this function]
------------------------------------------------------------------------------
*/
void Decoder_amr(
Decoder_amrState *st, /* i/o : State variables */
enum Mode mode, /* i : AMR mode */
Word16 parm[], /* i : vector of synthesis parameters
(PRM_SIZE) */
enum RXFrameType frame_type, /* i : received frame type */
Word16 synth[], /* o : synthesis speech (L_FRAME) */
Word16 A_t[] /* o : decoded LP filter in 4 subframes
(AZ_SIZE) */
)
{
/* LPC coefficients */
Word16 *Az; /* Pointer on A_t */
/* LSPs */
Word16 lsp_new[M];
Word16 lsp_mid[M];
/* LSFs */
Word16 prev_lsf[M];
Word16 lsf_i[M];
/* Algebraic codevector */
Word16 code[L_SUBFR];
/* excitation */
Word16 excp[L_SUBFR];
Word16 exc_enhanced[L_SUBFR];
/* Scalars */
Word16 i;
Word16 i_subfr;
Word16 T0;
Word16 T0_frac;
Word16 index;
Word16 index_mr475 = 0;
Word16 gain_pit;
Word16 gain_code;
Word16 gain_code_mix;
Word16 pit_sharp;
Word16 pit_flag;
Word16 pitch_fac;
Word16 t0_min;
Word16 t0_max;
Word16 delta_frc_low;
Word16 delta_frc_range;
Word16 tmp_shift;
Word16 temp;
Word32 L_temp;
Word16 flag4;
Word16 carefulFlag;
Word16 excEnergy;
Word16 subfrNr;
Word16 evenSubfr = 0;
Word16 bfi = 0; /* bad frame indication flag */
Word16 pdfi = 0; /* potential degraded bad frame flag */
enum DTXStateType newDTXState; /* SPEECH , DTX, DTX_MUTE */
Flag *pOverflow = &(st->overflow); /* Overflow flag */
/* find the new DTX state SPEECH OR DTX */
newDTXState = rx_dtx_handler(&(st->dtxDecoderState), frame_type, pOverflow);
/* DTX actions */
if (newDTXState != SPEECH)
{
Decoder_amr_reset(st, MRDTX);
dtx_dec(&(st->dtxDecoderState),
st->mem_syn,
&(st->lsfState),
&(st->pred_state),
&(st->Cb_gain_averState),
newDTXState,
mode,
parm, synth, A_t, pOverflow);
/* update average lsp */
Lsf_lsp(
st->lsfState.past_lsf_q,
st->lsp_old,
M,
pOverflow);
lsp_avg(
&(st->lsp_avg_st),
st->lsfState.past_lsf_q,
pOverflow);
goto the_end;
}
/* SPEECH action state machine */
if ((frame_type == RX_SPEECH_BAD) || (frame_type == RX_NO_DATA) ||
(frame_type == RX_ONSET))
{
bfi = 1;
if ((frame_type == RX_NO_DATA) || (frame_type == RX_ONSET))
{
build_CN_param(&st->nodataSeed,
prmno[mode],
bitno[mode],
parm,
pOverflow);
}
}
else if (frame_type == RX_SPEECH_DEGRADED)
{
pdfi = 1;
}
if (bfi != 0)
{
st->state += 1;
}
else if (st->state == 6)
{
st->state = 5;
}
else
{
st->state = 0;
}
if (st->state > 6)
{
st->state = 6;
}
/* If this frame is the first speech frame after CNI period, */
/* set the BFH state machine to an appropriate state depending */
/* on whether there was DTX muting before start of speech or not */
/* If there was DTX muting, the first speech frame is muted. */
/* If there was no DTX muting, the first speech frame is not */
/* muted. The BFH state machine starts from state 5, however, to */
/* keep the audible noise resulting from a SID frame which is */
/* erroneously interpreted as a good speech frame as small as */
/* possible (the decoder output in this case is quickly muted) */
if (st->dtxDecoderState.dtxGlobalState == DTX)
{
st->state = 5;
st->prev_bf = 0;
}
else if (st->dtxDecoderState.dtxGlobalState == DTX_MUTE)
{
st->state = 5;
st->prev_bf = 1;
}
/* save old LSFs for CB gain smoothing */
Copy(st->lsfState.past_lsf_q, prev_lsf, M);
/* decode LSF parameters and generate interpolated lpc coefficients
for the 4 subframes */
if (mode != MR122)
{
D_plsf_3(
&(st->lsfState),
mode,
bfi,
parm,
lsp_new,
pOverflow);
/* Advance synthesis parameters pointer */
parm += 3;
Int_lpc_1to3(
st->lsp_old,
lsp_new,
A_t,
pOverflow);
}
else
{
D_plsf_5(
&(st->lsfState),
bfi,
parm,
lsp_mid,
lsp_new,
pOverflow);
/* Advance synthesis parameters pointer */
parm += 5;
Int_lpc_1and3(
st->lsp_old,
lsp_mid,
lsp_new,
A_t,
pOverflow);
}
/* update the LSPs for the next frame */
for (i = 0; i < M; i++)
{
st->lsp_old[i] = lsp_new[i];
}
/*------------------------------------------------------------------------*
* Loop for every subframe in the analysis frame *
*------------------------------------------------------------------------*
* The subframe size is L_SUBFR and the loop is repeated L_FRAME/L_SUBFR *
* times *
* - decode the pitch delay *
* - decode algebraic code *
* - decode pitch and codebook gains *
* - find the excitation and compute synthesis speech *
*------------------------------------------------------------------------*/
/* pointer to interpolated LPC parameters */
Az = A_t;
evenSubfr = 0;
subfrNr = -1;
for (i_subfr = 0; i_subfr < L_FRAME; i_subfr += L_SUBFR)
{
subfrNr += 1;
evenSubfr = 1 - evenSubfr;
/* flag for first and third subframe */
pit_flag = i_subfr;
if (i_subfr == L_FRAME_BY2)
{
if ((mode != MR475) && (mode != MR515))
{
pit_flag = 0;
}
}
/* pitch index */
index = *parm++;
/*-------------------------------------------------------*
* - decode pitch lag and find adaptive codebook vector. *
*-------------------------------------------------------*/
if (mode != MR122)
{
/* flag4 indicates encoding with 4 bit resolution; */
/* this is needed for mode MR475, MR515, MR59 and MR67 */
flag4 = 0;
if ((mode == MR475) || (mode == MR515) || (mode == MR59) ||
(mode == MR67))
{
flag4 = 1;
}
/*-------------------------------------------------------*
* - get ranges for the t0_min and t0_max *
* - only needed in delta decoding *
*-------------------------------------------------------*/
delta_frc_low = 5;
delta_frc_range = 9;
if (mode == MR795)
{
delta_frc_low = 10;
delta_frc_range = 19;
}
t0_min = sub(st->old_T0, delta_frc_low, pOverflow);
if (t0_min < PIT_MIN)
{
t0_min = PIT_MIN;
}
t0_max = add(t0_min, delta_frc_range, pOverflow);
if (t0_max > PIT_MAX)
{
t0_max = PIT_MAX;
t0_min = t0_max - delta_frc_range;
}
Dec_lag3(index, t0_min, t0_max, pit_flag, st->old_T0,
&T0, &T0_frac, flag4, pOverflow);
st->T0_lagBuff = T0;
if (bfi != 0)
{
if (st->old_T0 < PIT_MAX)
{ /* Graceful pitch */
st->old_T0 += 1; /* degradation */
}
T0 = st->old_T0;
T0_frac = 0;
if ((st->inBackgroundNoise != 0) && (st->voicedHangover > 4) &&
((mode == MR475) || (mode == MR515) || (mode == MR59)))
{
T0 = st->T0_lagBuff;
}
}
Pred_lt_3or6(st->exc, T0, T0_frac, L_SUBFR, 1, pOverflow);
}
else
{
Dec_lag6(index, PIT_MIN_MR122,
PIT_MAX, pit_flag, &T0, &T0_frac, pOverflow);
if (!(bfi == 0 && (pit_flag == 0 || index < 61)))
{
st->T0_lagBuff = T0;
T0 = st->old_T0;
T0_frac = 0;
}
Pred_lt_3or6(st->exc, T0, T0_frac, L_SUBFR, 0, pOverflow);
}
/*-------------------------------------------------------*
* - (MR122 only: Decode pitch gain.) *
* - Decode innovative codebook. *
* - set pitch sharpening factor *
*-------------------------------------------------------*/
if ((mode == MR475) || (mode == MR515))
{ /* MR475, MR515 */
index = *parm++; /* index of position */
i = *parm++; /* signs */
decode_2i40_9bits(subfrNr, i, index, code, pOverflow);
L_temp = (Word32)st->sharp << 1;
if (L_temp != (Word32)((Word16) L_temp))
{
pit_sharp = (st->sharp > 0) ? MAX_16 : MIN_16;
}
else
{
pit_sharp = (Word16) L_temp;
}
}
else if (mode == MR59)
{ /* MR59 */
index = *parm++; /* index of position */
i = *parm++; /* signs */
decode_2i40_11bits(i, index, code);
L_temp = (Word32)st->sharp << 1;
if (L_temp != (Word32)((Word16) L_temp))
{
pit_sharp = (st->sharp > 0) ? MAX_16 : MIN_16;
}
else
{
pit_sharp = (Word16) L_temp;
}
}
else if (mode == MR67)
{ /* MR67 */
index = *parm++; /* index of position */
i = *parm++; /* signs */
decode_3i40_14bits(i, index, code);
L_temp = (Word32)st->sharp << 1;
if (L_temp != (Word32)((Word16) L_temp))
{
pit_sharp = (st->sharp > 0) ? MAX_16 : MIN_16;
}
else
{
pit_sharp = (Word16) L_temp;
}
}
else if (mode <= MR795)
{ /* MR74, MR795 */
index = *parm++; /* index of position */
i = *parm++; /* signs */
decode_4i40_17bits(i, index, code);
L_temp = (Word32)st->sharp << 1;
if (L_temp != (Word32)((Word16) L_temp))
{
pit_sharp = (st->sharp > 0) ? MAX_16 : MIN_16;
}
else
{
pit_sharp = (Word16) L_temp;
}
}
else if (mode == MR102)
{ /* MR102 */
dec_8i40_31bits(parm, code, pOverflow);
parm += 7;
L_temp = (Word32)st->sharp << 1;
if (L_temp != (Word32)((Word16) L_temp))
{
pit_sharp = (st->sharp > 0) ? MAX_16 : MIN_16;
}
else
{
pit_sharp = (Word16) L_temp;
}
}
else
{ /* MR122 */
index = *parm++;
if (bfi != 0)
{
ec_gain_pitch(
&(st->ec_gain_p_st),
st->state,
&gain_pit,
pOverflow);
}
else
{
gain_pit = d_gain_pitch(mode, index);
}
ec_gain_pitch_update(
&(st->ec_gain_p_st),
bfi,
st->prev_bf,
&gain_pit,
pOverflow);
dec_10i40_35bits(parm, code);
parm += 10;
/* pit_sharp = gain_pit; */
/* if (pit_sharp > 1.0) pit_sharp = 1.0; */
L_temp = (Word32)gain_pit << 1;
if (L_temp != (Word32)((Word16) L_temp))
{
pit_sharp = (gain_pit > 0) ? MAX_16 : MIN_16;
}
else
{
pit_sharp = (Word16) L_temp;
}
}
/*-------------------------------------------------------*
* - Add the pitch contribution to code[]. *
*-------------------------------------------------------*/
for (i = T0; i < L_SUBFR; i++)
{
temp = mult(*(code + i - T0), pit_sharp, pOverflow);
*(code + i) = add(*(code + i), temp, pOverflow);
}
/*------------------------------------------------------------*
* - Decode codebook gain (MR122) or both pitch *
* gain and codebook gain (all others) *
* - Update pitch sharpening "sharp" with quantized gain_pit *
*------------------------------------------------------------*/
if (mode == MR475)
{
/* read and decode pitch and code gain */
if (evenSubfr != 0)
{
index_mr475 = *parm++; /* index of gain(s) */
}
if (bfi == 0)
{
Dec_gain(
&(st->pred_state),
mode,
index_mr475,
code,
evenSubfr,
&gain_pit,
&gain_code,
pOverflow);
}
else
{
ec_gain_pitch(
&(st->ec_gain_p_st),
st->state,
&gain_pit,
pOverflow);
ec_gain_code(
&(st->ec_gain_c_st),
&(st->pred_state),
st->state,
&gain_code,
pOverflow);
}
ec_gain_pitch_update(
&st->ec_gain_p_st,
bfi,
st->prev_bf,
&gain_pit,
pOverflow);
ec_gain_code_update(
&st->ec_gain_c_st,
bfi,
st->prev_bf,
&gain_code,
pOverflow);
pit_sharp = gain_pit;
if (pit_sharp > SHARPMAX)
{
pit_sharp = SHARPMAX;
}
}
else if ((mode <= MR74) || (mode == MR102))
{
/* read and decode pitch and code gain */
index = *parm++; /* index of gain(s) */
if (bfi == 0)
{
Dec_gain(
&(st->pred_state),
mode,
index,
code,
evenSubfr,
&gain_pit,
&gain_code,
pOverflow);
}
else
{
ec_gain_pitch(
&(st->ec_gain_p_st),
st->state,
&gain_pit,
pOverflow);
ec_gain_code(
&(st->ec_gain_c_st),
&(st->pred_state),
st->state,
&gain_code,
pOverflow);
}
ec_gain_pitch_update(
&(st->ec_gain_p_st),
bfi,
st->prev_bf,
&gain_pit,
pOverflow);
ec_gain_code_update(
&(st->ec_gain_c_st),
bfi,
st->prev_bf,
&gain_code,
pOverflow);
pit_sharp = gain_pit;
if (pit_sharp > SHARPMAX)
{
pit_sharp = SHARPMAX;
}
if (mode == MR102)
{
if (st->old_T0 > (L_SUBFR + 5))
{
if (pit_sharp < 0)
{
pit_sharp = ~((~pit_sharp) >> 2);
}
else
{
pit_sharp = pit_sharp >> 2;
}
}
}
}
else
{
/* read and decode pitch gain */
index = *parm++; /* index of gain(s) */
if (mode == MR795)
{
/* decode pitch gain */
if (bfi != 0)
{
ec_gain_pitch(
&(st->ec_gain_p_st),
st->state,
&gain_pit,
pOverflow);
}
else
{
gain_pit = d_gain_pitch(mode, index);
}
ec_gain_pitch_update(
&(st->ec_gain_p_st),
bfi,
st->prev_bf,
&gain_pit,
pOverflow);
/* read and decode code gain */
index = *parm++;
if (bfi == 0)
{
d_gain_code(
&(st->pred_state),
mode,
index,
code,
&gain_code,
pOverflow);
}
else
{
ec_gain_code(
&(st->ec_gain_c_st),
&(st->pred_state),
st->state,
&gain_code,
pOverflow);
}
ec_gain_code_update(
&(st->ec_gain_c_st),
bfi,
st->prev_bf,
&gain_code,
pOverflow);
pit_sharp = gain_pit;
if (pit_sharp > SHARPMAX)
{
pit_sharp = SHARPMAX;
}
}
else
{ /* MR122 */
if (bfi == 0)
{
d_gain_code(
&(st->pred_state),
mode,
index,
code,
&gain_code,
pOverflow);
}
else
{
ec_gain_code(
&(st->ec_gain_c_st),
&(st->pred_state),
st->state,
&gain_code,
pOverflow);
}
ec_gain_code_update(
&(st->ec_gain_c_st),
bfi,
st->prev_bf,
&gain_code,
pOverflow);
pit_sharp = gain_pit;
}
}
/* store pitch sharpening for next subframe */
/* (for modes which use the previous pitch gain for */
/* pitch sharpening in the search phase) */
/* do not update sharpening in even subframes for MR475 */
if ((mode != MR475) || (evenSubfr == 0))
{
st->sharp = gain_pit;
if (st->sharp > SHARPMAX)
{
st->sharp = SHARPMAX;
}
}
pit_sharp = shl(pit_sharp, 1, pOverflow);
if (pit_sharp > 16384)
{
for (i = 0; i < L_SUBFR; i++)
{
temp = mult(st->exc[i], pit_sharp, pOverflow);
L_temp = L_mult(temp, gain_pit, pOverflow);
if (mode == MR122)
{
if (L_temp < 0)
{
L_temp = ~((~L_temp) >> 1);
}
else
{
L_temp = L_temp >> 1;
}
}
*(excp + i) = pv_round(L_temp, pOverflow);
}
}
/*-------------------------------------------------------*
* - Store list of LTP gains needed in the source *
* characteristic detector (SCD) *
*-------------------------------------------------------*/
if (bfi == 0)
{
for (i = 0; i < 8; i++)
{
st->ltpGainHistory[i] = st->ltpGainHistory[i+1];
}
st->ltpGainHistory[8] = gain_pit;
}
/*-------------------------------------------------------*
* - Limit gain_pit if in background noise and BFI *
* for MR475, MR515, MR59 *
*-------------------------------------------------------*/
if ((st->prev_bf != 0 || bfi != 0) && st->inBackgroundNoise != 0 &&
((mode == MR475) || (mode == MR515) || (mode == MR59)))
{
if (gain_pit > 12288) /* if (gain_pit > 0.75) in Q14*/
{
gain_pit = ((gain_pit - 12288) >> 1) + 12288;
/* gain_pit = (gain_pit-0.75)/2.0 + 0.75; */
}
if (gain_pit > 14745) /* if (gain_pit > 0.90) in Q14*/
{
gain_pit = 14745;
}
}
/*-------------------------------------------------------*
* Calculate CB mixed gain *
*-------------------------------------------------------*/
Int_lsf(
prev_lsf,
st->lsfState.past_lsf_q,
i_subfr,
lsf_i,
pOverflow);
gain_code_mix =
Cb_gain_average(
&(st->Cb_gain_averState),
mode,
gain_code,
lsf_i,
st->lsp_avg_st.lsp_meanSave,
bfi,
st->prev_bf,
pdfi,
st->prev_pdf,
st->inBackgroundNoise,
st->voicedHangover,
pOverflow);
/* make sure that MR74, MR795, MR122 have original code_gain*/
if ((mode > MR67) && (mode != MR102))
/* MR74, MR795, MR122 */
{
gain_code_mix = gain_code;
}
/*-------------------------------------------------------*
* - Find the total excitation. *
* - Find synthesis speech corresponding to st->exc[]. *
*-------------------------------------------------------*/
if (mode <= MR102) /* MR475, MR515, MR59, MR67, MR74, MR795, MR102*/
{
pitch_fac = gain_pit;
tmp_shift = 1;
}
else /* MR122 */
{
if (gain_pit < 0)
{
pitch_fac = ~((~gain_pit) >> 1);
}
else
{
pitch_fac = gain_pit >> 1;
}
tmp_shift = 2;
}
/* copy unscaled LTP excitation to exc_enhanced (used in phase
* dispersion below) and compute total excitation for LTP feedback
*/
for (i = 0; i < L_SUBFR; i++)
{
exc_enhanced[i] = st->exc[i];
/* st->exc[i] = gain_pit*st->exc[i] + gain_code*code[i]; */
L_temp = L_mult(st->exc[i], pitch_fac, pOverflow);
/* 12.2: Q0 * Q13 */
/* 7.4: Q0 * Q14 */
L_temp = L_mac(L_temp, code[i], gain_code, pOverflow);
/* 12.2: Q12 * Q1 */
/* 7.4: Q13 * Q1 */
L_temp = L_shl(L_temp, tmp_shift, pOverflow); /* Q16 */
st->exc[i] = pv_round(L_temp, pOverflow);
}
/*-------------------------------------------------------*
* - Adaptive phase dispersion *
*-------------------------------------------------------*/
ph_disp_release(&(st->ph_disp_st)); /* free phase dispersion adaption */
if (((mode == MR475) || (mode == MR515) || (mode == MR59)) &&
(st->voicedHangover > 3) && (st->inBackgroundNoise != 0) &&
(bfi != 0))
{
ph_disp_lock(&(st->ph_disp_st)); /* Always Use full Phase Disp. */
} /* if error in bg noise */
/* apply phase dispersion to innovation (if enabled) and
compute total excitation for synthesis part */
ph_disp(
&(st->ph_disp_st),
mode,
exc_enhanced,
gain_code_mix,
gain_pit,
code,
pitch_fac,
tmp_shift,
pOverflow);
/*-------------------------------------------------------*
* - The Excitation control module is active during BFI. *
* - Conceal drops in signal energy if in bg noise. *
*-------------------------------------------------------*/
L_temp = 0;
for (i = 0; i < L_SUBFR; i++)
{
L_temp = L_mac(L_temp, *(exc_enhanced + i), *(exc_enhanced + i), pOverflow);
}
/* excEnergy = sqrt(L_temp) in Q0 */
if (L_temp < 0)
{
L_temp = ~((~L_temp) >> 1);
}
else
{
L_temp = L_temp >> 1;
}
L_temp = sqrt_l_exp(L_temp, &temp, pOverflow);
/* To cope with 16-bit and scaling in ex_ctrl() */
L_temp = L_shr(L_temp, (Word16)((temp >> 1) + 15), pOverflow);
if (L_temp < 0)
{
excEnergy = (Word16)(~((~L_temp) >> 2));
}
else
{
excEnergy = (Word16)(L_temp >> 2);
}
if (((mode == MR475) || (mode == MR515) || (mode == MR59)) &&
(st->voicedHangover > 5) && (st->inBackgroundNoise != 0) &&
(st->state < 4) &&
((pdfi != 0 && st->prev_pdf != 0) || bfi != 0 || st->prev_bf != 0))
{
carefulFlag = 0;
if (pdfi != 0 && bfi == 0)
{
carefulFlag = 1;
}
Ex_ctrl(exc_enhanced,
excEnergy,
st->excEnergyHist,
st->voicedHangover,
st->prev_bf,
carefulFlag, pOverflow);
}
if (!((st->inBackgroundNoise != 0) && (bfi != 0 || st->prev_bf != 0) &&
(st->state < 4)))
{
/* Update energy history for all modes */
for (i = 0; i < 8; i++)
{
st->excEnergyHist[i] = st->excEnergyHist[i+1];
}
st->excEnergyHist[8] = excEnergy;
}
/*-------------------------------------------------------*
* Excitation control module end. *
*-------------------------------------------------------*/
if (pit_sharp > 16384)
{
for (i = 0; i < L_SUBFR; i++)
{
*(excp + i) = add(*(excp + i), *(exc_enhanced + i), pOverflow);
}
agc2(exc_enhanced, excp, L_SUBFR, pOverflow);
*pOverflow = 0;
Syn_filt(Az, excp, &synth[i_subfr], L_SUBFR,
st->mem_syn, 0);
}
else
{
*pOverflow = 0;
Syn_filt(Az, exc_enhanced, &synth[i_subfr], L_SUBFR,
st->mem_syn, 0);
}
if (*pOverflow != 0) /* Test for overflow */
{
for (i = PIT_MAX + L_INTERPOL + L_SUBFR - 1; i >= 0; i--)
{
if (st->old_exc[i] < 0)
{
st->old_exc[i] = ~((~st->old_exc[i]) >> 2);
}
else
{
st->old_exc[i] = st->old_exc[i] >> 2;
}
}
for (i = L_SUBFR - 1; i >= 0; i--)
{
if (*(exc_enhanced + i) < 0)
{
*(exc_enhanced + i) = ~((~(*(exc_enhanced + i))) >> 2);
}
else
{
*(exc_enhanced + i) = *(exc_enhanced + i) >> 2;
}
}
Syn_filt(Az, exc_enhanced, &synth[i_subfr], L_SUBFR, st->mem_syn, 1);
}
else
{
Copy(&synth[i_subfr+L_SUBFR-M], st->mem_syn, M);
}
/*--------------------------------------------------*
* Update signal for next frame. *
* -> shift to the left by L_SUBFR st->exc[] *
*--------------------------------------------------*/
Copy(&st->old_exc[L_SUBFR], &st->old_exc[0], PIT_MAX + L_INTERPOL);
/* interpolated LPC parameters for next subframe */
Az += MP1;
/* store T0 for next subframe */
st->old_T0 = T0;
}
/*-------------------------------------------------------*
* Call the Source Characteristic Detector which updates *
* st->inBackgroundNoise and st->voicedHangover. *
*-------------------------------------------------------*/
st->inBackgroundNoise =
Bgn_scd(
&(st->background_state),
&(st->ltpGainHistory[0]),
&(synth[0]),
&(st->voicedHangover),
pOverflow);
dtx_dec_activity_update(
&(st->dtxDecoderState),
st->lsfState.past_lsf_q,
synth,
pOverflow);
/* store bfi for next subframe */
st->prev_bf = bfi;
st->prev_pdf = pdfi;
/*--------------------------------------------------*
* Calculate the LSF averages on the eight *
* previous frames *
*--------------------------------------------------*/
lsp_avg(
&(st->lsp_avg_st),
st->lsfState.past_lsf_q,
pOverflow);
the_end:
st->dtxDecoderState.dtxGlobalState = newDTXState;
// return(0);
}
| {
"pile_set_name": "Github"
} |
author=%1
author_website=%2
icon=logo.svg
name=%3
supported_platforms=%4
url=%5
version=1.0
| {
"pile_set_name": "Github"
} |
64-bit Windows on AMD64/Intel EM64T is somewhat supported in the 7.0
release. A collector can be built with Microsoft Visual C++ 2005.
The resulting test programs have been known to work at least once.
More testing would clearly be helpful.
Currently only NT_X64_STATIC_THREADS_MAKEFILE has been used in
this environment. Copy this file to MAKEFILE, and then type "nmake"
in a Visual C++ command line window to build the static library
and the usual test programs. To verify that the collector is
at least somewhat functional, run gctest.exe. This should create
gctest.exe.log after a few seconds.
This process is completely analogous to NT_STATIC_THREADS_MAKEFILE
for the 32-bit version.
Note that currently a few warnings are still generated by default,
and a number of others have been explicitly turned off in the makefile.
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#import "WXPBGeneratedMessage.h"
@class NSMutableArray, NSString;
@interface Log_IMOperation : WXPBGeneratedMessage
{
}
+ (void)initialize;
// Remaining properties
@property(nonatomic) int clientVersion; // @dynamic clientVersion;
@property(retain, nonatomic) NSString *currChatName; // @dynamic currChatName;
@property(nonatomic) int device; // @dynamic device;
@property(nonatomic) int ds; // @dynamic ds;
@property(nonatomic) int importDs; // @dynamic importDs;
@property(retain, nonatomic) NSMutableArray *oplist; // @dynamic oplist;
@property(nonatomic) long long timeStamp; // @dynamic timeStamp;
@property(nonatomic) unsigned long long uin; // @dynamic uin;
@end
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 25 2017 03:49:04).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <Cocoa/NSDatePicker.h>
@interface NSDatePicker (PhotoDateAdditions)
- (void)setPhotoDate:(double)arg1;
- (double)photoDate;
@end
| {
"pile_set_name": "Github"
} |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1
import (
serializer "k8s.io/apimachinery/pkg/runtime/serializer"
rest "k8s.io/client-go/rest"
v1 "k8s.io/code-generator/_examples/apiserver/apis/example2/v1"
"k8s.io/code-generator/_examples/apiserver/clientset/versioned/scheme"
)
type SecondExampleV1Interface interface {
RESTClient() rest.Interface
TestTypesGetter
}
// SecondExampleV1Client is used to interact with features provided by the example.test.apiserver.code-generator.k8s.io group.
type SecondExampleV1Client struct {
restClient rest.Interface
}
func (c *SecondExampleV1Client) TestTypes(namespace string) TestTypeInterface {
return newTestTypes(c, namespace)
}
// NewForConfig creates a new SecondExampleV1Client for the given config.
func NewForConfig(c *rest.Config) (*SecondExampleV1Client, error) {
config := *c
if err := setConfigDefaults(&config); err != nil {
return nil, err
}
client, err := rest.RESTClientFor(&config)
if err != nil {
return nil, err
}
return &SecondExampleV1Client{client}, nil
}
// NewForConfigOrDie creates a new SecondExampleV1Client for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *SecondExampleV1Client {
client, err := NewForConfig(c)
if err != nil {
panic(err)
}
return client
}
// New creates a new SecondExampleV1Client for the given RESTClient.
func New(c rest.Interface) *SecondExampleV1Client {
return &SecondExampleV1Client{c}
}
func setConfigDefaults(config *rest.Config) error {
gv := v1.SchemeGroupVersion
config.GroupVersion = &gv
config.APIPath = "/apis"
config.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: scheme.Codecs}
if config.UserAgent == "" {
config.UserAgent = rest.DefaultKubernetesUserAgent()
}
return nil
}
// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *SecondExampleV1Client) RESTClient() rest.Interface {
if c == nil {
return nil
}
return c.restClient
}
| {
"pile_set_name": "Github"
} |
---
id: "json-file-loader"
title: "@graphql-tools/json-file-loader"
sidebar_label: "json-file-loader"
---
### Classes
* [JsonFileLoader](/docs/api/classes/_loaders_json_file_src_index_.jsonfileloader)
### Interfaces
* [JsonFileLoaderOptions](/docs/api/interfaces/_loaders_json_file_src_index_.jsonfileloaderoptions)
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<project
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>me.zhyd.springboot</groupId>
<artifactId>springboot-learning</artifactId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<artifactId>springboot-servlet</artifactId>
<name>springboot-servlet</name>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
unmanic.__init__.py
Written by: Josh.5 <[email protected]>
Date: 21 February 2017, (11:11 AM)
Copyright:
Copyright (C) Josh Sunnex - All Rights Reserved
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
import warnings
from .unlogger import UnmanicLogger
__all__ = (
'UnmanicLogger',
)
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2003-2020 Rony Shapiro <[email protected]>.
* All rights reserved. Use of the code is allowed under the
* Artistic License 2.0 terms, as specified in the LICENSE file
* distributed with this code, or available from
* http://www.opensource.org/licenses/artistic-license-2.0.php
*/
#ifndef __MFILTERSAX2HANDLERS_H
#define __MFILTERSAX2HANDLERS_H
// MFilterSAX2Handlers.h : header file
//
#include "../../PWSFilters.h"
class PWSFilters;
// MSXML includes
#include <msxml6.h>
// -----------------------------------------------------------------------
class MFilterSAX2ErrorHandler: public ISAXErrorHandler
{
public:
// Local variables and functions
stringT m_strValidationResult;
BOOL bErrorsFound;
// Standard functions
MFilterSAX2ErrorHandler();
virtual ~MFilterSAX2ErrorHandler();
virtual HRESULT STDMETHODCALLTYPE error(
/* [in] */ struct ISAXLocator * pLocator,
/* [in] */ const wchar_t * pwchErrorMessage,
/* [in] */ HRESULT hrErrorCode);
// This must be correctly implemented, if your handler must be a COM Object
// the current implementation is NOT thread-safe
long __stdcall QueryInterface(const struct _GUID &,void ** );
unsigned long __stdcall AddRef(void);
unsigned long __stdcall Release(void);
virtual HRESULT STDMETHODCALLTYPE fatalError(
/* [in] */ struct ISAXLocator * pLocator,
/* [in] */ const wchar_t * pwchErrorMessage,
/* [in] */ HRESULT hrErrorCode);
virtual HRESULT STDMETHODCALLTYPE ignorableWarning(
/* [in] */ struct ISAXLocator * pLocator,
/* [in] */ const wchar_t * pwchErrorMessage,
/* [in] */HRESULT hrErrorCode);
private:
// REQUIRED variable
ULONG m_refCnt;
};
// -----------------------------------------------------------------------
class MFilterSAX2ContentHandler: public ISAXContentHandler
{
public:
// Local variables & function
stringT m_strXMLErrors;
PWSFilters *m_MapXMLFilters; // So as not to confuse with UI & core
FilterPool m_FPool;
int m_type;
// Standard functions
MFilterSAX2ContentHandler();
virtual ~MFilterSAX2ContentHandler();
void SetVariables(Asker *pAsker, PWSFilters *mapfilters, const FilterPool fpool,
const bool &bValidation)
{m_pAsker = pAsker; m_MapXMLFilters = mapfilters, m_FPool = fpool; m_bValidation = bValidation;}
void SetSchemaVersion(BSTR *schema_version)
{m_pSchema_Version = schema_version;}
// This must be correctly implemented, if your handler must be a COM Object
// the current implementation is NOT thread-safe
long __stdcall QueryInterface(const struct _GUID &,void ** );
unsigned long __stdcall AddRef(void);
unsigned long __stdcall Release(void);
virtual HRESULT STDMETHODCALLTYPE putDocumentLocator(
/* [in] */ ISAXLocator __RPC_FAR *pLocator);
virtual HRESULT STDMETHODCALLTYPE startDocument(void);
virtual HRESULT STDMETHODCALLTYPE endDocument(void);
virtual HRESULT STDMETHODCALLTYPE startPrefixMapping(
/* [in] */ const wchar_t __RPC_FAR *pwchPrefix,
/* [in] */ int cchPrefix,
/* [in] */ const wchar_t __RPC_FAR *pwchUri,
/* [in] */ int cchUri);
virtual HRESULT STDMETHODCALLTYPE endPrefixMapping(
/* [in] */ const wchar_t __RPC_FAR *pwchPrefix,
/* [in] */ int cchPrefix);
virtual HRESULT STDMETHODCALLTYPE startElement(
/* [in] */ const wchar_t __RPC_FAR *pwchNamespaceUri,
/* [in] */ int cchNamespaceUri,
/* [in] */ const wchar_t __RPC_FAR *pwchLocalName,
/* [in] */ int cchLocalName,
/* [in] */ const wchar_t __RPC_FAR *pwchRawName,
/* [in] */ int cchRawName,
/* [in] */ ISAXAttributes __RPC_FAR *pAttributes);
virtual HRESULT STDMETHODCALLTYPE endElement(
/* [in] */ const wchar_t __RPC_FAR *pwchNamespaceUri,
/* [in] */ int cchNamespaceUri,
/* [in] */ const wchar_t __RPC_FAR *pwchLocalName,
/* [in] */ int cchLocalName,
/* [in] */ const wchar_t __RPC_FAR *pwchRawName,
/* [in] */ int cchRawName);
virtual HRESULT STDMETHODCALLTYPE characters(
/* [in] */ const wchar_t __RPC_FAR *pwchChars,
/* [in] */ int cchChars);
virtual HRESULT STDMETHODCALLTYPE ignorableWhitespace(
/* [in] */ const wchar_t __RPC_FAR *pwchChars,
/* [in] */ int cchChars);
virtual HRESULT STDMETHODCALLTYPE processingInstruction(
/* [in] */ const wchar_t __RPC_FAR *pwchTarget,
/* [in] */ int cchTarget,
/* [in] */ const wchar_t __RPC_FAR *pwchData,
/* [in] */ int cchData);
virtual HRESULT STDMETHODCALLTYPE skippedEntity(
/* [in] */ const wchar_t __RPC_FAR *pwchName,
/* [in] */ int cchName);
private:
// Local variables
st_filters *cur_filter;
st_FilterRow *cur_filterentry;
Asker *m_pAsker;
StringX m_sxElemContent;
BSTR * m_pSchema_Version;
int m_iXMLVersion, m_iSchemaVersion;
bool m_bEntryBeingProcessed;
bool m_bValidation;
// REQUIRED variable
ULONG m_refCnt;
};
#endif /* __MFILTERSAX2HANDLERS_H */
| {
"pile_set_name": "Github"
} |
{
"compilerOptions": {
"target": "es5",
"module": "es2015",
"sourceMap": false,
"moduleResolution": "node",
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"declaration": true,
"outDir": "../component",
"lib": ["es2015", "dom"]
},
"angularCompilerOptions": {
"skipTemplateCodegen": true
}
}
| {
"pile_set_name": "Github"
} |
// Protocol Buffers for Go with Gadgets
//
// Copyright (c) 2018, The GoGo Authors. All rights reserved.
// http://github.com/gogo/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package proto
type float64Value struct {
Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *float64Value) Reset() { *m = float64Value{} }
func (*float64Value) ProtoMessage() {}
func (*float64Value) String() string { return "float64<string>" }
type float32Value struct {
Value float32 `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *float32Value) Reset() { *m = float32Value{} }
func (*float32Value) ProtoMessage() {}
func (*float32Value) String() string { return "float32<string>" }
type int64Value struct {
Value int64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *int64Value) Reset() { *m = int64Value{} }
func (*int64Value) ProtoMessage() {}
func (*int64Value) String() string { return "int64<string>" }
type uint64Value struct {
Value uint64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *uint64Value) Reset() { *m = uint64Value{} }
func (*uint64Value) ProtoMessage() {}
func (*uint64Value) String() string { return "uint64<string>" }
type int32Value struct {
Value int32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *int32Value) Reset() { *m = int32Value{} }
func (*int32Value) ProtoMessage() {}
func (*int32Value) String() string { return "int32<string>" }
type uint32Value struct {
Value uint32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *uint32Value) Reset() { *m = uint32Value{} }
func (*uint32Value) ProtoMessage() {}
func (*uint32Value) String() string { return "uint32<string>" }
type boolValue struct {
Value bool `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *boolValue) Reset() { *m = boolValue{} }
func (*boolValue) ProtoMessage() {}
func (*boolValue) String() string { return "bool<string>" }
type stringValue struct {
Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *stringValue) Reset() { *m = stringValue{} }
func (*stringValue) ProtoMessage() {}
func (*stringValue) String() string { return "string<string>" }
type bytesValue struct {
Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *bytesValue) Reset() { *m = bytesValue{} }
func (*bytesValue) ProtoMessage() {}
func (*bytesValue) String() string { return "[]byte<string>" }
func init() {
RegisterType((*float64Value)(nil), "gogo.protobuf.proto.DoubleValue")
RegisterType((*float32Value)(nil), "gogo.protobuf.proto.FloatValue")
RegisterType((*int64Value)(nil), "gogo.protobuf.proto.Int64Value")
RegisterType((*uint64Value)(nil), "gogo.protobuf.proto.UInt64Value")
RegisterType((*int32Value)(nil), "gogo.protobuf.proto.Int32Value")
RegisterType((*uint32Value)(nil), "gogo.protobuf.proto.UInt32Value")
RegisterType((*boolValue)(nil), "gogo.protobuf.proto.BoolValue")
RegisterType((*stringValue)(nil), "gogo.protobuf.proto.StringValue")
RegisterType((*bytesValue)(nil), "gogo.protobuf.proto.BytesValue")
}
| {
"pile_set_name": "Github"
} |
/******************************************************************************
* Copyright (c) 2004, 2008 IBM Corporation
* All rights reserved.
* This program and the accompanying materials
* are made available under the terms of the BSD License
* which accompanies this distribution, and is available at
* http://www.opensource.org/licenses/bsd-license.php
*
* Contributors:
* IBM Corporation - initial implementation
*****************************************************************************/
/*>>>>>>>>>>>>>>>>>>>>>>> DEFINITIONS & DECLARATIONS <<<<<<<<<<<<<<<<<<<<*/
#include <udp.h>
#include <sys/socket.h>
#include <dhcp.h>
//#include <dhcpv6.h>
#include <dns.h>
#ifdef USE_MTFTP
#include <mtftp.h>
#else
#include <tftp.h>
#endif
/*>>>>>>>>>>>>>>>>>>>>>>>>>>>>> LOCAL VARIABLES <<<<<<<<<<<<<<<<<<<<<<<<<*/
#ifdef USE_MTFTP
uint16_t net_tftp_uport;
uint16_t net_mtftp_uport;
void net_set_tftp_port(uint16_t tftp_port) {
net_tftp_uport = tftp_port;
}
void net_set_mtftp_port(uint16_t tftp_port) {
net_mtftp_uport = tftp_port;
}
#endif
/*>>>>>>>>>>>>>>>>>>>>>>>>>>>>> IMPLEMENTATION <<<<<<<<<<<<<<<<<<<<<<<<<<*/
/**
* NET: Handles UDP-packets according to Receive-handle diagram.
*
* @param udp_packet UDP-packet to be handled
* @param packetsize Length of the packet
* @return ZERO - packet handled successfully;
* NON ZERO - packet was not handled (e.g. bad format)
* @see receive_ether
* @see udphdr
*/
int8_t
handle_udp(uint8_t * udp_packet, int32_t packetsize) {
struct udphdr * udph = (struct udphdr *) udp_packet;
if (packetsize < sizeof(struct udphdr))
return -1; // packet is too small
switch (htons(udph -> uh_dport)) {
case UDPPORT_BOOTPC:
if (udph -> uh_sport == htons(UDPPORT_BOOTPS))
return handle_dhcp(udp_packet + sizeof(struct udphdr),
packetsize - sizeof(struct udphdr));
else
return -1;
case UDPPORT_DNSC:
if (udph -> uh_sport == htons(UDPPORT_DNSS))
return handle_dns(udp_packet + sizeof(struct udphdr),
packetsize - sizeof(struct udphdr));
else
return -1;
/*
case UDPPORT_DHCPV6C:
return handle_dhcpv6(udp_packet+sizeof(struct udphdr),
packetsize - sizeof(struct udphdr));
*/
case UDPPORT_TFTPC:
#ifdef USE_MTFTP
return handle_tftp(udp_packet + sizeof(struct udphdr),
packetsize - sizeof(struct udphdr));
#else
return handle_tftp(udp_packet, packetsize);
#endif
default:
#ifdef USE_MTFTP
if (htons(udph -> uh_dport) == net_tftp_uport)
return handle_tftp(udp_packet + sizeof(struct udphdr),
packetsize - sizeof(struct udphdr));
else if (htons(udph -> uh_dport) == net_mtftp_uport)
return handle_tftp(udp_packet + sizeof(struct udphdr),
packetsize - sizeof(struct udphdr));
#endif
return -1;
}
}
/**
* NET: This function handles situation when "Destination unreachable"
* ICMP-error occurs during sending UDP-packet.
*
* @param udp_packet  original UDP-packet
* @param packetsize  length of the packet
* @param err_code    Error Code (e.g. "Host unreachable")
* @see handle_icmp
*/
void
handle_udp_dun(uint8_t * udp_packet, uint32_t packetsize, uint8_t err_code) {
struct udphdr * udph = (struct udphdr *) udp_packet;
if (packetsize < sizeof(struct udphdr))
return; // packet is too small
switch (htons(udph -> uh_sport)) {
case UDPPORT_TFTPC:
handle_tftp_dun(err_code);
break;
}
}
/**
* NET: Creates UDP-packet. Places UDP-header in a packet and fills it
* with corresponding information.
* <p>
* Use this function with similar functions for other network layers
* (fill_ethhdr, fill_iphdr, fill_dnshdr, fill_btphdr).
*
* @param packet Points to the place where UDP-header must be placed.
* @param packetsize Size of the packet in bytes incl. this hdr and data.
* @param src_port UDP source port
* @param dest_port UDP destination port
* @see udphdr
* @see fill_ethhdr
* @see fill_iphdr
* @see fill_dnshdr
* @see fill_btphdr
*/
void
fill_udphdr(uint8_t * packet, uint16_t packetsize,
uint16_t src_port, uint16_t dest_port) {
struct udphdr * udph = (struct udphdr *) packet;
udph -> uh_sport = htons(src_port);
udph -> uh_dport = htons(dest_port);
udph -> uh_ulen = htons(packetsize);
udph -> uh_sum = htons(0);
}
| {
"pile_set_name": "Github"
} |
import numpy as np
import scipy.interpolate
import scipy.ndimage
def rebin2D(a, newdims, method='linear', centre=False, minusone=False):
'''Arbitrary resampling of source array to new dimension sizes.
Currently only supports maintaining the same number of dimensions.
To use 1-D arrays, first promote them to shape (x,1).
Uses the same parameters and creates the same co-ordinate lookup points
    as IDL's congrid routine, which apparently originally came from a VAX/VMS
routine of the same name.
method:
neighbour - closest value from original data
nearest and linear - uses n x 1-D interpolations using
scipy.interpolate.interp1d
(see Numerical Recipes for validity of use of n 1-D interpolations)
spline - uses ndimage.map_coordinates
centre:
True - interpolation points are at the centres of the bins
False - points are at the front edge of the bin
minusone:
For example- inarray.shape = (i,j) & new dimensions = (x,y)
False - inarray is resampled by factors of (i/x) * (j/y)
True - inarray is resampled by(i-1)/(x-1) * (j-1)/(y-1)
This prevents extrapolation one element beyond bounds of input array.
'''
if not a.dtype in [np.float64, np.float32]:
a = np.cast[float](a)
m1 = np.cast[int](minusone)
ofs = np.cast[int](centre) * 0.5
old = np.array(a.shape)
ndims = len(a.shape)
if len(newdims) != ndims:
        print("[congrid] dimensions error. "
              "This routine currently only supports "
              "rebinning to the same number of dimensions.")
return None
newdims = np.asarray(newdims, dtype=float)
dimlist = []
if method == 'neighbour':
for i in range(ndims):
            base = np.indices(newdims.astype(int))[i]
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
cd = np.array( dimlist ).round().astype(int)
        newa = a[tuple(cd)]
return newa
elif method in ['nearest','linear']:
# calculate new dims
for i in range(ndims):
base = np.arange( newdims[i] )
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
# specify old dims
        olddims = [np.arange(i, dtype=float) for i in list(a.shape)]
# first interpolation - for ndims = any
mint = scipy.interpolate.interp1d(olddims[-1], a, kind=method)
newa = mint(dimlist[-1])
        trorder = [ndims - 1] + list(range(ndims - 1))
for i in range( ndims - 2, -1, -1 ):
newa = newa.transpose(trorder)
mint = scipy.interpolate.interp1d(olddims[i], newa, kind=method)
newa = mint(dimlist[i])
if ndims > 1:
# need one more transpose to return to original dimensions
newa = newa.transpose(trorder)
return newa
elif method in ['spline']:
oslices = [slice(0,j) for j in old]
        oldcoords = np.ogrid[oslices]
nslices = [slice(0,j) for j in list(newdims)]
        newcoords = np.mgrid[nslices]
        newcoords_dims = list(range(newcoords.ndim))
#make first index last
newcoords_dims.append(newcoords_dims.pop(0))
newcoords_tr = newcoords.transpose(newcoords_dims)
# makes a view that affects newcoords
newcoords_tr += ofs
        deltas = (np.asarray(old) - m1) / (newdims - m1)
newcoords_tr *= deltas
newcoords_tr -= ofs
newa = scipy.ndimage.map_coordinates(a, newcoords)
return newa
else:
print("Congrid error: Unrecognized interpolation type.\n "
"Currently only \'neighbour\', \'nearest\',\'linear\', "
"and \'spline\' are supported.")
return None
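
# --- Illustrative usage sketch (not part of the original module) ------------
# Downsamples a 4x4 array to 2x2 with the 'linear' method. With the defaults
# centre=False and minusone=False, the new grid samples the source at rows and
# columns 0 and 2, so the printed result should be [[0., 2.], [8., 10.]].
# This assumes a NumPy/SciPy version old enough to still provide np.cast and
# scipy.interpolate.interp1d, which the function body above relies on.
if __name__ == '__main__':
    demo = np.arange(16, dtype=np.float64).reshape(4, 4)
    print(rebin2D(demo, (2, 2), method='linear'))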
| {
"pile_set_name": "Github"
} |
// Created by cgo -godefs - DO NOT EDIT
// cgo -godefs types_openbsd.go
package pty
type ptmget struct {
Cfd int32
Sfd int32
Cn [16]int8
Sn [16]int8
}
var ioctl_PTMGET = 0x40287401
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: c1691582d0b66dd418d76b806a774e7d
TrueTypeFontImporter:
serializedVersion: 2
fontSize: 16
forceTextureCase: -2
characterSpacing: 1
characterPadding: 0
includeFontData: 1
use2xBehaviour: 0
fontNames: []
customCharacters:
fontRenderingMode: 0
userData:
| {
"pile_set_name": "Github"
} |
package cdn
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// SetDomainServerCertificate invokes the cdn.SetDomainServerCertificate API synchronously
func (client *Client) SetDomainServerCertificate(request *SetDomainServerCertificateRequest) (response *SetDomainServerCertificateResponse, err error) {
response = CreateSetDomainServerCertificateResponse()
err = client.DoAction(request, response)
return
}
// SetDomainServerCertificateWithChan invokes the cdn.SetDomainServerCertificate API asynchronously
func (client *Client) SetDomainServerCertificateWithChan(request *SetDomainServerCertificateRequest) (<-chan *SetDomainServerCertificateResponse, <-chan error) {
responseChan := make(chan *SetDomainServerCertificateResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.SetDomainServerCertificate(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// SetDomainServerCertificateWithCallback invokes the cdn.SetDomainServerCertificate API asynchronously
func (client *Client) SetDomainServerCertificateWithCallback(request *SetDomainServerCertificateRequest, callback func(response *SetDomainServerCertificateResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *SetDomainServerCertificateResponse
var err error
defer close(result)
response, err = client.SetDomainServerCertificate(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// SetDomainServerCertificateRequest is the request struct for api SetDomainServerCertificate
type SetDomainServerCertificateRequest struct {
*requests.RpcRequest
ServerCertificate string `position:"Query" name:"ServerCertificate"`
PrivateKey string `position:"Query" name:"PrivateKey"`
ServerCertificateStatus string `position:"Query" name:"ServerCertificateStatus"`
SecurityToken string `position:"Query" name:"SecurityToken"`
CertType string `position:"Query" name:"CertType"`
ForceSet string `position:"Query" name:"ForceSet"`
CertName string `position:"Query" name:"CertName"`
DomainName string `position:"Query" name:"DomainName"`
OwnerId requests.Integer `position:"Query" name:"OwnerId"`
}
// SetDomainServerCertificateResponse is the response struct for api SetDomainServerCertificate
type SetDomainServerCertificateResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
}
// CreateSetDomainServerCertificateRequest creates a request to invoke SetDomainServerCertificate API
func CreateSetDomainServerCertificateRequest() (request *SetDomainServerCertificateRequest) {
request = &SetDomainServerCertificateRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Cdn", "2018-05-10", "SetDomainServerCertificate", "", "")
request.Method = requests.POST
return
}
// CreateSetDomainServerCertificateResponse creates a response to parse from SetDomainServerCertificate response
func CreateSetDomainServerCertificateResponse() (response *SetDomainServerCertificateResponse) {
response = &SetDomainServerCertificateResponse{
BaseResponse: &responses.BaseResponse{},
}
return
}
| {
"pile_set_name": "Github"
} |
## CNCF Graduated Project Logos
*Note: GitHub Flavored Markdown used in the Readme doesn't support background colors. The white logos below are displayed on the light grey of tables.*
#### Kubernetes Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/kubernetes/horizontal/color/kubernetes-horizontal-color.png" width="200"></td>
<td><img src="/projects/kubernetes/stacked/color/kubernetes-stacked-color.png" width="95"></td>
<td><img src="/projects/kubernetes/icon/color/kubernetes-icon-color.png" width="75"></td>
<td><img src="/projects/kubernetes/horizontal/color/kubernetes-horizontal-color.svg" width="200"></td>
<td><img src="/projects/kubernetes/stacked/color/kubernetes-stacked-color.svg" width="95"></td>
<td><img src="/projects/kubernetes/icon/color/kubernetes-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/kubernetes/horizontal/black/kubernetes-horizontal-black.png" width="200"></td>
<td><img src="/projects/kubernetes/stacked/black/kubernetes-stacked-black.png" width="95"></td>
<td><img src="/projects/kubernetes/icon/black/kubernetes-icon-black.png" width="75"></td>
<td><img src="/projects/kubernetes/horizontal/black/kubernetes-horizontal-black.svg" width="200"></td>
<td><img src="/projects/kubernetes/stacked/black/kubernetes-stacked-black.svg" width="95"></td>
<td><img src="/projects/kubernetes/icon/black/kubernetes-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/kubernetes/horizontal/white/kubernetes-horizontal-white.png" width="200"></td>
<td><img src="/projects/kubernetes/stacked/white/kubernetes-stacked-white.png" width="95"></td>
<td><img src="/projects/kubernetes/icon/white/kubernetes-icon-white.png" width="75"></td>
<td><img src="/projects/kubernetes/horizontal/white/kubernetes-horizontal-white.svg" width="200"></td>
<td><img src="/projects/kubernetes/stacked/white/kubernetes-stacked-white.svg" width="95"></td>
<td><img src="/projects/kubernetes/icon/white/kubernetes-icon-white.svg" width="75"></td>
</tr>
<tr>
<th>all blue</th>
<td><img src="/projects/kubernetes/horizontal/all-blue-color/kubernetes-horizontal-all-blue-color.png" width="200"></td>
<td><img src="/projects/kubernetes/stacked/all-blue-color/kubernetes-stacked-all-blue-color.png" width="95"></td>
<td><img src="/projects/kubernetes/icon/color/kubernetes-icon-color.png" width="75"></td>
<td><img src="/projects/kubernetes/horizontal/all-blue-color/kubernetes-horizontal-all-blue-color.svg" width="200"></td>
<td><img src="/projects/kubernetes/stacked/all-blue-color/kubernetes-stacked-all-blue-color.svg" width="95"></td>
<td><img src="/projects/kubernetes/icon/color/kubernetes-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>white text</th>
<td><img src="/projects/kubernetes/horizontal/white-text/kubernetes-horizontal-white-text.png" width="200"></td>
<td><img src="/projects/kubernetes/stacked/white-text/kubernetes-stacked-white-text.png" width="95"></td>
<td><img src="/projects/kubernetes/icon/color/kubernetes-icon-color.png" width="75"></td>
<td><img src="/projects/kubernetes/horizontal/white-text/kubernetes-horizontal-white-text.svg" width="200"></td>
<td><img src="/projects/kubernetes/stacked/white-text/kubernetes-stacked-white-text.svg" width="95"></td>
<td><img src="/projects/kubernetes/icon/color/kubernetes-icon-color.svg" width="75"></td>
</tr>
</table>
#### Prometheus Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/prometheus/horizontal/color/prometheus-horizontal-color.png" width="200"></td>
<td><img src="/projects/prometheus/stacked/color/prometheus-stacked-color.png" width="95"></td>
<td><img src="/projects/prometheus/icon/color/prometheus-icon-color.png" width="75"></td>
<td><img src="/projects/prometheus/horizontal/color/prometheus-horizontal-color.svg" width="200"></td>
<td><img src="/projects/prometheus/stacked/color/prometheus-stacked-color.svg" width="95"></td>
<td><img src="/projects/prometheus/icon/color/prometheus-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/prometheus/horizontal/black/prometheus-horizontal-black.png" width="200"></td>
<td><img src="/projects/prometheus/stacked/black/prometheus-stacked-black.png" width="95"></td>
<td><img src="/projects/prometheus/icon/black/prometheus-icon-black.png" width="75"></td>
<td><img src="/projects/prometheus/horizontal/black/prometheus-horizontal-black.svg" width="200"></td>
<td><img src="/projects/prometheus/stacked/black/prometheus-stacked-black.svg" width="95"></td>
<td><img src="/projects/prometheus/icon/black/prometheus-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/prometheus/horizontal/white/prometheus-horizontal-white.png" width="200"></td>
<td><img src="/projects/prometheus/stacked/white/prometheus-stacked-white.png" width="95"></td>
<td><img src="/projects/prometheus/icon/white/prometheus-icon-white.png" width="75"></td>
<td><img src="/projects/prometheus/horizontal/white/prometheus-horizontal-white.svg" width="200"></td>
<td><img src="/projects/prometheus/stacked/white/prometheus-stacked-white.svg" width="95"></td>
<td><img src="/projects/prometheus/icon/white/prometheus-icon-white.svg" width="75"></td>
</tr>
</table>
#### Envoy Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/envoy/horizontal/color/envoy-horizontal-color.png" width="200"></td>
<td><img src="/projects/envoy/stacked/color/envoy-stacked-color.png" width="95"></td>
<td><img src="/projects/envoy/icon/color/envoy-icon-color.png" width="75"></td>
<td><img src="/projects/envoy/horizontal/color/envoy-horizontal-color.svg" width="200"></td>
<td><img src="/projects/envoy/stacked/color/envoy-stacked-color.svg" width="95"></td>
<td><img src="/projects/envoy/icon/color/envoy-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/envoy/horizontal/black/envoy-horizontal-black.png" width="200"></td>
<td><img src="/projects/envoy/stacked/black/envoy-stacked-black.png" width="95"></td>
<td><img src="/projects/envoy/icon/black/envoy-icon-black.png" width="75"></td>
<td><img src="/projects/envoy/horizontal/black/envoy-horizontal-black.svg" width="200"></td>
<td><img src="/projects/envoy/stacked/black/envoy-stacked-black.svg" width="95"></td>
<td><img src="/projects/envoy/icon/black/envoy-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/envoy/horizontal/white/envoy-horizontal-white.png" width="200"></td>
<td><img src="/projects/envoy/stacked/white/envoy-stacked-white.png" width="95"></td>
<td><img src="/projects/envoy/icon/white/envoy-icon-white.png" width="75"></td>
<td><img src="/projects/envoy/horizontal/white/envoy-horizontal-white.svg" width="200"></td>
<td><img src="/projects/envoy/stacked/white/envoy-stacked-white.svg" width="95"></td>
<td><img src="/projects/envoy/icon/white/envoy-icon-white.svg" width="75"></td>
</tr>
</table>
#### CoreDNS Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/coredns/horizontal/color/coredns-horizontal-color.png" width="200"></td>
<td><img src="/projects/coredns/stacked/color/coredns-stacked-color.png" width="95"></td>
<td><img src="/projects/coredns/icon/color/coredns-icon-color.png" width="75"></td>
<td><img src="/projects/coredns/horizontal/color/coredns-horizontal-color.svg" width="200"></td>
<td><img src="/projects/coredns/stacked/color/coredns-stacked-color.svg" width="95"></td>
<td><img src="/projects/coredns/icon/color/coredns-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/coredns/horizontal/black/coredns-horizontal-black.png" width="200"></td>
<td><img src="/projects/coredns/stacked/black/coredns-stacked-black.png" width="95"></td>
<td><img src="/projects/coredns/icon/black/coredns-icon-black.png" width="75"></td>
<td><img src="/projects/coredns/horizontal/black/coredns-horizontal-black.svg" width="200"></td>
<td><img src="/projects/coredns/stacked/black/coredns-stacked-black.svg" width="95"></td>
<td><img src="/projects/coredns/icon/black/coredns-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/coredns/horizontal/white/coredns-horizontal-white.png" width="200"></td>
<td><img src="/projects/coredns/stacked/white/coredns-stacked-white.png" width="95"></td>
<td><img src="/projects/coredns/icon/white/coredns-icon-white.png" width="75"></td>
<td><img src="/projects/coredns/horizontal/white/coredns-horizontal-white.svg" width="200"></td>
<td><img src="/projects/coredns/stacked/white/coredns-stacked-white.svg" width="95"></td>
<td><img src="/projects/coredns/icon/white/coredns-icon-white.svg" width="75"></td>
</tr>
<tr>
<th>solid-color</th>
<td><img src="/projects/coredns/horizontal/solid-color/coredns-horizontal-solid-color.png" width="200"></td>
<td><img src="/projects/coredns/stacked/solid-color/coredns-stacked-solid-color.png" width="95"></td>
<td><img src="/projects/coredns/icon/solid-color/coredns-icon-solid-color.png" width="75"></td>
<td><img src="/projects/coredns/horizontal/solid-color/coredns-horizontal-solid-color.svg" width="200"></td>
<td><img src="/projects/coredns/stacked/solid-color/coredns-stacked-solid-color.svg" width="95"></td>
<td><img src="/projects/coredns/icon/solid-color/coredns-icon-solid-color.svg" width="75"></td>
</tr>
<tr>
<th>grey</th>
<td><img src="/projects/coredns/horizontal/grey/coredns-horizontal-grey.png" width="200"></td>
<td><img src="/projects/coredns/stacked/grey/coredns-stacked-grey.png" width="95"></td>
<td><img src="/projects/coredns/icon/grey/coredns-icon-grey.png" width="75"></td>
<td><img src="/projects/coredns/horizontal/grey/coredns-horizontal-grey.svg" width="200"></td>
<td><img src="/projects/coredns/stacked/grey/coredns-stacked-grey.svg" width="95"></td>
<td><img src="/projects/coredns/icon/grey/coredns-icon-grey.svg" width="75"></td>
</tr>
</table>
#### containerd Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/containerd/horizontal/color/containerd-horizontal-color.png" width="200"></td>
<td><img src="/projects/containerd/stacked/color/containerd-stacked-color.png" width="95"></td>
<td><img src="/projects/containerd/icon/color/containerd-icon-color.png" width="75"></td>
<td><img src="/projects/containerd/horizontal/color/containerd-horizontal-color.svg" width="200"></td>
<td><img src="/projects/containerd/stacked/color/containerd-stacked-color.svg" width="95"></td>
<td><img src="/projects/containerd/icon/color/containerd-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/containerd/horizontal/black/containerd-horizontal-black.png" width="200"></td>
<td><img src="/projects/containerd/stacked/black/containerd-stacked-black.png" width="95"></td>
<td><img src="/projects/containerd/icon/black/containerd-icon-black.png" width="75"></td>
<td><img src="/projects/containerd/horizontal/black/containerd-horizontal-black.svg" width="200"></td>
<td><img src="/projects/containerd/stacked/black/containerd-stacked-black.svg" width="95"></td>
<td><img src="/projects/containerd/icon/black/containerd-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/containerd/horizontal/white/containerd-horizontal-white.png" width="200"></td>
<td><img src="/projects/containerd/stacked/white/containerd-stacked-white.png" width="95"></td>
<td><img src="/projects/containerd/icon/white/containerd-icon-white.png" width="75"></td>
<td><img src="/projects/containerd/horizontal/white/containerd-horizontal-white.svg" width="200"></td>
<td><img src="/projects/containerd/stacked/white/containerd-stacked-white.svg" width="95"></td>
<td><img src="/projects/containerd/icon/white/containerd-icon-white.svg" width="75"></td>
</tr>
</table>
#### Fluentd Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/fluentd/horizontal/color/fluentd-horizontal-color.png" width="200"></td>
<td><img src="/projects/fluentd/stacked/color/fluentd-stacked-color.png" width="95"></td>
<td><img src="/projects/fluentd/icon/color/fluentd-icon-color.png" width="75"></td>
<td><img src="/projects/fluentd/horizontal/color/fluentd-horizontal-color.svg" width="200"></td>
<td><img src="/projects/fluentd/stacked/color/fluentd-stacked-color.svg" width="95"></td>
<td><img src="/projects/fluentd/icon/color/fluentd-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/fluentd/horizontal/black/fluentd-horizontal-black.png" width="200"></td>
<td><img src="/projects/fluentd/stacked/black/fluentd-stacked-black.png" width="95"></td>
<td><img src="/projects/fluentd/icon/black/fluentd-icon-black.png" width="75"></td>
<td><img src="/projects/fluentd/horizontal/black/fluentd-horizontal-black.svg" width="200"></td>
<td><img src="/projects/fluentd/stacked/black/fluentd-stacked-black.svg" width="95"></td>
<td><img src="/projects/fluentd/icon/black/fluentd-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/fluentd/horizontal/white/fluentd-horizontal-white.png" width="200"></td>
<td><img src="/projects/fluentd/stacked/white/fluentd-stacked-white.png" width="95"></td>
<td><img src="/projects/fluentd/icon/white/fluentd-icon-white.png" width="75"></td>
<td><img src="/projects/fluentd/horizontal/white/fluentd-horizontal-white.svg" width="200"></td>
<td><img src="/projects/fluentd/stacked/white/fluentd-stacked-white.svg" width="95"></td>
<td><img src="/projects/fluentd/icon/white/fluentd-icon-white.svg" width="75"></td>
</tr>
</table>
#### Jaeger Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/jaeger/horizontal/color/jaeger-horizontal-color.png" width="200"></td>
<td><img src="/projects/jaeger/stacked/color/jaeger-stacked-color.png" width="95"></td>
<td><img src="/projects/jaeger/icon/color/jaeger-icon-color.png" width="75"></td>
<td><img src="/projects/jaeger/horizontal/color/jaeger-horizontal-color.svg" width="200"></td>
<td><img src="/projects/jaeger/stacked/color/jaeger-stacked-color.svg" width="95"></td>
<td><img src="/projects/jaeger/icon/color/jaeger-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/jaeger/horizontal/black/jaeger-horizontal-black.png" width="200"></td>
<td><img src="/projects/jaeger/stacked/black/jaeger-stacked-black.png" width="95"></td>
<td><img src="/projects/jaeger/icon/black/jaeger-icon-black.png" width="75"></td>
<td><img src="/projects/jaeger/horizontal/black/jaeger-horizontal-black.svg" width="200"></td>
<td><img src="/projects/jaeger/stacked/black/jaeger-stacked-black.svg" width="95"></td>
<td><img src="/projects/jaeger/icon/black/jaeger-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/jaeger/horizontal/white/jaeger-horizontal-white.png" width="200"></td>
<td><img src="/projects/jaeger/stacked/white/jaeger-stacked-white.png" width="95"></td>
<td><img src="/projects/jaeger/icon/white/jaeger-icon-white.png" width="75"></td>
<td><img src="/projects/jaeger/horizontal/white/jaeger-horizontal-white.svg" width="200"></td>
<td><img src="/projects/jaeger/stacked/white/jaeger-stacked-white.svg" width="95"></td>
<td><img src="/projects/jaeger/icon/white/jaeger-icon-white.svg" width="75"></td>
</tr>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th>reverse-color</th>
<td><img src="/projects/jaeger/horizontal/reverse-color/jaeger-horizontal-reverse-color.png" width="200"></td>
<td><img src="/projects/jaeger/stacked/reverse-color/jaeger-stacked-reverse-color.png" width="95"></td>
<td><img src="/projects/jaeger/icon/reverse-color/jaeger-icon-reverse-color.png" width="75"></td>
<td><img src="/projects/jaeger/horizontal/reverse-color/jaeger-horizontal-reverse-color.svg" width="200"></td>
<td><img src="/projects/jaeger/stacked/reverse-color/jaeger-stacked-reverse-color.svg" width="95"></td>
<td><img src="/projects/jaeger/icon/reverse-color/jaeger-icon-reverse-color.svg" width="75"></td>
</tr>
</table>
#### Vitess Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/vitess/horizontal/color/vitess-horizontal-color.png" width="200"></td>
<td><img src="/projects/vitess/stacked/color/vitess-stacked-color.png" width="95"></td>
<td><img src="/projects/vitess/icon/color/vitess-icon-color.png" width="75"></td>
<td><img src="/projects/vitess/horizontal/color/vitess-horizontal-color.svg" width="200"></td>
<td><img src="/projects/vitess/stacked/color/vitess-stacked-color.svg" width="95"></td>
<td><img src="/projects/vitess/icon/color/vitess-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/vitess/horizontal/black/vitess-horizontal-black.png" width="200"></td>
<td><img src="/projects/vitess/stacked/black/vitess-stacked-black.png" width="95"></td>
<td><img src="/projects/vitess/icon/black/vitess-icon-black.png" width="75"></td>
<td><img src="/projects/vitess/horizontal/black/vitess-horizontal-black.svg" width="200"></td>
<td><img src="/projects/vitess/stacked/black/vitess-stacked-black.svg" width="95"></td>
<td><img src="/projects/vitess/icon/black/vitess-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/vitess/horizontal/white/vitess-horizontal-white.png" width="200"></td>
<td><img src="/projects/vitess/stacked/white/vitess-stacked-white.png" width="95"></td>
<td><img src="/projects/vitess/icon/white/vitess-icon-white.png" width="75"></td>
<td><img src="/projects/vitess/horizontal/white/vitess-horizontal-white.svg" width="200"></td>
<td><img src="/projects/vitess/stacked/white/vitess-stacked-white.svg" width="95"></td>
<td><img src="/projects/vitess/icon/white/vitess-icon-white.svg" width="75"></td>
</tr>
<tr>
<th>grey</th>
<td><img src="/projects/vitess/horizontal/grey/vitess-horizontal-grey.png" width="200"></td>
<td><img src="/projects/vitess/stacked/grey/vitess-stacked-grey.png" width="95"></td>
<td><img src="/projects/vitess/icon/grey/vitess-icon-grey.png" width="75"></td>
<td><img src="/projects/vitess/horizontal/grey/vitess-horizontal-grey.svg" width="200"></td>
<td><img src="/projects/vitess/stacked/grey/vitess-stacked-grey.svg" width="95"></td>
<td><img src="/projects/vitess/icon/grey/vitess-icon-grey.svg" width="75"></td>
</tr>
</table>
#### Helm Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/helm/horizontal/color/helm-horizontal-color.png" width="200"></td>
<td><img src="/projects/helm/stacked/color/helm-stacked-color.png" width="95"></td>
<td><img src="/projects/helm/icon/color/helm-icon-color.png" width="75"></td>
<td><img src="/projects/helm/horizontal/color/helm-horizontal-color.svg" width="200"></td>
<td><img src="/projects/helm/stacked/color/helm-stacked-color.svg" width="95"></td>
<td><img src="/projects/helm/icon/color/helm-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/helm/horizontal/black/helm-horizontal-black.png" width="200"></td>
<td><img src="/projects/helm/stacked/black/helm-stacked-black.png" width="95"></td>
<td><img src="/projects/helm/icon/black/helm-icon-black.png" width="75"></td>
<td><img src="/projects/helm/horizontal/black/helm-horizontal-black.svg" width="200"></td>
<td><img src="/projects/helm/stacked/black/helm-stacked-black.svg" width="95"></td>
<td><img src="/projects/helm/icon/black/helm-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/helm/horizontal/white/helm-horizontal-white.png" width="200"></td>
<td><img src="/projects/helm/stacked/white/helm-stacked-white.png" width="95"></td>
<td><img src="/projects/helm/icon/white/helm-icon-white.png" width="75"></td>
<td><img src="/projects/helm/horizontal/white/helm-horizontal-white.svg" width="200"></td>
<td><img src="/projects/helm/stacked/white/helm-stacked-white.svg" width="95"></td>
<td><img src="/projects/helm/icon/white/helm-icon-white.svg" width="75"></td>
</tr>
</table>
#### Harbor Logos
<table>
<tr>
<th colspan="7"></th>
</tr>
<tr>
<th></th>
<th colspan="3">PNG</th>
<th colspan="3">SVG</th>
</tr>
<tr>
<th></th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
<th>horizontal</th>
<th>stacked</th>
<th>icon</th>
</tr>
<tr>
<th>color</th>
<td><img src="/projects/harbor/horizontal/color/harbor-horizontal-color.png" width="200"></td>
<td><img src="/projects/harbor/stacked/color/harbor-stacked-color.png" width="95"></td>
<td><img src="/projects/harbor/icon/color/harbor-icon-color.png" width="75"></td>
<td><img src="/projects/harbor/horizontal/color/harbor-horizontal-color.svg" width="200"></td>
<td><img src="/projects/harbor/stacked/color/harbor-stacked-color.svg" width="95"></td>
<td><img src="/projects/harbor/icon/color/harbor-icon-color.svg" width="75"></td>
</tr>
<tr>
<th>black</th>
<td><img src="/projects/harbor/horizontal/black/harbor-horizontal-black.png" width="200"></td>
<td><img src="/projects/harbor/stacked/black/harbor-stacked-black.png" width="95"></td>
<td><img src="/projects/harbor/icon/black/harbor-icon-black.png" width="75"></td>
<td><img src="/projects/harbor/horizontal/black/harbor-horizontal-black.svg" width="200"></td>
<td><img src="/projects/harbor/stacked/black/harbor-stacked-black.svg" width="95"></td>
<td><img src="/projects/harbor/icon/black/harbor-icon-black.svg" width="75"></td>
</tr>
<tr>
<th>white</th>
<td><img src="/projects/harbor/horizontal/white/harbor-horizontal-white.png" width="200"></td>
<td><img src="/projects/harbor/stacked/white/harbor-stacked-white.png" width="95"></td>
<td><img src="/projects/harbor/icon/white/harbor-icon-white.png" width="75"></td>
<td><img src="/projects/harbor/horizontal/white/harbor-horizontal-white.svg" width="200"></td>
<td><img src="/projects/harbor/stacked/white/harbor-stacked-white.svg" width="95"></td>
<td><img src="/projects/harbor/icon/white/harbor-icon-white.svg" width="75"></td>
</tr>
</table>
Use of any trademark or logo is subject to the trademark policy available at https://www.linuxfoundation.org/trademark-usage
Questions? Please email [[email protected]](mailto:[email protected]).
| {
"pile_set_name": "Github"
} |
// This project was built with MultiProcessing and
// will not execute with the standard Processing software.
// MultiProcessing was developed by Simon Greenwold for the
// course 872a Model Based Design at the Yale School of
// Architecture in Winter 2003
// Julia Stanat
// 872a _ Model Based Design - Yale School of Architecture
// FINAL PROJECT - December 19 2003
// Professor = Simon Greenwold
color spawnedBeeFill;
color spawnedBeeStroke;
HoneyBee drone = new HoneyBee();
class HoneyBee extends Particle {
public HoneyBee() {}
color beeFill;
color beeStroke;
float radius = 12;
void setup() {
ellipseMode(CENTER_DIAMETER);
}
void firstLoop() {
addMagnet(this, 0.4);
enableCollision(radius + 2);
}
void loop() {
push();
//translate(pos[0], pos[1], pos[2]);
//noStroke();
stroke(beeStroke);
fill(beeFill);
//noFill();
ellipse(pos[0], pos[1],radius*2,radius*2);
pop();
}
}
void setup(){
size(1024,512);
rectMode(CENTER_DIAMETER);
}
void firstLoop() {
background(0);
noCursor();
gravity(0);
spawnedBeeFill = color(0, 0, 0, 2);
spawnedBeeStroke = color(255, 0, 0, 15);
  // spawn 100 red-stroked bees in the left third of the canvas
  for (int i = 0; i < 100; i++) {
HoneyBee bee2 = (HoneyBee)spawnParticle(drone);
bee2.beeStroke = spawnedBeeStroke;
bee2.beeFill = spawnedBeeFill;
bee2.pos[0] = random(width/3.0);
bee2.pos[1] = random(height);
bee2.pos[2] = random(0);
}
  // switch to green strokes and spawn 100 bees in the right third of the canvas
  spawnedBeeStroke = color(0, 255, 0, 5);
  for (int i = 0; i < 100; i++) {
HoneyBee bee2 = (HoneyBee)spawnParticle(drone);
bee2.beeStroke = spawnedBeeStroke;
bee2.beeFill = spawnedBeeFill;
bee2.pos[0] = random(width/3.0) + 2 * (width/3.0);
bee2.pos[1] = random(height);
bee2.pos[2] = random(0);
}
}
void loop(){
}
| {
"pile_set_name": "Github"
} |
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// <iterator>
// insert_iterator
// insert_iterator(Cont& x, Cont::iterator i);
#include <iterator>
#include <vector>
#include "nasty_containers.h"
#include "test_macros.h"
template <class C>
void
test(C c)
{
std::insert_iterator<C> i(c, c.begin());
}
int main(int, char**)
{
test(std::vector<int>());
test(nasty_vector<int>());
return 0;
}
| {
"pile_set_name": "Github"
} |
/*
IMPORTANT! This file is auto-generated each time you save your
project - if you alter its contents, your changes may be overwritten!
*/
#include "AppConfig.h"
#include <juce_audio_plugin_client/juce_audio_plugin_client_AUv3.mm>
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2015-2102 RonCoo(http://www.roncoo.com) Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.roncoo.pay.user.enums;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Fund inflow type.
 * RonCoo Academy (龙果学院): www.roncoo.com
* @author:Peter
*/
public enum FundInfoTypeEnum {
    PLAT_RECEIVES("平台收款"),     // platform receives the payment
    MERCHANT_RECEIVES("商家收款"); // merchant receives the payment
/** Description */
private String desc;
private FundInfoTypeEnum(String desc) {
this.desc = desc;
}
public String getDesc() {
return desc;
}
public void setDesc(String desc) {
this.desc = desc;
}
public static Map<String, Map<String, Object>> toMap() {
FundInfoTypeEnum[] ary = FundInfoTypeEnum.values();
Map<String, Map<String, Object>> enumMap = new HashMap<String, Map<String, Object>>();
for (int num = 0; num < ary.length; num++) {
Map<String, Object> map = new HashMap<String, Object>();
String key = ary[num].name();
map.put("desc", ary[num].getDesc());
enumMap.put(key, map);
}
return enumMap;
}
@SuppressWarnings({ "rawtypes", "unchecked" })
public static List toList() {
FundInfoTypeEnum[] ary = FundInfoTypeEnum.values();
List list = new ArrayList();
for (int i = 0; i < ary.length; i++) {
Map<String, String> map = new HashMap<String, String>();
map.put("desc", ary[i].getDesc());
map.put("name", ary[i].name());
list.add(map);
}
return list;
}
public static FundInfoTypeEnum getEnum(String name) {
FundInfoTypeEnum[] arry = FundInfoTypeEnum.values();
for (int i = 0; i < arry.length; i++) {
if (arry[i].name().equalsIgnoreCase(name)) {
return arry[i];
}
}
return null;
}
/**
 * Get the enum values as a JSON string.
 *
 * @return a JSON array string of {id, desc} objects
*/
public static String getJsonStr() {
FundInfoTypeEnum[] enums = FundInfoTypeEnum.values();
StringBuffer jsonStr = new StringBuffer("[");
for (FundInfoTypeEnum senum : enums) {
if (!"[".equals(jsonStr.toString())) {
jsonStr.append(",");
}
jsonStr.append("{id:'").append(senum).append("',desc:'").append(senum.getDesc()).append("'}");
}
jsonStr.append("]");
return jsonStr.toString();
}
}
| {
"pile_set_name": "Github"
} |
dev tun
proto tcp
remote br-sp-smart.serverlocation.co 54
resolv-retry infinite
client
auth-user-pass /config/openvpn-credentials.txt
nobind
persist-key
persist-tun
remote-cert-tls server
compress lzo
reneg-sec 0
cipher AES-256-CBC
auth SHA256
# Uncomment following line if you use MS Windows
# block-outside-dns
verb 3
<ca>
-----BEGIN CERTIFICATE-----
MIIFOTCCBCGgAwIBAgIJALHEFe9IQlCzMA0GCSqGSIb3DQEBCwUAMIHDMQswCQYD
VQQGEwJTQzENMAsGA1UECBMETWFoZTERMA8GA1UEBxMIVmljdG9yaWExHTAbBgNV
BAoTFEdsb2JhbCBTdGVhbHRoLCBJbmMuMQwwCgYDVQQLEwNWUE4xIDAeBgNVBAMT
F0dsb2JhbCBTdGVhbHRoLCBJbmMuIENBMRswGQYDVQQpExJzZXJ2ZXJsb2NhdGlv
bi1rZXkxJjAkBgkqhkiG9w0BCQEWF2FkbWluQHNlcnZlcmxvY2F0aW9uLmNvMB4X
DTE1MDIyNTIwMDIzMFoXDTI1MDIyMjIwMDIzMFowgcMxCzAJBgNVBAYTAlNDMQ0w
CwYDVQQIEwRNYWhlMREwDwYDVQQHEwhWaWN0b3JpYTEdMBsGA1UEChMUR2xvYmFs
IFN0ZWFsdGgsIEluYy4xDDAKBgNVBAsTA1ZQTjEgMB4GA1UEAxMXR2xvYmFsIFN0
ZWFsdGgsIEluYy4gQ0ExGzAZBgNVBCkTEnNlcnZlcmxvY2F0aW9uLWtleTEmMCQG
CSqGSIb3DQEJARYXYWRtaW5Ac2VydmVybG9jYXRpb24uY28wggEiMA0GCSqGSIb3
DQEBAQUAA4IBDwAwggEKAoIBAQDA94FmLbk3VPchYZmBCTc0okUFO6AwTn8trAVX
r6GVypCDmuWyCPAzCG47qT2rBlWPJMXYbmtJEq/Vrh9gcU7LYw4NQjSnXnBQ10wX
c3B+mG4x807IBwH87N2Fl6ZbL5mChIdssUalS3QyARc5Xp6YAJrX3I/UninPXYjz
jSxvMrSTnFHwS757F1vLv5z5+Udahz22+u+sqdkN31EnAsM917/fOpkWo0fd/x0r
59d0wYSeqRzqCf9UoQff08/8b+XN+kmR82S7othHEaLXBCgdXHk/lrp5zy4n1+AF
lwEXx51UNS8u5YUHlX0orJC1lTJfWjCvTWo2u/XC5iXcrEGbAgMBAAGjggEsMIIB
KDAdBgNVHQ4EFgQU69+VyGvTYVeqitctj3s/q7vcEbcwgfgGA1UdIwSB8DCB7YAU
69+VyGvTYVeqitctj3s/q7vcEbehgcmkgcYwgcMxCzAJBgNVBAYTAlNDMQ0wCwYD
VQQIEwRNYWhlMREwDwYDVQQHEwhWaWN0b3JpYTEdMBsGA1UEChMUR2xvYmFsIFN0
ZWFsdGgsIEluYy4xDDAKBgNVBAsTA1ZQTjEgMB4GA1UEAxMXR2xvYmFsIFN0ZWFs
dGgsIEluYy4gQ0ExGzAZBgNVBCkTEnNlcnZlcmxvY2F0aW9uLWtleTEmMCQGCSqG
SIb3DQEJARYXYWRtaW5Ac2VydmVybG9jYXRpb24uY2+CCQCxxBXvSEJQszAMBgNV
HRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBYkrR6R3QmQ04zWc5r4C7fhR7N
+rOqljrpbMXL6QfJTQJbAX2EJeHEyhjYh6xf4I3LWiM1rpSdJi8CbMagSRZulBqQ
v9ceszpFOpaoM4kgfDKWW+Z7R4cOZxZKmym1heuvcLcqMwOEk0qN7b6fyipSci38
/LnVdMHDLqnJUndTjhtN6sHmCKrBx9I3V9Yp1CAHUnEvX8mZAYKjbdhuhKhwaMiq
wOVCxXj8f872XtjATq/y1Y21vI8yv94NsK1C0zK+FBzxWWnXXQTzYBsNfCoZpox5
7LaXKtnKPSsaucbDlB2ECLqAydp8Q0f2pj0hF3X7mi5NmHEKqKc8T5ROar4D
-----END CERTIFICATE-----
</ca>
<tls-auth>
-----BEGIN OpenVPN Static key V1-----
acc96c671aa10916c48eedf8c73acc83
09554c946bf0c5864d981ce628768aba
2a04d57b9e5fcef13d7a4e251c9afd09
527f4d809c59f22e25347cc2bd841005
023142ac6ae19f62ba76f5d3b3d68429
637514306fcd0fd3a27b4e5bdcd92915
ec7028ffaa2666dcb88addb8e5bbb154
cf87875cd2708d039d7b5546d8b105f7
3e1be598404ff064f6fadb1182dc7893
2dec2636b585fce6e878d881ccc26a35
31bf864cd046cb2b2d2c1df66da63539
34f5b093f5c52cc2b21e96703bf563c2
3ecdd9b4669abb96065fdc300e5c09d2
1696be7a137470618ea8acb8216aab9a
5145ca4f4dd6edc2a5f354993027b875
6fddddb99b664bcde0a64823045b2858
-----END OpenVPN Static key V1-----
</tls-auth>
key-direction 1
| {
"pile_set_name": "Github"
} |
FILE_CONTENT
| {
"pile_set_name": "Github"
} |
cite 'about-alias'
about-alias 'puppet bolt aliases'
# Aliases
# Run a command on targets with a TTY allocated and host key checking disabled
alias bolt='bolt command run --tty --no-host-key-check'
# As above, adding password (-p) and user (-u) options; pass the user name after the alias
alias boltas='bolt -p -u'
# Run the command as root via sudo, with a sudo password option
alias sudobolt='bolt --run-as root --sudo-password'
# Combine sudo escalation with the password/user options
alias sudoboltas='sudobolt -p -u'
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2015 MongoDB Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "CLibMongoC_mongoc-prelude.h"
#ifndef MONGOC_MEMCMP_PRIVATE_H
#define MONGOC_MEMCMP_PRIVATE_H
#include <CLibMongoC_bson.h>
#include "CLibMongoC_mongoc-config.h"
/* WARNING: mongoc_memcmp() must be used to verify if two secret keys
* are equal, in constant time.
* It returns 0 if the keys are equal, and -1 if they differ.
* This function is not designed for lexicographical comparisons.
*/
int
mongoc_memcmp (const void *const b1, const void *const b2, size_t len);
#endif /* MONGOC_MEMCMP_PRIVATE_H */
| {
"pile_set_name": "Github"
} |
// Unity C# reference source
// Copyright (c) Unity Technologies. For terms of use, see
// https://unity3d.com/legal/licenses/Unity_Reference_Only_License
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEditor.AnimatedValues;
namespace UnityEditor
{
/// <summary>
/// Prompts the end-user to add 2D colliders if non exist for 2D effector to work with.
/// </summary>
[CustomEditor(typeof(PointEffector2D), true)]
[CanEditMultipleObjects]
internal class PointEffector2DEditor : Effector2DEditor
{
readonly AnimBool m_ShowForceRollout = new AnimBool();
SerializedProperty m_ForceMagnitude;
SerializedProperty m_ForceVariation;
SerializedProperty m_ForceSource;
SerializedProperty m_ForceTarget;
SerializedProperty m_ForceMode;
SerializedProperty m_DistanceScale;
static readonly AnimBool m_ShowDampingRollout = new AnimBool();
SerializedProperty m_Drag;
SerializedProperty m_AngularDrag;
public override void OnEnable()
{
base.OnEnable();
m_ShowForceRollout.value = true;
m_ShowForceRollout.valueChanged.AddListener(Repaint);
m_ForceMagnitude = serializedObject.FindProperty("m_ForceMagnitude");
m_ForceVariation = serializedObject.FindProperty("m_ForceVariation");
m_ForceSource = serializedObject.FindProperty("m_ForceSource");
m_ForceTarget = serializedObject.FindProperty("m_ForceTarget");
m_ForceMode = serializedObject.FindProperty("m_ForceMode");
m_DistanceScale = serializedObject.FindProperty("m_DistanceScale");
m_ShowDampingRollout.valueChanged.AddListener(Repaint);
m_Drag = serializedObject.FindProperty("m_Drag");
m_AngularDrag = serializedObject.FindProperty("m_AngularDrag");
}
public override void OnDisable()
{
base.OnDisable();
m_ShowForceRollout.valueChanged.RemoveListener(Repaint);
m_ShowDampingRollout.valueChanged.RemoveListener(Repaint);
}
public override void OnInspectorGUI()
{
base.OnInspectorGUI();
serializedObject.Update();
// Force.
m_ShowForceRollout.target = EditorGUILayout.Foldout(m_ShowForceRollout.target, "Force", true);
if (EditorGUILayout.BeginFadeGroup(m_ShowForceRollout.faded))
{
EditorGUILayout.PropertyField(m_ForceMagnitude);
EditorGUILayout.PropertyField(m_ForceVariation);
EditorGUILayout.PropertyField(m_DistanceScale);
EditorGUILayout.PropertyField(m_ForceSource);
EditorGUILayout.PropertyField(m_ForceTarget);
EditorGUILayout.PropertyField(m_ForceMode);
EditorGUILayout.Space();
}
EditorGUILayout.EndFadeGroup();
// Drag.
m_ShowDampingRollout.target = EditorGUILayout.Foldout(m_ShowDampingRollout.target, "Damping", true);
if (EditorGUILayout.BeginFadeGroup(m_ShowDampingRollout.faded))
{
EditorGUILayout.PropertyField(m_Drag);
EditorGUILayout.PropertyField(m_AngularDrag);
}
EditorGUILayout.EndFadeGroup();
serializedObject.ApplyModifiedProperties();
}
}
}
| {
"pile_set_name": "Github"
} |
---
title: Host ASP.NET Core in a Windows Service
author: rick-anderson
description: Learn how to host an ASP.NET Core app in a Windows Service.
monikerRange: '>= aspnetcore-2.1'
ms.author: riande
ms.custom: mvc
ms.date: 02/07/2020
no-loc: ["ASP.NET Core Identity", cookie, Cookie, Blazor, "Blazor Server", "Blazor WebAssembly", "Identity", "Let's Encrypt", Razor, SignalR]
uid: host-and-deploy/windows-service
---
# Host ASP.NET Core in a Windows Service
::: moniker range=">= aspnetcore-3.0"
An ASP.NET Core app can be hosted on Windows as a [Windows Service](/dotnet/framework/windows-services/introduction-to-windows-service-applications) without using IIS. When hosted as a Windows Service, the app automatically starts after server reboots.
[View or download sample code](https://github.com/dotnet/AspNetCore.Docs/tree/master/aspnetcore/host-and-deploy/windows-service/samples) ([how to download](xref:index#how-to-download-a-sample))
## Prerequisites
* [ASP.NET Core SDK 2.1 or later](https://dotnet.microsoft.com/download)
* [PowerShell 6.2 or later](https://github.com/PowerShell/PowerShell)
## Worker Service template
The ASP.NET Core Worker Service template provides a starting point for writing long-running service apps. To use the template as a basis for a Windows Service app:
1. Create a Worker Service app from the .NET Core template.
1. Follow the guidance in the [App configuration](#app-configuration) section to update the Worker Service app so that it can run as a Windows Service.
[!INCLUDE[](~/includes/worker-template-instructions.md)]
## App configuration
The app requires a package reference for [Microsoft.Extensions.Hosting.WindowsServices](https://www.nuget.org/packages/Microsoft.Extensions.Hosting.WindowsServices).
`IHostBuilder.UseWindowsService` is called when building the host. If the app is running as a Windows Service, the method:
* Sets the host lifetime to `WindowsServiceLifetime`.
* Sets the [content root](xref:fundamentals/index#content-root) to [AppContext.BaseDirectory](xref:System.AppContext.BaseDirectory). For more information, see the [Current directory and content root](#current-directory-and-content-root) section.
* Enables logging to the event log:
  * The application name is used as the default source name.
  * The default log level is *Warning* or higher for an app based on an ASP.NET Core template that calls `CreateDefaultBuilder` to build the host.
  * Override the default log level with the `Logging:EventLog:LogLevel:Default` key in *appsettings.json*/*appsettings.{Environment}.json* or another configuration provider.
  * Only administrators can create new event sources. When an event source can't be created using the application name, a warning is logged to the *Application* source and event logs are disabled.
In `CreateHostBuilder` of *Program.cs*:
```csharp
Host.CreateDefaultBuilder(args)
.UseWindowsService()
...
```
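A fuller *Program.cs* for a worker-style service might look like the following. This is a minimal sketch, not the sample app's code: the `Worker` hosted service is a placeholder, and the event log filter line is an optional, illustrative alternative to the `Logging:EventLog:LogLevel:Default` configuration key. The sketch assumes the `Microsoft.Extensions.Hosting.WindowsServices` and `Microsoft.Extensions.Logging.EventLog` packages are referenced.

```csharp
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.EventLog;

public class Program
{
    public static void Main(string[] args)
    {
        CreateHostBuilder(args).Build().Run();
    }

    public static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .UseWindowsService()
            .ConfigureLogging(logging =>
            {
                // Optional: set the minimum level written to the Windows Event Log in code.
                logging.AddFilter<EventLogLoggerProvider>(level => level >= LogLevel.Information);
            })
            .ConfigureServices(services =>
            {
                // Worker is a placeholder BackgroundService implementing the app's background work.
                services.AddHostedService<Worker>();
            });
}
```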
The following sample apps accompany this topic:
* Background Worker Service Sample: A non-web app sample based on the [Worker Service template](#worker-service-template) that uses [hosted services](xref:fundamentals/host/hosted-services) for background tasks.
* Web App Service Sample: A Razor Pages web app sample that runs as a Windows Service with [hosted services](xref:fundamentals/host/hosted-services) for background tasks.
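A hosted service such as the `Worker` registered in the preceding sketch derives from `BackgroundService`. The following is an illustrative sketch rather than code from the samples; the log message and delay are arbitrary:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;

public class Worker : BackgroundService
{
    private readonly ILogger<Worker> _logger;

    public Worker(ILogger<Worker> logger)
    {
        _logger = logger;
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        // Loop until the Windows Service is stopped.
        while (!stoppingToken.IsCancellationRequested)
        {
            _logger.LogInformation("Worker running at: {Time}", DateTimeOffset.Now);
            await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken);
        }
    }
}
```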
For MVC guidance, see the articles under <xref:mvc/overview> and <xref:migration/22-to-30>.
## Deployment type
For information and advice on deployment scenarios, see [.NET Core application deployment](/dotnet/core/deploying/).
### SDK
For a web app-based service that uses the Razor Pages or MVC frameworks, specify the Web SDK in the project file:
```xml
<Project Sdk="Microsoft.NET.Sdk.Web">
```
If the service only executes background tasks (for example, [hosted services](xref:fundamentals/host/hosted-services)), specify the Worker SDK in the project file:
```xml
<Project Sdk="Microsoft.NET.Sdk.Worker">
```
### Framework-dependent deployment (FDD)
Framework-dependent deployment (FDD) relies on the presence of a shared system-wide version of .NET Core on the target system. When the FDD scenario is adopted following the guidance in this article, the SDK produces an executable (*.exe*), called a *framework-dependent executable*.
If using the [Web SDK](#sdk), a *web.config* file, which is normally produced when publishing an ASP.NET Core app, is unnecessary for a Windows Services app. To disable the creation of the *web.config* file, add the `<IsTransformWebConfigDisabled>` property set to `true`.
```xml
<PropertyGroup>
<TargetFramework>netcoreapp3.0</TargetFramework>
<IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
</PropertyGroup>
```
### Self-contained deployment (SCD)
Self-contained deployment (SCD) doesn't rely on the presence of a shared framework on the host system. The runtime and the app's dependencies are deployed with the app.
A Windows [Runtime Identifier (RID)](/dotnet/core/rid-catalog) is included in the `<PropertyGroup>` that contains the target framework:
```xml
<RuntimeIdentifier>win7-x64</RuntimeIdentifier>
```
To publish for multiple RIDs:
* Provide the RIDs in a semicolon-delimited list.
* Use the property name [\<RuntimeIdentifiers>](/dotnet/core/tools/csproj#runtimeidentifiers) (plural).
For more information, see [.NET Core RID Catalog](/dotnet/core/rid-catalog).
## Service user account
To create a user account for a service, use the [New-LocalUser](/powershell/module/microsoft.powershell.localaccounts/new-localuser) cmdlet from an administrative PowerShell 6 command shell.
On Windows 10 October 2018 Update (version 1809/build 10.0.17763) or later:
```powershell
New-LocalUser -Name {SERVICE NAME}
```
On Windows OS earlier than the Windows 10 October 2018 Update (version 1809/build 10.0.17763):
```console
powershell -Command "New-LocalUser -Name {SERVICE NAME}"
```
Provide a [strong password](/windows/security/threat-protection/security-policy-settings/password-must-meet-complexity-requirements) when prompted.
Unless the `-AccountExpires` parameter is supplied to the [New-LocalUser](/powershell/module/microsoft.powershell.localaccounts/new-localuser) cmdlet with an expiration <xref:System.DateTime>, the account doesn't expire.
For more information, see [Microsoft.PowerShell.LocalAccounts](/powershell/module/microsoft.powershell.localaccounts/) and [Service User Accounts](/windows/desktop/services/service-user-accounts).
An alternative approach to managing users when using Active Directory is to use Managed Service Accounts. For more information, see [Group Managed Service Accounts Overview](/windows-server/security/group-managed-service-accounts/group-managed-service-accounts-overview).
## Log on as a service rights
To establish *Log on as a service* rights for a service user account:
1. Open the Local Security Policy editor by running *secpol.msc*.
1. Expand the **Local Policies** node and select **User Rights Assignment**.
1. Open the **Log on as a service** policy.
1. Select **Add User or Group**.
1. Provide the object name (user account) using either of the following approaches:
   1. Type the user account (`{DOMAIN OR COMPUTER NAME\USER}`) in the object name field and select **OK** to add the user to the policy.
   1. Select **Advanced**. Select **Find Now**. Select the user account from the list. Select **OK**. Select **OK** again to add the user to the policy.
1. Select **OK** or **Apply** to accept the changes.
## Create and manage the Windows Service
### Create a service
Use PowerShell commands to register a service. From an administrative PowerShell 6 command shell, execute the following commands:
```powershell
$acl = Get-Acl "{EXE PATH}"
$aclRuleArgs = {DOMAIN OR COMPUTER NAME\USER}, "Read,Write,ReadAndExecute", "ContainerInherit,ObjectInherit", "None", "Allow"
$accessRule = New-Object System.Security.AccessControl.FileSystemAccessRule($aclRuleArgs)
$acl.SetAccessRule($accessRule)
$acl | Set-Acl "{EXE PATH}"
New-Service -Name {SERVICE NAME} -BinaryPathName {EXE FILE PATH} -Credential {DOMAIN OR COMPUTER NAME\USER} -Description "{DESCRIPTION}" -DisplayName "{DISPLAY NAME}" -StartupType Automatic
```
* `{EXE PATH}`: Path to the app's folder on the host (for example, `d:\myservice`). Don't include the app's executable in the path. A trailing slash isn't required.
* `{DOMAIN OR COMPUTER NAME\USER}`: Service user account (for example, `Contoso\ServiceUser`).
* `{SERVICE NAME}`: Service name (for example, `MyService`).
* `{EXE FILE PATH}`: The app's executable path (for example, `d:\myservice\myservice.exe`). Include the executable's file name with extension.
* `{DESCRIPTION}`: Service description (for example, `My sample service`).
* `{DISPLAY NAME}`: Service display name (for example, `My Service`).
### Start a service
Start a service with the following PowerShell 6 command:
```powershell
Start-Service -Name {SERVICE NAME}
```
The command takes a few seconds to start the service.
### Determine a service's status
To check the status of a service, use the following PowerShell 6 command:
```powershell
Get-Service -Name {SERVICE NAME}
```
The status is reported as one of the following values:
* `Starting`
* `Running`
* `Stopping`
* `Stopped`
### Stop a service
Stop a service with the following PowerShell 6 command:
```powershell
Stop-Service -Name {SERVICE NAME}
```
### Remove a service
After allowing a short delay for the service to stop, remove the service with the following PowerShell 6 command:
```powershell
Remove-Service -Name {SERVICE NAME}
```
## Proxy server and load balancer scenarios
Services that interact with requests from the Internet or a corporate network and are behind a proxy or load balancer might require additional configuration. For more information, see <xref:host-and-deploy/proxy-load-balancer>.
## Configure endpoints
By default, ASP.NET Core binds to `http://localhost:5000`. Configure the URL and port by setting the `ASPNETCORE_URLS` environment variable.
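The binding can also be set in code. The following is a sketch only, not required configuration; the URL is arbitrary, and `Startup` is the app's existing startup class:

```csharp
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Hosting;

public class Program
{
    public static void Main(string[] args)
    {
        CreateHostBuilder(args).Build().Run();
    }

    public static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .UseWindowsService()
            .ConfigureWebHostDefaults(webBuilder =>
            {
                // Bind Kestrel to an explicit address and port instead of the default.
                webBuilder.UseUrls("http://localhost:5001");
                webBuilder.UseStartup<Startup>();
            });
}
```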
For additional URL and port configuration approaches, see the relevant server article:
* <xref:fundamentals/servers/kestrel#endpoint-configuration>
* <xref:fundamentals/servers/httpsys#configure-windows-server>
The preceding guidance covers support for HTTPS endpoints. For example, configure the app for HTTPS when authentication is used with a Windows Service.
> [!NOTE]
> Use of the ASP.NET Core HTTPS development certificate to secure a service endpoint isn't supported.
## Current directory and content root
The current working directory returned by calling <xref:System.IO.Directory.GetCurrentDirectory*> for a Windows Service is the *C:\\WINDOWS\\system32* folder. The *system32* folder isn't a suitable location to store a service's files (for example, settings files). Use one of the following approaches to maintain and access a service's assets and settings files.
### Use ContentRootPath or ContentRootFileProvider
Use [IHostEnvironment.ContentRootPath](xref:Microsoft.Extensions.Hosting.IHostEnvironment.ContentRootPath) or <xref:Microsoft.Extensions.Hosting.IHostEnvironment.ContentRootFileProvider> to locate an app's resources.
When the app runs as a service, <xref:Microsoft.Extensions.Hosting.WindowsServiceLifetimeHostBuilderExtensions.UseWindowsService*> sets the <xref:Microsoft.Extensions.Hosting.IHostEnvironment.ContentRootPath> to [AppContext.BaseDirectory](xref:System.AppContext.BaseDirectory).
The app's default settings files, *appsettings.json* and *appsettings.{Environment}.json*, are loaded from the app's content root by calling [CreateDefaultBuilder during host construction](xref:fundamentals/host/generic-host#set-up-a-host).
For other settings files loaded by developer code in <xref:Microsoft.Extensions.Hosting.HostBuilder.ConfigureAppConfiguration*>, there's no need to call <xref:Microsoft.Extensions.Configuration.FileConfigurationExtensions.SetBasePath*>. In the following example, the *custom_settings.json* file exists in the app's content root and is loaded without explicitly setting a base path:
[!code-csharp[](windows-service/samples_snapshot/CustomSettingsExample.cs?highlight=13)]
Don't attempt to use <xref:System.IO.Directory.GetCurrentDirectory*> to obtain a resource path because a Windows Service app returns the *C:\\WINDOWS\\system32* folder as its current directory.
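For example, a service might resolve a data file against the content root as shown in the following sketch. The class and file name are illustrative and aren't part of the sample apps:

```csharp
using System.IO;
using Microsoft.Extensions.Hosting;

public class TemplateReader
{
    private readonly IHostEnvironment _env;

    public TemplateReader(IHostEnvironment env)
    {
        _env = env;
    }

    public string ReadTemplate()
    {
        // Resolve the file against the content root, not the current directory.
        var path = Path.Combine(_env.ContentRootPath, "template.txt");
        return File.ReadAllText(path);
    }
}
```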
### Store a service's files in a suitable location on disk
When using an <xref:Microsoft.Extensions.Configuration.IConfigurationBuilder>, call <xref:Microsoft.Extensions.Configuration.FileConfigurationExtensions.SetBasePath*> with an absolute path to the folder containing the files.
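The following sketch shows the idea; the folder and file names are illustrative and assume the service account has read access to the path:

```csharp
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;

public class Program
{
    public static void Main(string[] args)
    {
        CreateHostBuilder(args).Build().Run();
    }

    public static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .UseWindowsService()
            .ConfigureAppConfiguration((context, config) =>
            {
                // Load settings from an absolute path outside the content root.
                config.SetBasePath(@"C:\ProgramData\MyService")
                      .AddJsonFile("service_settings.json", optional: true, reloadOnChange: true);
            });
}
```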
## Troubleshoot
To troubleshoot a Windows Service app, see <xref:test/troubleshoot>.
### Common errors
* An old or pre-release version of PowerShell is in use.
* The registered service doesn't use the app's **published** output from the [dotnet publish](/dotnet/core/tools/dotnet-publish) command. Output of the [dotnet build](/dotnet/core/tools/dotnet-build) command isn't supported for app deployment. Published assets are found in either of the following folders depending on the deployment type:
  * *bin/Release/{TARGET FRAMEWORK}/publish* (FDD)
  * *bin/Release/{TARGET FRAMEWORK}/{RUNTIME IDENTIFIER}/publish* (SCD)
* The service isn't in the RUNNING state.
* The paths to resources that the app uses (for example, certificates) are incorrect. The base path of a Windows Service is *c:\\Windows\\System32*.
* The user doesn't have *Log on as a service* rights.
* The user's password is expired or incorrectly passed when executing the `New-Service` PowerShell command.
* The app requires ASP.NET Core authentication but isn't configured for secure connections (HTTPS).
* The request URL port is incorrect or not configured correctly in the app.
### System and Application Event Logs
Access the System and Application Event Logs:
1. Open the Start menu, search for *Event Viewer*, and select the **Event Viewer** app.
1. In **Event Viewer**, open the **Windows Logs** node.
1. Select **System** to open the System Event Log. Select **Application** to open the Application Event Log.
1. Search for errors associated with the failing app.
### Run the app at a command prompt
Many startup errors don't produce useful information in the event logs. You can find the cause of some errors by running the app at a command prompt on the hosting system. To log additional detail from the app, lower the [log level](xref:fundamentals/logging/index#log-level) or run the app in the [Development environment](xref:fundamentals/environments).
### Clear package caches
A functioning app may fail immediately after upgrading the .NET Core SDK on the development machine or after changing package versions within the app. In some cases, incoherent packages may break an app when performing major upgrades. Most of these issues can be fixed by following these instructions:
1. Delete the *bin* and *obj* folders.
1. Clear the package caches by executing [dotnet nuget locals all --clear](/dotnet/core/tools/dotnet-nuget-locals) from a command shell.
   Clearing the package caches can also be accomplished with the [nuget.exe](https://www.nuget.org/downloads) tool by executing the command `nuget locals all -clear`. *nuget.exe* isn't bundled with the Windows desktop operating system and must be obtained separately from the [NuGet website](https://www.nuget.org/downloads).
1. Restore and rebuild the project.
1. Delete all of the files in the deployment folder on the server prior to redeploying the app.
### Slow or hanging app
A *crash dump* is a snapshot of the system's memory and can help determine the cause of an app crash, startup failure, or slow app.
#### App crashes or encounters an exception
Obtain and analyze a dump from [Windows Error Reporting (WER)](/windows/desktop/wer/windows-error-reporting):
1. Create a folder to hold crash dump files at `c:\dumps`.
1. Run the [EnableDumps PowerShell script](https://github.com/dotnet/AspNetCore.Docs/blob/master/aspnetcore/host-and-deploy/windows-service/samples/scripts/EnableDumps.ps1) with the application executable name:
   ```powershell
   .\EnableDumps {APPLICATION EXE} c:\dumps
   ```
1. Run the app under the conditions that cause the crash to occur.
1. After the crash has occurred, run the [DisableDumps PowerShell script](https://github.com/dotnet/AspNetCore.Docs/blob/master/aspnetcore/host-and-deploy/windows-service/samples/scripts/DisableDumps.ps1):
   ```powershell
   .\DisableDumps {APPLICATION EXE}
   ```
After an app crashes and dump collection is complete, the app is allowed to terminate normally. The PowerShell script configures WER to collect up to five dumps per app.
> [!WARNING]
> Crash dumps might take up a large amount of disk space (up to several gigabytes each).
#### App hangs, fails during startup, or runs normally
When an app *hangs* (stops responding but doesn't crash), fails during startup, or runs normally, see [User-Mode Dump Files: Choosing the Best Tool](/windows-hardware/drivers/debugger/user-mode-dump-files#choosing-the-best-tool) to select an appropriate tool to produce the dump.
#### Analyze the dump
A dump can be analyzed using several approaches. For more information, see [Analyzing a User-Mode Dump File](/windows-hardware/drivers/debugger/analyzing-a-user-mode-dump-file).
## Additional resources
* [Kestrel endpoint configuration](xref:fundamentals/servers/kestrel#endpoint-configuration) (includes HTTPS configuration and SNI support)
* <xref:fundamentals/host/generic-host>
* <xref:test/troubleshoot>
::: moniker-end
::: moniker range="= aspnetcore-2.2"
An ASP.NET Core app can be hosted on Windows as a [Windows Service](/dotnet/framework/windows-services/introduction-to-windows-service-applications) without using IIS. When hosted as a Windows Service, the app automatically starts after server reboots.
[View or download sample code](https://github.com/dotnet/AspNetCore.Docs/tree/master/aspnetcore/host-and-deploy/windows-service/samples) ([how to download](xref:index#how-to-download-a-sample))
## Prerequisites
* [ASP.NET Core SDK 2.1 or later](https://dotnet.microsoft.com/download)
* [PowerShell 6.2 or later](https://github.com/PowerShell/PowerShell)
## App configuration
The app requires package references for [Microsoft.AspNetCore.Hosting.WindowsServices](https://www.nuget.org/packages/Microsoft.AspNetCore.Hosting.WindowsServices) and [Microsoft.Extensions.Logging.EventLog](https://www.nuget.org/packages/Microsoft.Extensions.Logging.EventLog).
To test and debug when running outside of a service, add code to determine if the app is running as a service or a console app. Inspect if the debugger is attached or a `--console` switch is present. If either condition is true (the app isn't run as a service), call <xref:Microsoft.AspNetCore.Hosting.WebHostExtensions.Run*>. If the conditions are false (the app is run as a service):
* Call <xref:System.IO.Directory.SetCurrentDirectory*> and use a path to the app's published location. Don't call <xref:System.IO.Directory.GetCurrentDirectory*> to obtain the path because a Windows Service app returns the *C:\\WINDOWS\\system32* folder when <xref:System.IO.Directory.GetCurrentDirectory*> is called. For more information, see the [Current directory and content root](#current-directory-and-content-root) section. This step is performed before the app is configured in `CreateWebHostBuilder`.
* Call <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*> to run the app as a service.
Because the [Command-line Configuration Provider](xref:fundamentals/configuration/index#command-line-configuration-provider) requires name-value pairs for command-line arguments, the `--console` switch is removed from the arguments before <xref:Microsoft.AspNetCore.WebHost.CreateDefaultBuilder*> receives the arguments.
To write to the Windows Event Log, add the EventLog provider to <xref:Microsoft.AspNetCore.Hosting.WebHostBuilder.ConfigureLogging*>. Set the logging level with the `Logging:LogLevel:Default` key in the *appsettings.Production.json* file.
In the following example from the sample app, `RunAsCustomService` is called instead of <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*> in order to handle lifetime events within the app. For more information, see the [Handle starting and stopping events](#handle-starting-and-stopping-events) section.
[!code-csharp[](windows-service/samples/2.x/AspNetCoreService/Program.cs?name=snippet_Program)]
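The snippet above is a code include from the sample and isn't reproduced inline here. A condensed sketch of the pattern described in this section might look like the following. It uses the plain `RunAsService` call rather than the sample's `RunAsCustomService`, and `Startup` is the app's existing startup class:

```csharp
using System.Diagnostics;
using System.IO;
using System.Linq;
using Microsoft.AspNetCore;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Hosting.WindowsServices;
using Microsoft.Extensions.Logging;

public class Program
{
    public static void Main(string[] args)
    {
        var isService = !(Debugger.IsAttached || args.Contains("--console"));

        if (isService)
        {
            // A Windows Service starts in C:\WINDOWS\system32, so point the current
            // directory at the folder containing the published executable instead.
            var pathToExe = Process.GetCurrentProcess().MainModule.FileName;
            Directory.SetCurrentDirectory(Path.GetDirectoryName(pathToExe));
        }

        // Remove the --console switch before the Command-line Configuration Provider sees it.
        var builder = CreateWebHostBuilder(args.Where(arg => arg != "--console").ToArray());

        var host = builder.Build();

        if (isService)
        {
            host.RunAsService();
        }
        else
        {
            host.Run();
        }
    }

    public static IWebHostBuilder CreateWebHostBuilder(string[] args) =>
        WebHost.CreateDefaultBuilder(args)
            .ConfigureLogging(logging => logging.AddEventLog())
            .UseStartup<Startup>();
}
```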
## Deployment type
For information and advice on deployment scenarios, see [.NET Core application deployment](/dotnet/core/deploying/).
### SDK
For a web app-based service that uses the Razor Pages or MVC frameworks, specify the Web SDK in the project file:
```xml
<Project Sdk="Microsoft.NET.Sdk.Web">
```
If the service only executes background tasks (for example, [hosted services](xref:fundamentals/host/hosted-services)), specify the Worker SDK in the project file:
```xml
<Project Sdk="Microsoft.NET.Sdk.Worker">
```
### Framework-dependent deployment (FDD)
Framework-dependent deployment (FDD) relies on the presence of a shared system-wide version of .NET Core on the target system. When the FDD scenario is adopted following the guidance in this article, the SDK produces an executable (*.exe*), called a *framework-dependent executable*.
The Windows [Runtime Identifier (RID)](/dotnet/core/rid-catalog) ([\<RuntimeIdentifier>](/dotnet/core/tools/csproj#runtimeidentifier)) contains the target framework. In the following example, the RID is set to `win7-x64`. The `<SelfContained>` property is set to `false`. These properties instruct the SDK to generate an executable (*.exe*) file for Windows and an app that depends on the shared .NET Core framework.
A *web.config* file, which is normally produced when publishing an ASP.NET Core app, is unnecessary for a Windows Services app. To disable the creation of the *web.config* file, add the `<IsTransformWebConfigDisabled>` property set to `true`.
```xml
<PropertyGroup>
<TargetFramework>netcoreapp2.2</TargetFramework>
<RuntimeIdentifier>win7-x64</RuntimeIdentifier>
<SelfContained>false</SelfContained>
<IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
</PropertyGroup>
```
### Self-contained deployment (SCD)
Self-contained deployment (SCD) doesn't rely on the presence of a shared framework on the host system. The runtime and the app's dependencies are deployed with the app.
A Windows [Runtime Identifier (RID)](/dotnet/core/rid-catalog) is included in the `<PropertyGroup>` that contains the target framework:
```xml
<RuntimeIdentifier>win7-x64</RuntimeIdentifier>
```
To publish for multiple RIDs:
* Provide the RIDs in a semicolon-delimited list.
* Use the property name [\<RuntimeIdentifiers>](/dotnet/core/tools/csproj#runtimeidentifiers) (plural).
For more information, see [.NET Core RID Catalog](/dotnet/core/rid-catalog).
A `<SelfContained>` property is set to `true`:
```xml
<SelfContained>true</SelfContained>
```
## Service user account
To create a user account for a service, use the [New-LocalUser](/powershell/module/microsoft.powershell.localaccounts/new-localuser) cmdlet from an administrative PowerShell 6 command shell.
On Windows 10 October 2018 Update (version 1809/build 10.0.17763) or later:
```powershell
New-LocalUser -Name {SERVICE NAME}
```
On Windows OS earlier than the Windows 10 October 2018 Update (version 1809/build 10.0.17763):
```console
powershell -Command "New-LocalUser -Name {SERVICE NAME}"
```
Provide a [strong password](/windows/security/threat-protection/security-policy-settings/password-must-meet-complexity-requirements) when prompted.
Unless the `-AccountExpires` parameter is supplied to the [New-LocalUser](/powershell/module/microsoft.powershell.localaccounts/new-localuser) cmdlet with an expiration <xref:System.DateTime>, the account doesn't expire.
For more information, see [Microsoft.PowerShell.LocalAccounts](/powershell/module/microsoft.powershell.localaccounts/) and [Service User Accounts](/windows/desktop/services/service-user-accounts).
An alternative approach to managing users when using Active Directory is to use Managed Service Accounts. For more information, see [Group Managed Service Accounts Overview](/windows-server/security/group-managed-service-accounts/group-managed-service-accounts-overview).
## Log on as a service rights
To establish *Log on as a service* rights for a service user account:
1. Open the Local Security Policy editor by running *secpol.msc*.
1. Expand the **Local Policies** node and select **User Rights Assignment**.
1. Open the **Log on as a service** policy.
1. Select **Add User or Group**.
1. Provide the object name (user account) using either of the following approaches:
1. Type the user account (`{DOMAIN OR COMPUTER NAME\USER}`) in the object name field and select **OK** to add the user to the policy.
1. Select **Advanced**. Select **Find Now**. Select the user account from the list. Select **OK**. Select **OK** again to add the user to the policy.
1. Select **OK** or **Apply** to accept the changes.
## Create and manage the Windows Service
### Create a service
Use PowerShell commands to register a service. From an administrative PowerShell 6 command shell, execute the following commands:
```powershell
$acl = Get-Acl "{EXE PATH}"
$aclRuleArgs = {DOMAIN OR COMPUTER NAME\USER}, "Read,Write,ReadAndExecute", "ContainerInherit,ObjectInherit", "None", "Allow"
$accessRule = New-Object System.Security.AccessControl.FileSystemAccessRule($aclRuleArgs)
$acl.SetAccessRule($accessRule)
$acl | Set-Acl "{EXE PATH}"
New-Service -Name {SERVICE NAME} -BinaryPathName {EXE FILE PATH} -Credential {DOMAIN OR COMPUTER NAME\USER} -Description "{DESCRIPTION}" -DisplayName "{DISPLAY NAME}" -StartupType Automatic
```
* `{EXE PATH}`: Path to the app's folder on the host (for example, `d:\myservice`). Don't include the app's executable in the path. A trailing slash isn't required.
* `{DOMAIN OR COMPUTER NAME\USER}`: Service user account (for example, `Contoso\ServiceUser`).
* `{SERVICE NAME}`: Service name (for example, `MyService`).
* `{EXE FILE PATH}`: The app's executable path (for example, `d:\myservice\myservice.exe`). Include the executable's file name with extension.
* `{DESCRIPTION}`: Service description (for example, `My sample service`).
* `{DISPLAY NAME}`: Service display name (for example, `My Service`).
### Start a service
Start a service with the following PowerShell 6 command:
```powershell
Start-Service -Name {SERVICE NAME}
```
The command takes a few seconds to start the service.
### Determine a service's status
To check the status of a service, use the following PowerShell 6 command:
```powershell
Get-Service -Name {SERVICE NAME}
```
The status is reported as one of the following values:
* `Starting`
* `Running`
* `Stopping`
* `Stopped`
### Stop a service
Stop a service with the following PowerShell 6 command:
```powershell
Stop-Service -Name {SERVICE NAME}
```
### Remove a service
After a short delay to allow the service to stop, remove the service with the following PowerShell 6 command:
```powershell
Remove-Service -Name {SERVICE NAME}
```
## Handle starting and stopping events
To handle <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService.OnStarting*>, <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService.OnStarted*>, and <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService.OnStopping*> events:
1. Create a class that derives from <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService> with the `OnStarting`, `OnStarted`, and `OnStopping` methods:
[!code-csharp[](windows-service/samples/2.x/AspNetCoreService/CustomWebHostService.cs?name=snippet_CustomWebHostService)]
2. Create an extension method for <xref:Microsoft.AspNetCore.Hosting.IWebHost> that passes the `CustomWebHostService` to <xref:System.ServiceProcess.ServiceBase.Run*>:
[!code-csharp[](windows-service/samples/2.x/AspNetCoreService/WebHostServiceExtensions.cs?name=ExtensionsClass)]
3. In `Program.Main`, call the `RunAsCustomService` extension method instead of <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*>:
```csharp
host.RunAsCustomService();
```
To see the location of <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*> in `Program.Main`, refer to the code sample shown in the [Deployment type](#deployment-type) section.
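The two includes in the preceding steps aren't rendered inline here. A hedged sketch of what the derived service class and the extension method can look like follows; the logging calls inside the overrides are illustrative assumptions, not the sample's exact code:
```csharp
using System.ServiceProcess;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Hosting.WindowsServices;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
internal class CustomWebHostService : WebHostService
{
    private readonly ILogger _logger;
    public CustomWebHostService(IWebHost host) : base(host)
    {
        _logger = host.Services.GetRequiredService<ILogger<CustomWebHostService>>();
    }
    protected override void OnStarting(string[] args)
    {
        _logger.LogInformation("OnStarting method called.");
        base.OnStarting(args);
    }
    protected override void OnStarted()
    {
        base.OnStarted();
        _logger.LogInformation("OnStarted method called.");
    }
    protected override void OnStopping()
    {
        _logger.LogInformation("OnStopping method called.");
        base.OnStopping();
    }
}
public static class WebHostServiceExtensions
{
    public static void RunAsCustomService(this IWebHost host)
    {
        var webHostService = new CustomWebHostService(host);
        ServiceBase.Run(webHostService);
    }
}
```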
## Proxy server and load balancer scenarios
Services that interact with requests from the Internet or a corporate network and are behind a proxy or load balancer might require additional configuration. For more information, see <xref:host-and-deploy/proxy-load-balancer>.
## Configure endpoints
By default, ASP.NET Core binds to `http://localhost:5000`. Configure the URL and port by setting the `ASPNETCORE_URLS` environment variable.
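For local testing, the binding can also be set in code rather than through the environment variable. A minimal sketch, assuming a standard `CreateWebHostBuilder` and `Startup` class; the URL shown is only an example:
```csharp
public static IWebHostBuilder CreateWebHostBuilder(string[] args) =>
    WebHost.CreateDefaultBuilder(args)
        // Example only: bind to a specific URL and port instead of the default.
        .UseUrls("http://localhost:8080")
        .UseStartup<Startup>();
```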
For additional URL and port configuration approaches, see the relevant server article:
* <xref:fundamentals/servers/kestrel#endpoint-configuration>
* <xref:fundamentals/servers/httpsys#configure-windows-server>
The preceding guidance covers support for HTTPS endpoints. For example, configure the app for HTTPS when authentication is used with a Windows Service.
> [!NOTE]
> Use of the ASP.NET Core HTTPS development certificate to secure a service endpoint isn't supported.
## Current directory and content root
The current working directory returned by calling <xref:System.IO.Directory.GetCurrentDirectory*> for a Windows Service is the *C:\\WINDOWS\\system32* folder. The *system32* folder isn't a suitable location to store a service's files (for example, settings files). Use one of the following approaches to maintain and access a service's assets and settings files.
### Set the content root path to the app's folder
The <xref:Microsoft.Extensions.Hosting.IHostingEnvironment.ContentRootPath*> is the same path provided to the `binPath` argument when a service is created. Instead of calling `GetCurrentDirectory` to create paths to settings files, call <xref:System.IO.Directory.SetCurrentDirectory*> with the path to the app's [content root](xref:fundamentals/index#content-root).
In `Program.Main`, determine the path to the folder of the service's executable and use the path to establish the app's content root:
```csharp
var pathToExe = Process.GetCurrentProcess().MainModule.FileName;
var pathToContentRoot = Path.GetDirectoryName(pathToExe);
Directory.SetCurrentDirectory(pathToContentRoot);
CreateWebHostBuilder(args)
.Build()
.RunAsService();
```
### Store a service's files in a suitable location on disk
When using an <xref:Microsoft.Extensions.Configuration.IConfigurationBuilder>, call <xref:Microsoft.Extensions.Configuration.FileConfigurationExtensions.SetBasePath*> with an absolute path to the folder containing the files.
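A minimal sketch of this approach, assuming a JSON settings file named *appsettings.json* next to the executable (requires `System.Diagnostics`, `System.IO`, and `Microsoft.Extensions.Configuration`):
```csharp
var pathToExe = Process.GetCurrentProcess().MainModule.FileName;
var pathToContentRoot = Path.GetDirectoryName(pathToExe);
// Build configuration from files in the executable's folder, not the current directory.
var config = new ConfigurationBuilder()
    .SetBasePath(pathToContentRoot)
    .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
    .Build();
```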
## Troubleshoot
To troubleshoot a Windows Service app, see <xref:test/troubleshoot>.
### Common errors
* An old or pre-release version of PowerShell is in use.
* The registered service doesn't use the app's **published** output from the [dotnet publish](/dotnet/core/tools/dotnet-publish) command. Output of the [dotnet build](/dotnet/core/tools/dotnet-build) command isn't supported for app deployment. Published assets are found in either of the following folders depending on the deployment type:
* *bin/Release/{TARGET FRAMEWORK}/publish* (FDD)
* *bin/Release/{TARGET FRAMEWORK}/{RUNTIME IDENTIFIER}/publish* (SCD)
* The service isn't in the RUNNING state.
* The paths to resources that the app uses (for example, certificates) are incorrect. The base path of a Windows Service is *c:\\Windows\\System32*.
* The user doesn't have *Log on as a service* rights.
* The user's password is expired or incorrectly passed when executing the `New-Service` PowerShell command.
* The app requires ASP.NET Core authentication but isn't configured for secure connections (HTTPS).
* The request URL port is incorrect or not configured correctly in the app.
### System and Application Event Logs
Access the System and Application Event Logs:
1. Open the Start menu, search for *Event Viewer*, and select the **Event Viewer** app.
1. In **Event Viewer**, open the **Windows Logs** node.
1. Select **System** to open the System Event Log. Select **Application** to open the Application Event Log.
1. Search for errors associated with the failing app.
### Run the app at a command prompt
Many startup errors don't produce useful information in the event logs. You can find the cause of some errors by running the app at a command prompt on the hosting system. To log additional detail from the app, lower the [log level](xref:fundamentals/logging/index#log-level) or run the app in the [Development environment](xref:fundamentals/environments).
### Clear package caches
A functioning app may fail immediately after the .NET Core SDK is upgraded on the development machine or package versions are changed within the app. In some cases, incoherent packages may break an app when performing major upgrades. Most of these issues can be fixed by following these instructions:
1. Delete the *bin* and *obj* folders.
1. Clear the package caches by executing [dotnet nuget locals all --clear](/dotnet/core/tools/dotnet-nuget-locals) from a command shell.
Clearing package caches can also be accomplished with the [nuget.exe](https://www.nuget.org/downloads) tool by executing the command `nuget locals all -clear`. *nuget.exe* isn't bundled with the Windows desktop operating system and must be obtained separately from the [NuGet website](https://www.nuget.org/downloads).
1. Restore and rebuild the project.
1. Delete all of the files in the deployment folder on the server prior to redeploying the app.
### Slow or hanging app
A *crash dump* is a snapshot of the system's memory and can help determine the cause of an app crash, startup failure, or slow app.
#### App crashes or encounters an exception
Obtain and analyze a dump from [Windows Error Reporting (WER)](/windows/desktop/wer/windows-error-reporting):
1. Create a folder to hold crash dump files at `c:\dumps`.
1. Run the [EnableDumps PowerShell script](https://github.com/dotnet/AspNetCore.Docs/blob/master/aspnetcore/host-and-deploy/windows-service/scripts/EnableDumps.ps1) with the application executable name:
```console
.\EnableDumps {APPLICATION EXE} c:\dumps
```
1. Run the app under the conditions that cause the crash to occur.
1. After the crash has occurred, run the [DisableDumps PowerShell script](https://github.com/dotnet/AspNetCore.Docs/blob/master/aspnetcore/host-and-deploy/windows-service/scripts/DisableDumps.ps1):
```console
.\DisableDumps {APPLICATION EXE}
```
After an app crashes and dump collection is complete, the app is allowed to terminate normally. The PowerShell script configures WER to collect up to five dumps per app.
> [!WARNING]
> Crash dumps might take up a large amount of disk space (up to several gigabytes each).
#### App hangs, fails during startup, or runs normally
When an app *hangs* (stops responding but doesn't crash), fails during startup, or runs normally, see [User-Mode Dump Files: Choosing the Best Tool](/windows-hardware/drivers/debugger/user-mode-dump-files#choosing-the-best-tool) to select an appropriate tool to produce the dump.
#### Analyze the dump
A dump can be analyzed using several approaches. For more information, see [Analyzing a User-Mode Dump File](/windows-hardware/drivers/debugger/analyzing-a-user-mode-dump-file).
## Additional resources
* [Kestrel endpoint configuration](xref:fundamentals/servers/kestrel#endpoint-configuration) (includes HTTPS configuration and SNI support)
* <xref:fundamentals/host/web-host>
* <xref:test/troubleshoot>
::: moniker-end
::: moniker range="< aspnetcore-2.2"
An ASP.NET Core app can be hosted on Windows as a [Windows Service](/dotnet/framework/windows-services/introduction-to-windows-service-applications) without using IIS. When hosted as a Windows Service, the app automatically starts after server reboots.
[View or download sample code](https://github.com/dotnet/AspNetCore.Docs/tree/master/aspnetcore/host-and-deploy/windows-service/samples) ([how to download](xref:index#how-to-download-a-sample))
## Prerequisites
* [ASP.NET Core SDK 2.1 or later](https://dotnet.microsoft.com/download)
* [PowerShell 6.2 or later](https://github.com/PowerShell/PowerShell)
## App configuration
The app requires package references for [Microsoft.AspNetCore.Hosting.WindowsServices](https://www.nuget.org/packages/Microsoft.AspNetCore.Hosting.WindowsServices) and [Microsoft.Extensions.Logging.EventLog](https://www.nuget.org/packages/Microsoft.Extensions.Logging.EventLog).
To test and debug when running outside of a service, add code to determine whether the app is running as a service or as a console app. Check whether the debugger is attached or a `--console` switch is present. If either condition is true (the app isn't run as a service), call <xref:Microsoft.AspNetCore.Hosting.WebHostExtensions.Run*>. If both conditions are false (the app is run as a service):
* Call <xref:System.IO.Directory.SetCurrentDirectory*> and use a path to the app's published location. Don't call <xref:System.IO.Directory.GetCurrentDirectory*> to obtain the path because a Windows Service app returns the *C:\\WINDOWS\\system32* folder when <xref:System.IO.Directory.GetCurrentDirectory*> is called. For more information, see the [Current directory and content root](#current-directory-and-content-root) section. This step is performed before the app is configured in `CreateWebHostBuilder`.
* Call <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*> to run the app as a service.
Because the [Command-line Configuration Provider](xref:fundamentals/configuration/index#command-line-configuration-provider) requires name-value pairs for command-line arguments, the `--console` switch is removed from the arguments before <xref:Microsoft.AspNetCore.WebHost.CreateDefaultBuilder*> receives the arguments.
To write to the Windows Event Log, add the EventLog provider to <xref:Microsoft.AspNetCore.Hosting.WebHostBuilder.ConfigureLogging*>. Set the logging level with the `Logging:LogLevel:Default` key in the *appsettings.Production.json* file.
In the following example from the sample app, `RunAsCustomService` is called instead of <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*> in order to handle lifetime events within the app. For more information, see the [Handle starting and stopping events](#handle-starting-and-stopping-events) section.
[!code-csharp[](windows-service/samples/2.x/AspNetCoreService/Program.cs?name=snippet_Program)]
## Deployment type
For information and advice on deployment scenarios, see [.NET Core application deployment](/dotnet/core/deploying/).
### SDK
For a web app-based service that uses the Razor Pages or MVC frameworks, specify the Web SDK in the project file:
```xml
<Project Sdk="Microsoft.NET.Sdk.Web">
```
If the service only executes background tasks (for example, [hosted services](xref:fundamentals/host/hosted-services)), specify the Worker SDK in the project file:
```xml
<Project Sdk="Microsoft.NET.Sdk.Worker">
```
### Framework-dependent deployment (FDD)
Framework-dependent deployment (FDD) relies on the presence of a shared system-wide version of .NET Core on the target system. When the FDD scenario is adopted following the guidance in this article, the SDK produces an executable (*.exe*), called a *framework-dependent executable*.
The Windows [Runtime Identifier (RID)](/dotnet/core/rid-catalog) ([\<RuntimeIdentifier>](/dotnet/core/tools/csproj#runtimeidentifier)) is included in the `<PropertyGroup>` that contains the target framework. In the following example, the RID is set to `win7-x64`. The `<SelfContained>` property is set to `false`. These properties instruct the SDK to generate an executable (*.exe*) file for Windows and an app that depends on the shared .NET Core framework.
The `<UseAppHost>` property is set to `true`. This property provides the service with an activation path (an executable, *.exe*) for an FDD.
A *web.config* file, which is normally produced when publishing an ASP.NET Core app, is unnecessary for a Windows Services app. To disable the creation of the *web.config* file, add the `<IsTransformWebConfigDisabled>` property set to `true`.
```xml
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
<RuntimeIdentifier>win7-x64</RuntimeIdentifier>
<UseAppHost>true</UseAppHost>
<SelfContained>false</SelfContained>
<IsTransformWebConfigDisabled>true</IsTransformWebConfigDisabled>
</PropertyGroup>
```
### Self-contained deployment (SCD)
Self-contained deployment (SCD) doesn't rely on the presence of a shared framework on the host system. The runtime and the app's dependencies are deployed with the app.
A Windows [Runtime Identifier (RID)](/dotnet/core/rid-catalog) is included in the `<PropertyGroup>` that contains the target framework:
```xml
<RuntimeIdentifier>win7-x64</RuntimeIdentifier>
```
To publish for multiple RIDs:
* Provide the RIDs in a semicolon-delimited list.
* Use the property name [\<RuntimeIdentifiers>](/dotnet/core/tools/csproj#runtimeidentifiers) (plural).
For more information, see [.NET Core RID Catalog](/dotnet/core/rid-catalog).
A `<SelfContained>` property is set to `true`:
```xml
<SelfContained>true</SelfContained>
```
## Service user account
To create a user account for a service, use the [New-LocalUser](/powershell/module/microsoft.powershell.localaccounts/new-localuser) cmdlet from an administrative PowerShell 6 command shell.
On Windows 10 October 2018 Update (version 1809/build 10.0.17763) or later:
```powershell
New-LocalUser -Name {SERVICE NAME}
```
On Windows OS earlier than the Windows 10 October 2018 Update (version 1809/build 10.0.17763):
```console
powershell -Command "New-LocalUser -Name {SERVICE NAME}"
```
Provide a [strong password](/windows/security/threat-protection/security-policy-settings/password-must-meet-complexity-requirements) when prompted.
Unless the `-AccountExpires` parameter is supplied to the [New-LocalUser](/powershell/module/microsoft.powershell.localaccounts/new-localuser) cmdlet with an expiration <xref:System.DateTime>, the account doesn't expire.
For more information, see [Microsoft.PowerShell.LocalAccounts](/powershell/module/microsoft.powershell.localaccounts/) and [Service User Accounts](/windows/desktop/services/service-user-accounts).
An alternative approach to managing users when using Active Directory is to use Managed Service Accounts. For more information, see [Group Managed Service Accounts Overview](/windows-server/security/group-managed-service-accounts/group-managed-service-accounts-overview).
## Log on as a service rights
To establish *Log on as a service* rights for a service user account:
1. Open the Local Security Policy editor by running *secpol.msc*.
1. Expand the **Local Policies** node and select **User Rights Assignment**.
1. Open the **Log on as a service** policy.
1. Select **Add User or Group**.
1. Provide the object name (user account) using either of the following approaches:
1. Type the user account (`{DOMAIN OR COMPUTER NAME\USER}`) in the object name field and select **OK** to add the user to the policy.
1. Select **Advanced**. Select **Find Now**. Select the user account from the list. Select **OK**. Select **OK** again to add the user to the policy.
1. Select **OK** or **Apply** to accept the changes.
## Create and manage the Windows Service
### Create a service
Use PowerShell commands to register a service. From an administrative PowerShell 6 command shell, execute the following commands:
```powershell
$acl = Get-Acl "{EXE PATH}"
$aclRuleArgs = {DOMAIN OR COMPUTER NAME\USER}, "Read,Write,ReadAndExecute", "ContainerInherit,ObjectInherit", "None", "Allow"
$accessRule = New-Object System.Security.AccessControl.FileSystemAccessRule($aclRuleArgs)
$acl.SetAccessRule($accessRule)
$acl | Set-Acl "{EXE PATH}"
New-Service -Name {SERVICE NAME} -BinaryPathName {EXE FILE PATH} -Credential {DOMAIN OR COMPUTER NAME\USER} -Description "{DESCRIPTION}" -DisplayName "{DISPLAY NAME}" -StartupType Automatic
```
* `{EXE PATH}`: Path to the app's folder on the host (for example, `d:\myservice`). Don't include the app's executable in the path. A trailing slash isn't required.
* `{DOMAIN OR COMPUTER NAME\USER}`: Service user account (for example, `Contoso\ServiceUser`).
* `{SERVICE NAME}`: Service name (for example, `MyService`).
* `{EXE FILE PATH}`: The app's executable path (for example, `d:\myservice\myservice.exe`). Include the executable's file name with extension.
* `{DESCRIPTION}`: Service description (for example, `My sample service`).
* `{DISPLAY NAME}`: Service display name (for example, `My Service`).
### Start a service
Start a service with the following PowerShell 6 command:
```powershell
Start-Service -Name {SERVICE NAME}
```
The command takes a few seconds to start the service.
### Determine a service's status
To check the status of a service, use the following PowerShell 6 command:
```powershell
Get-Service -Name {SERVICE NAME}
```
The status is reported as one of the following values:
* `Starting`
* `Running`
* `Stopping`
* `Stopped`
### Stop a service
Stop a service with the following PowerShell 6 command:
```powershell
Stop-Service -Name {SERVICE NAME}
```
### Remove a service
After a short delay to allow the service to stop, remove the service with the following PowerShell 6 command:
```powershell
Remove-Service -Name {SERVICE NAME}
```
## Handle starting and stopping events
To handle <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService.OnStarting*>, <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService.OnStarted*>, and <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService.OnStopping*> events:
1. Create a class that derives from <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostService> with the `OnStarting`, `OnStarted`, and `OnStopping` methods:
[!code-csharp[](windows-service/samples/2.x/AspNetCoreService/CustomWebHostService.cs?name=snippet_CustomWebHostService)]
2. Create an extension method for <xref:Microsoft.AspNetCore.Hosting.IWebHost> that passes the `CustomWebHostService` to <xref:System.ServiceProcess.ServiceBase.Run*>:
[!code-csharp[](windows-service/samples/2.x/AspNetCoreService/WebHostServiceExtensions.cs?name=ExtensionsClass)]
3. In `Program.Main`, call the `RunAsCustomService` extension method instead of <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*>:
```csharp
host.RunAsCustomService();
```
To see the location of <xref:Microsoft.AspNetCore.Hosting.WindowsServices.WebHostWindowsServiceExtensions.RunAsService*> in `Program.Main`, refer to the code sample shown in the [Deployment type](#deployment-type) section.
## Proxy server and load balancer scenarios
Services that interact with requests from the Internet or a corporate network and are behind a proxy or load balancer might require additional configuration. For more information, see <xref:host-and-deploy/proxy-load-balancer>.
## Configure endpoints
By default, ASP.NET Core binds to `http://localhost:5000`. Configure the URL and port by setting the `ASPNETCORE_URLS` environment variable.
For additional URL and port configuration approaches, see the relevant server article:
* <xref:fundamentals/servers/kestrel#endpoint-configuration>
* <xref:fundamentals/servers/httpsys#configure-windows-server>
The preceding guidance covers support for HTTPS endpoints. For example, configure the app for HTTPS when authentication is used with a Windows Service.
> [!NOTE]
> Use of the ASP.NET Core HTTPS development certificate to secure a service endpoint isn't supported.
## Current directory and content root
The current working directory returned by calling <xref:System.IO.Directory.GetCurrentDirectory*> for a Windows Service is the *C:\\WINDOWS\\system32* folder. The *system32* folder isn't a suitable location to store a service's files (for example, settings files). Use one of the following approaches to maintain and access a service's assets and settings files.
### Set the content root path to the app's folder
The <xref:Microsoft.Extensions.Hosting.IHostingEnvironment.ContentRootPath*> is the same path provided to the `binPath` argument when a service is created. Instead of calling `GetCurrentDirectory` to create paths to settings files, call <xref:System.IO.Directory.SetCurrentDirectory*> with the path to the app's [content root](xref:fundamentals/index#content-root).
In `Program.Main`, determine the path to the folder of the service's executable and use the path to establish the app's content root:
```csharp
var pathToExe = Process.GetCurrentProcess().MainModule.FileName;
var pathToContentRoot = Path.GetDirectoryName(pathToExe);
Directory.SetCurrentDirectory(pathToContentRoot);
CreateWebHostBuilder(args)
.Build()
.RunAsService();
```
### Store a service's files in a suitable location on disk
When using an <xref:Microsoft.Extensions.Configuration.IConfigurationBuilder>, call <xref:Microsoft.Extensions.Configuration.FileConfigurationExtensions.SetBasePath*> with an absolute path to the folder containing the files.
## Troubleshoot
To troubleshoot a Windows Service app, see <xref:test/troubleshoot>.
### Common errors
* An old or pre-release version of PowerShell is in use.
* The registered service doesn't use the app's **published** output from the [dotnet publish](/dotnet/core/tools/dotnet-publish) command. Output of the [dotnet build](/dotnet/core/tools/dotnet-build) command isn't supported for app deployment. Published assets are found in either of the following folders depending on the deployment type:
* *bin/Release/{TARGET FRAMEWORK}/publish* (FDD)
* *bin/Release/{TARGET FRAMEWORK}/{RUNTIME IDENTIFIER}/publish* (SCD)
* The service isn't in the RUNNING state.
* The paths to resources that the app uses (for example, certificates) are incorrect. The base path of a Windows Service is *c:\\Windows\\System32*.
* The user doesn't have *Log on as a service* rights.
* The user's password is expired or incorrectly passed when executing the `New-Service` PowerShell command.
* The app requires ASP.NET Core authentication but isn't configured for secure connections (HTTPS).
* The request URL port is incorrect or not configured correctly in the app.
### System and Application Event Logs
Access the System and Application Event Logs:
1. Open the Start menu, search for *Event Viewer*, and select the **Event Viewer** app.
1. In **Event Viewer**, open the **Windows Logs** node.
1. Select **System** to open the System Event Log. Select **Application** to open the Application Event Log.
1. Search for errors associated with the failing app.
### Run the app at a command prompt
Many startup errors don't produce useful information in the event logs. You can find the cause of some errors by running the app at a command prompt on the hosting system. To log additional detail from the app, lower the [log level](xref:fundamentals/logging/index#log-level) or run the app in the [Development environment](xref:fundamentals/environments).
### Clear package caches
A functioning app may fail immediately after the .NET Core SDK is upgraded on the development machine or package versions are changed within the app. In some cases, incoherent packages may break an app when performing major upgrades. Most of these issues can be fixed by following these instructions:
1. Delete the *bin* and *obj* folders.
1. Clear the package caches by executing [dotnet nuget locals all --clear](/dotnet/core/tools/dotnet-nuget-locals) from a command shell.
Clearing package caches can also be accomplished with the [nuget.exe](https://www.nuget.org/downloads) tool by executing the command `nuget locals all -clear`. *nuget.exe* isn't bundled with the Windows desktop operating system and must be obtained separately from the [NuGet website](https://www.nuget.org/downloads).
1. Restore and rebuild the project.
1. Delete all of the files in the deployment folder on the server prior to redeploying the app.
### Slow or hanging app
A *crash dump* is a snapshot of the system's memory and can help determine the cause of an app crash, startup failure, or slow app.
#### App crashes or encounters an exception
Obtain and analyze a dump from [Windows Error Reporting (WER)](/windows/desktop/wer/windows-error-reporting):
1. Create a folder to hold crash dump files at `c:\dumps`.
1. Run the [EnableDumps PowerShell script](https://github.com/dotnet/AspNetCore.Docs/blob/master/aspnetcore/host-and-deploy/windows-service/scripts/EnableDumps.ps1) with the application executable name:
```console
.\EnableDumps {APPLICATION EXE} c:\dumps
```
1. Run the app under the conditions that cause the crash to occur.
1. After the crash has occurred, run the [DisableDumps PowerShell script](https://github.com/dotnet/AspNetCore.Docs/blob/master/aspnetcore/host-and-deploy/windows-service/scripts/DisableDumps.ps1):
```console
.\DisableDumps {APPLICATION EXE}
```
After an app crashes and dump collection is complete, the app is allowed to terminate normally. The PowerShell script configures WER to collect up to five dumps per app.
> [!WARNING]
> Crash dumps might take up a large amount of disk space (up to several gigabytes each).
#### App hangs, fails during startup, or runs normally
When an app *hangs* (stops responding but doesn't crash), fails during startup, or runs normally, see [User-Mode Dump Files: Choosing the Best Tool](/windows-hardware/drivers/debugger/user-mode-dump-files#choosing-the-best-tool) to select an appropriate tool to produce the dump.
#### Analyze the dump
A dump can be analyzed using several approaches. For more information, see [Analyzing a User-Mode Dump File](/windows-hardware/drivers/debugger/analyzing-a-user-mode-dump-file).
## Additional resources
* [Kestrel endpoint configuration](xref:fundamentals/servers/kestrel#endpoint-configuration) (includes HTTPS configuration and SNI support)
* <xref:fundamentals/host/web-host>
* <xref:test/troubleshoot>
::: moniker-end
| {
"pile_set_name": "Github"
} |
<Page x:Class="BluetoothLEExplorer.Views.Shell"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:Controls="using:Template10.Controls"
xmlns:Core="using:Microsoft.Xaml.Interactions.Core"
xmlns:Interactivity="using:Microsoft.Xaml.Interactivity"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:local="using:BluetoothLEExplorer.Views"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:views="using:BluetoothLEExplorer.Views"
xmlns:vm="using:BluetoothLEExplorer.ViewModels"
xmlns:myconverters="using:BluetoothLEExplorer.Services.Converters"
Background="{ThemeResource ApplicationPageBackgroundThemeBrush}"
mc:Ignorable="d">
<Page.DataContext>
<vm:ShellViewModel x:Name="ViewModel" />
</Page.DataContext>
<Page.Resources>
<myconverters:ShellFontConverter x:Key="StrikethroughConverter" />
</Page.Resources>
<Controls:HamburgerMenu x:Name="MyHamburgerMenu"
VisualStateNarrowMinWidth="{StaticResource NarrowMinWidth}"
VisualStateNormalMinWidth="{StaticResource NormalMinWidth}"
>
<Controls:HamburgerMenu.PrimaryButtons>
<!-- mainpage button -->
<Controls:HamburgerButtonInfo ClearHistory="True" PageType="views:Discover" >
<StackPanel Orientation="Horizontal">
<SymbolIcon Width="48" Height="48"
Symbol="Home" />
<TextBlock Margin="12,0,0,0" VerticalAlignment="Center"
Text="Discover and Pair" TextDecorations="{x:Bind ViewModel.IsCentralRoleSupported, Converter={StaticResource StrikethroughConverter}}" />
</StackPanel>
</Controls:HamburgerButtonInfo>
<Controls:HamburgerButtonInfo ClearHistory="True" PageType="views:VirtualPeripheralPage" >
<StackPanel Orientation="Horizontal">
<SymbolIcon Width="48" Height="48"
Symbol="Home" />
<TextBlock Margin="12,0,0,0" VerticalAlignment="Center"
Text="Virtual Peripheral" TextDecorations="{x:Bind ViewModel.IsPeripheralRoleSupported, Converter={StaticResource StrikethroughConverter}}" />
</StackPanel>
</Controls:HamburgerButtonInfo>
<Controls:HamburgerButtonInfo ClearHistory="True" PageType="views:VirtualKeyboardPage" >
<StackPanel Orientation="Horizontal">
<SymbolIcon Width="48" Height="48"
Symbol="Home" />
<TextBlock Margin="12,0,0,0" VerticalAlignment="Center"
Text="Virtual Keyboard" TextDecorations="{x:Bind ViewModel.IsPeripheralRoleSupported, Converter={StaticResource StrikethroughConverter}}" />
</StackPanel>
</Controls:HamburgerButtonInfo>
<Controls:HamburgerButtonInfo ClearHistory="True" PageType="views:Beacon" Visibility="Collapsed">
<StackPanel Orientation="Horizontal">
<SymbolIcon Width="48" Height="48"
Symbol="Home" />
<TextBlock Margin="12,0,0,0" VerticalAlignment="Center"
Text="Beacon"/>
</StackPanel>
</Controls:HamburgerButtonInfo>
</Controls:HamburgerMenu.PrimaryButtons>
<Controls:HamburgerMenu.SecondaryButtons>
<!-- settingspage button -->
<Controls:HamburgerButtonInfo x:Name="SettingsButton" PageParameter="0"
PageType="views:SettingsPage">
<Controls:HamburgerButtonInfo.NavigationTransitionInfo>
<SuppressNavigationTransitionInfo />
</Controls:HamburgerButtonInfo.NavigationTransitionInfo>
<StackPanel Orientation="Horizontal">
<SymbolIcon Width="48" Height="48"
Symbol="Setting" />
<TextBlock Margin="12,0,0,0" VerticalAlignment="Center"
Text="Settings" />
</StackPanel>
</Controls:HamburgerButtonInfo>
</Controls:HamburgerMenu.SecondaryButtons>
</Controls:HamburgerMenu>
</Page>
| {
"pile_set_name": "Github"
} |
import { TestBed } from '@angular/core/testing';
import { of } from 'rxjs';
import { createDummies, fastTestSetup } from '../../../../test/helpers';
import { Contribution } from '../../../core/contribution';
import { VcsCommitItemDummy } from '../../../core/dummies';
import { GitGetHistoryResult } from '../../../core/git';
import { datetime, DateUnits } from '../../../libs/datetime';
import { GitService, SharedModule } from '../../shared';
import {
VCS_COMMIT_CONTRIBUTION_DATABASE,
VcsCommitContributionDatabase,
VcsCommitContributionDatabaseProvider,
} from './vcs-commit-contribution-database';
import { VcsCommitContributionMeasurement } from './vcs-commit-contribution-measurement';
describe('browser.vcs.vcsLocal.VcsCommitContributionMeasurement', () => {
let measurement: VcsCommitContributionMeasurement;
let git: GitService;
let contributionDB: VcsCommitContributionDatabase;
const commitItemDummy = new VcsCommitItemDummy();
const keyGenerator = (date: Date) => `${date.getFullYear()}.${date.getMonth()}.${date.getDate()}`;
function createKeysForMonth(monthDate: Date): Date[] {
const keys: Date[] = [];
const year = monthDate.getFullYear();
const month = monthDate.getMonth();
const indexDate = datetime.getFirstDateOfMonth(year, month);
for (let i = 0; i < datetime.getDaysInMonth(year, month); i++) {
keys.push(datetime.copy(indexDate));
datetime.add(indexDate, DateUnits.DAY, 1);
}
return keys;
}
fastTestSetup();
beforeAll(() => {
TestBed.configureTestingModule({
imports: [
SharedModule,
],
providers: [
VcsCommitContributionDatabaseProvider,
VcsCommitContributionMeasurement,
],
});
});
beforeEach(() => {
measurement = TestBed.get(VcsCommitContributionMeasurement);
git = TestBed.get(GitService);
contributionDB = TestBed.get(VCS_COMMIT_CONTRIBUTION_DATABASE);
});
afterEach(async () => {
await contributionDB.contributions.clear();
});
describe('measure', () => {
it('should throw error if first key is not a valid date.', async () => {
let error = null;
try {
await measurement.measure([undefined], keyGenerator);
} catch (err) {
error = err;
}
expect(error).not.toBeNull();
});
it('should contribution exists in database at input month which is not current month, '
+ 'return cache from database.', async () => {
const contribution: Contribution = { items: {} };
spyOn(contributionDB, 'isContributionExistsForMonth').and.returnValue(Promise.resolve(true));
spyOn(contributionDB, 'getContributionForMonth').and.returnValue(contribution);
const prevMonth = datetime.today();
datetime.add(prevMonth, DateUnits.MONTH, -1);
const result = await measurement.measure([prevMonth], keyGenerator);
expect(result).toEqual(contribution);
});
it('should get commits since first date of month until end date of month, and makes '
+ 'contribution.', async () => {
const today = datetime.today();
const commits = createDummies(commitItemDummy, 10);
const keys = createKeysForMonth(today);
spyOn(contributionDB, 'isContributionExistsForMonth').and.returnValue(Promise.resolve(false));
spyOn(git, 'getCommitHistory').and.returnValue(of({
history: commits,
next: null,
previous: null,
} as GitGetHistoryResult));
const result = await measurement.measure(keys, keyGenerator);
expect(result.items[keyGenerator(today)]).toEqual(10);
});
});
});
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
"""
Copyright (c) 2006-2020 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
from lib.core.exception import SqlmapUnsupportedFeatureException
from plugins.generic.filesystem import Filesystem as GenericFilesystem
class Filesystem(GenericFilesystem):
def readFile(self, remoteFile):
errMsg = "on SAP MaxDB reading of files is not supported"
raise SqlmapUnsupportedFeatureException(errMsg)
def writeFile(self, localFile, remoteFile, fileType=None, forceCheck=False):
errMsg = "on SAP MaxDB writing of files is not supported"
raise SqlmapUnsupportedFeatureException(errMsg)
| {
"pile_set_name": "Github"
} |
// cmcstl2 - A concept-enabled C++ standard library
//
// Copyright Eric Niebler 2014
// Copyright Casey Carter 2015
//
// Use, modification and distribution is subject to the
// Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Project home: https://github.com/caseycarter/cmcstl2
//
//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
#ifndef STL2_DETAIL_ALGORITHM_NTH_ELEMENT_HPP
#define STL2_DETAIL_ALGORITHM_NTH_ELEMENT_HPP
#include <stl2/detail/algorithm/min_element.hpp>
#include <stl2/detail/concepts/callable.hpp>
#include <stl2/detail/range/primitives.hpp>
///////////////////////////////////////////////////////////////////////////
// nth_element [alg.nth.element]
//
STL2_OPEN_NAMESPACE {
struct __nth_element_fn : private __niebloid {
// TODO: refactor this monstrosity.
template<random_access_iterator I, sentinel_for<I> S, class Comp = less,
class Proj = identity>
requires sortable<I, Comp, Proj>
constexpr I operator()(I first, I nth, S last, Comp comp = {},
Proj proj = {}) const
{
constexpr iter_difference_t<I> limit = 7;
static_assert(limit >= 3);
auto pred = [&](auto&& lhs, auto&& rhs) -> bool {
return __stl2::invoke(comp,
__stl2::invoke(proj, static_cast<decltype(lhs)>(lhs)),
__stl2::invoke(proj, static_cast<decltype(rhs)>(rhs)));
};
I end_orig = next(nth, last);
I end = end_orig;
while (true) {
if (nth == end) return end_orig;
iter_difference_t<I> len = end - first;
STL2_EXPECT(len >= 0);
switch (len) {
case 0:
case 1:
return end_orig;
case 2:
if (pred(*--end, *first)) iter_swap(first, end);
return end_orig;
case 3:
{
I m = first;
sort3(first, ++m, --end, comp, proj);
return end_orig;
}
default: break;
}
if (len <= limit) {
selection_sort(first, end, comp, proj);
return end_orig;
}
// Post: len > limit
I m = first + len / 2;
I lm1 = end;
unsigned n_swaps = sort3(first, m, --lm1, comp, proj);
// Post: *m is median
// partition [first, m) < *m and *m <= [m, end)
//(this inhibits tossing elements equivalent to m around unnecessarily)
I i = first;
I j = lm1;
// j points beyond range to be tested, *lm1 is known to be <= *m
// The search going up is known to be guarded but the search coming down isn't.
// Prime the downward search with a guard.
if (!pred(*i, *m)) { // if *first == *m
// *first == *m, *first doesn't go in first part
// manually guard downward moving j against i
bool restart = false;
while (true) {
if (i == --j) {
// *first == *m, *m <= all other elements
// Partition instead into [first, i) == *first and *first < [i, end)
++i; // first + 1
j = end;
if (!pred(*first, *--j)) { // we need a guard if *first == *(end-1)
while (true) {
if (i == j) {
return end_orig; // [first, end) all equivalent elements
}
if (pred(*first, *i)) {
iter_swap(i, j);
++n_swaps;
++i;
break;
}
++i;
}
}
// [first, i) == *first and *first < [j, end) and j == end - 1
if (i == j) return end_orig;
while (true) {
while (!pred(*first, *i)) { ++i; }
while (pred(*first, *--j)) {}
if (i >= j) break;
iter_swap(i, j);
++n_swaps;
++i;
}
// [first, i) == *first and *first < [i, end)
// The first part is sorted,
if (nth < i) return end_orig;
// nth_element the second part
// nth_element<C>(i, nth, end, comp);
restart = true;
break;
}
if (pred(*j, *m)) {
iter_swap(i, j);
++n_swaps;
break; // found guard for downward moving j, now use unguarded partition
}
}
if (restart) {
first = i;
continue;
}
}
++i;
// j points beyond range to be tested, *lm1 is known to be <= *m
// if not yet partitioned...
if (i < j) {
// known that *(i - 1) < *m
while (true) {
// m still guards upward moving i
while (pred(*i, *m)) { ++i; }
// It is now known that a guard exists for downward moving j
while (!pred(*--j, *m)) {}
if (i >= j) break;
iter_swap(i, j);
++n_swaps;
// It is known that m != j
// If m just moved, follow it
if (m == i) m = j;
++i;
}
}
// [first, i) < *m and *m <= [i, end)
if (i != m && pred(*m, *i)) {
iter_swap(i, m);
++n_swaps;
}
// [first, i) < *i and *i <= [i+1, end)
if (nth == i) return end_orig;
if (n_swaps == 0) {
// We were given a perfectly partitioned sequence. Coincidence?
if (nth < i) {
// Check for [first, i) already sorted
j = m = first;
while (true) {
if (++j == i) {
// [first, i) sorted
return end_orig;
}
if (pred(*j, *m)) {
// not yet sorted, so sort
break;
}
m = j;
}
} else {
// Check for [i, end) already sorted
j = m = i;
while (true) {
if (++j == end) {
// [i, end) sorted
return end_orig;
}
if (pred(*j, *m)) {
// not yet sorted, so sort
break;
}
m = j;
}
}
}
// nth_element on range containing nth
if (nth < i) {
// nth_element<C>(first, nth, i, comp);
end = i;
} else {
// nth_element<C>(i+1, nth, end, comp);
first = ++i;
}
}
return end_orig;
}
template<random_access_range Rng, class Comp = less, class Proj = identity>
requires sortable<iterator_t<Rng>, Comp, Proj>
constexpr safe_iterator_t<Rng> operator()(Rng&& rng, iterator_t<Rng> nth,
Comp comp = {}, Proj proj = {}) const
{
return (*this)(begin(rng), std::move(nth), end(rng),
__stl2::ref(comp), __stl2::ref(proj));
}
private:
// stable, 2-3 compares, 0-2 swaps
template<class I, class C, class P>
requires sortable<I, C, P>
static constexpr unsigned sort3(I x, I y, I z, C& comp, P& proj) {
auto pred = [&](auto&& lhs, auto&& rhs) -> bool {
return __stl2::invoke(comp,
__stl2::invoke(proj, static_cast<decltype(lhs)>(lhs)),
__stl2::invoke(proj, static_cast<decltype(rhs)>(rhs)));
};
if (!pred(*y, *x)) { // if x <= y
if (!pred(*z, *y)) { // if y <= z
return 0; // x <= y && y <= z
}
// x <= y && y > z
iter_swap(y, z); // x <= z && y < z
if (pred(*y, *x)) { // if x > y
iter_swap(x, y); // x < y && y <= z
return 2;
}
return 1; // x <= y && y < z
}
if (pred(*z, *y)) { // x > y, if y > z
iter_swap(x, z); // x < y && y < z
return 1;
}
iter_swap(x, y); // x > y && y <= z
// x < y && x <= z
if (pred(*z, *y)) { // if y > z
iter_swap(y, z); // x <= y && y < z
return 2;
}
return 1;
}
template<bidirectional_iterator I, class C, class P>
requires sortable<I, C, P>
static constexpr void selection_sort(I begin, I end, C &comp, P &proj) {
STL2_EXPECT(begin != end);
for (I lm1 = prev(end); begin != lm1; ++begin) {
I i = min_element(begin, end, __stl2::ref(comp),
__stl2::ref(proj));
if (i != begin) {
iter_swap(begin, i);
}
}
}
};
inline constexpr __nth_element_fn nth_element{};
} STL2_CLOSE_NAMESPACE
#endif
| {
"pile_set_name": "Github"
} |
#
# OpenSSL/crypto/txt_db/Makefile
#
DIR= txt_db
TOP= ../..
CC= cc
INCLUDES=
CFLAG=-g
MAKEFILE= Makefile
AR= ar r
CFLAGS= $(INCLUDES) $(CFLAG)
GENERAL=Makefile
TEST=
APPS=
LIB=$(TOP)/libcrypto.a
LIBSRC=txt_db.c
LIBOBJ=txt_db.o
SRC= $(LIBSRC)
EXHEADER= txt_db.h
HEADER= $(EXHEADER)
ALL= $(GENERAL) $(SRC) $(HEADER)
top:
(cd ../..; $(MAKE) DIRS=crypto SDIRS=$(DIR) sub_all)
all: lib
lib: $(LIBOBJ)
$(AR) $(LIB) $(LIBOBJ)
$(RANLIB) $(LIB) || echo Never mind.
@touch lib
files:
$(PERL) $(TOP)/util/files.pl Makefile >> $(TOP)/MINFO
links:
@$(PERL) $(TOP)/util/mklink.pl ../../include/openssl $(EXHEADER)
@$(PERL) $(TOP)/util/mklink.pl ../../test $(TEST)
@$(PERL) $(TOP)/util/mklink.pl ../../apps $(APPS)
install:
@[ -n "$(INSTALLTOP)" ] # should be set by top Makefile...
@headerlist="$(EXHEADER)"; for i in $$headerlist ; \
do \
(cp $$i $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i; \
chmod 644 $(INSTALL_PREFIX)$(INSTALLTOP)/include/openssl/$$i ); \
done;
tags:
ctags $(SRC)
tests:
lint:
lint -DLINT $(INCLUDES) $(SRC)>fluff
update: depend
depend:
@[ -n "$(MAKEDEPEND)" ] # should be set by top Makefile...
$(MAKEDEPEND) -- $(CFLAG) $(INCLUDES) $(DEPFLAG) -- $(PROGS) $(LIBSRC)
dclean:
$(PERL) -pe 'if (/^# DO NOT DELETE THIS LINE/) {print; exit(0);}' $(MAKEFILE) >Makefile.new
mv -f Makefile.new $(MAKEFILE)
clean:
rm -f *.o *.obj lib tags core .pure .nfs* *.old *.bak fluff
# DO NOT DELETE THIS LINE -- make depend depends on it.
txt_db.o: ../../e_os.h ../../include/openssl/bio.h
txt_db.o: ../../include/openssl/buffer.h ../../include/openssl/crypto.h
txt_db.o: ../../include/openssl/e_os2.h ../../include/openssl/err.h
txt_db.o: ../../include/openssl/lhash.h ../../include/openssl/opensslconf.h
txt_db.o: ../../include/openssl/opensslv.h ../../include/openssl/ossl_typ.h
txt_db.o: ../../include/openssl/safestack.h ../../include/openssl/stack.h
txt_db.o: ../../include/openssl/symhacks.h ../../include/openssl/txt_db.h
txt_db.o: ../cryptlib.h txt_db.c
| {
"pile_set_name": "Github"
} |
{
"id": "arming_sword",
"fg": [
"arming_sword"
],
"bg": [
]
}
| {
"pile_set_name": "Github"
} |
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/apis/bigquery_v2"
require "stringio"
require "base64"
require "bigdecimal"
require "time"
require "date"
module Google
module Cloud
module Bigquery
# rubocop:disable Metrics/ModuleLength
##
# @private
#
# Internal conversion of raw data values to/from Bigquery values
#
# | BigQuery | Ruby | Notes |
# |-------------|----------------|---|
# | `BOOL` | `true`/`false` | |
# | `INT64` | `Integer` | |
# | `FLOAT64` | `Float` | |
# | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
# | `STRING` | `String` | |
# | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
# | `DATE` | `Date` | |
# | `TIMESTAMP` | `Time` | |
# | `TIME` | `Google::Cloud::BigQuery::Time` | |
# | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
# | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
# | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
module Convert
##
# @private
def self.format_rows rows, fields
Array(rows).map do |row|
# convert TableRow to hash to handle nested TableCell values
format_row row.to_h, fields
end
end
##
# @private
def self.format_row row, fields
row_pairs = fields.zip(row[:f]).map do |f, v|
[f.name.to_sym, format_value(v, f)]
end
Hash[row_pairs]
end
# rubocop:disable all
def self.format_value value, field
if value.nil?
nil
elsif value.empty?
nil
elsif value[:v].nil?
nil
elsif Array === value[:v]
value[:v].map { |v| format_value v, field }
elsif Hash === value[:v]
format_row value[:v], field.fields
elsif field.type == "STRING"
String value[:v]
elsif field.type == "INTEGER"
Integer value[:v]
elsif field.type == "FLOAT"
if value[:v] == "Infinity"
Float::INFINITY
elsif value[:v] == "-Infinity"
-Float::INFINITY
elsif value[:v] == "NaN"
Float::NAN
else
Float value[:v]
end
elsif field.type == "NUMERIC"
BigDecimal value[:v]
elsif field.type == "BOOLEAN"
(value[:v] == "true" ? true : (value[:v] == "false" ? false : nil))
elsif field.type == "BYTES"
StringIO.new Base64.decode64 value[:v]
elsif field.type == "TIMESTAMP"
::Time.at Float(value[:v])
elsif field.type == "TIME"
Bigquery::Time.new value[:v]
elsif field.type == "DATETIME"
::Time.parse("#{value[:v]} UTC").to_datetime
elsif field.type == "DATE"
Date.parse value[:v]
else
value[:v]
end
end
##
# @private
def self.to_query_param param, type = nil
type ||= default_query_param_type_for param
Google::Apis::BigqueryV2::QueryParameter.new(
parameter_type: to_query_param_type(type),
parameter_value: to_query_param_value(param)
)
end
##
# @private
def self.to_query_param_value value
return Google::Apis::BigqueryV2::QueryParameterValue.new value: nil if value.nil?
json_value = to_json_value value
if Array === json_value
array_values = json_value.map { |v| to_query_param_value v }
Google::Apis::BigqueryV2::QueryParameterValue.new array_values: array_values
elsif Hash === json_value
struct_pairs = json_value.map do |key, value|
[String(key), to_query_param_value(value)]
end
struct_values = Hash[struct_pairs]
Google::Apis::BigqueryV2::QueryParameterValue.new struct_values: struct_values
else
# Everything else is converted to a string, per the API expectations.
Google::Apis::BigqueryV2::QueryParameterValue.new value: json_value.to_s
end
end
def self.to_query_param_type type
if Array === type
Google::Apis::BigqueryV2::QueryParameterType.new(
type: "ARRAY".freeze,
array_type: to_query_param_type(type.first)
)
elsif Hash === type
Google::Apis::BigqueryV2::QueryParameterType.new(
type: "STRUCT".freeze,
struct_types: type.map do |key, val|
Google::Apis::BigqueryV2::QueryParameterType::StructType.new(
name: String(key),
type: to_query_param_type(val)
)
end
)
else
Google::Apis::BigqueryV2::QueryParameterType.new(type: type.to_s.freeze)
end
end
def self.default_query_param_type_for param
raise ArgumentError, "nil params are not supported, must assign optional type" if param.nil?
case param
when String
:STRING
when Symbol
:STRING
when TrueClass
:BOOL
when FalseClass
:BOOL
when Integer
:INT64
when BigDecimal
:NUMERIC
when Numeric
:FLOAT64
when ::Time
:TIMESTAMP
when Bigquery::Time
:TIME
when DateTime
:DATETIME
when Date
:DATE
when Array
if param.empty?
raise ArgumentError, "Cannot determine type for empty array values"
end
non_nil_values = param.compact.map { |p| default_query_param_type_for p }.compact
if non_nil_values.empty?
raise ArgumentError, "Cannot determine type for array of nil values"
end
if non_nil_values.uniq.count > 1
raise ArgumentError, "Cannot determine type for array of different types of values"
end
[non_nil_values.first]
when Hash
Hash[param.map do |key, value|
[key, default_query_param_type_for(value)]
end]
else
if param.respond_to?(:read) && param.respond_to?(:rewind)
:BYTES
else
raise "A query parameter of type #{param.class} is not supported"
end
end
end
##
# @private
def self.to_json_value value
if DateTime === value
value.strftime "%Y-%m-%d %H:%M:%S.%6N"
elsif Date === value
value.to_s
elsif ::Time === value
value.strftime "%Y-%m-%d %H:%M:%S.%6N%:z"
elsif Bigquery::Time === value
value.value
elsif BigDecimal === value
# Round to precision of 9
value.finite? ? value.round(9).to_s("F") : value.to_s
elsif value.respond_to?(:read) && value.respond_to?(:rewind)
value.rewind
Base64.strict_encode64(value.read.force_encoding("ASCII-8BIT"))
elsif Array === value
value.map { |v| to_json_value v }
elsif Hash === value
Hash[value.map { |k, v| [k.to_s, to_json_value(v)] }]
else
value
end
end
# rubocop:enable all
##
# @private
def self.to_json_rows rows
rows.map { |row| to_json_row row }
end
##
# @private
def self.to_json_row row
Hash[row.map { |k, v| [k.to_s, to_json_value(v)] }]
end
def self.resolve_legacy_sql standard_sql, legacy_sql
return !standard_sql unless standard_sql.nil?
return legacy_sql unless legacy_sql.nil?
false
end
##
# @private
#
# Converts create disposition strings to API values.
#
# @return [String] API representation of create disposition.
def self.create_disposition str
val = {
"create_if_needed" => "CREATE_IF_NEEDED",
"createifneeded" => "CREATE_IF_NEEDED",
"if_needed" => "CREATE_IF_NEEDED",
"needed" => "CREATE_IF_NEEDED",
"create_never" => "CREATE_NEVER",
"createnever" => "CREATE_NEVER",
"never" => "CREATE_NEVER"
}[str.to_s.downcase]
return val unless val.nil?
str
end
##
# @private
#
# Converts write disposition strings to API values.
#
# @return [String] API representation of write disposition.
def self.write_disposition str
val = {
"write_truncate" => "WRITE_TRUNCATE",
"writetruncate" => "WRITE_TRUNCATE",
"truncate" => "WRITE_TRUNCATE",
"write_append" => "WRITE_APPEND",
"writeappend" => "WRITE_APPEND",
"append" => "WRITE_APPEND",
"write_empty" => "WRITE_EMPTY",
"writeempty" => "WRITE_EMPTY",
"empty" => "WRITE_EMPTY"
}[str.to_s.downcase]
return val unless val.nil?
str
end
##
# @private
#
# Converts source format strings to API values.
#
# @return [String] API representation of source format.
def self.source_format format
val = {
"csv" => "CSV",
"json" => "NEWLINE_DELIMITED_JSON",
"newline_delimited_json" => "NEWLINE_DELIMITED_JSON",
"avro" => "AVRO",
"orc" => "ORC",
"parquet" => "PARQUET",
"datastore" => "DATASTORE_BACKUP",
"backup" => "DATASTORE_BACKUP",
"datastore_backup" => "DATASTORE_BACKUP",
"ml_tf_saved_model" => "ML_TF_SAVED_MODEL",
"ml_xgboost_booster" => "ML_XGBOOST_BOOSTER"
}[format.to_s.downcase]
return val unless val.nil?
format
end
##
# @private
#
# Converts file paths into source format by extension.
#
# @return [String] API representation of source format.
def self.derive_source_format_from_list paths
paths.map do |path|
derive_source_format path
end.compact.uniq.first
end
##
# @private
#
# Converts file path into source format by extension.
#
# @return [String] API representation of source format.
def self.derive_source_format path
return "CSV" if path.end_with? ".csv"
return "NEWLINE_DELIMITED_JSON" if path.end_with? ".json"
return "AVRO" if path.end_with? ".avro"
return "ORC" if path.end_with? ".orc"
return "PARQUET" if path.end_with? ".parquet"
return "DATASTORE_BACKUP" if path.end_with? ".backup_info"
nil
end
##
# @private
#
# Converts a primitive time value in milliseconds to a Ruby Time object.
#
# @return [Time, nil] The Ruby Time object, or nil if the given argument
# is nil.
def self.millis_to_time time_millis
return nil unless time_millis
::Time.at Rational(time_millis, 1000)
end
##
# @private
#
# Converts a Ruby Time object to a primitive time value in milliseconds.
#
# @return [Integer, nil] The primitive time value in milliseconds, or
# nil if the given argument is nil.
def self.time_to_millis time_obj
return nil unless time_obj
(time_obj.to_i * 1000) + (time_obj.nsec / 1_000_000)
end
end
# rubocop:enable Metrics/ModuleLength
end
end
end
| {
"pile_set_name": "Github"
} |