package org.roaringbitmap.buffer;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.roaringbitmap.IntConsumer;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.nio.CharBuffer;

import static org.junit.jupiter.api.Assertions.*;

@Execution(ExecutionMode.CONCURRENT)
public class TestMappeableArrayContainer {

  @Test
  public void addEmptyRange() {
    MappeableContainer ac = new MappeableArrayContainer();
    ac = ac.iadd(1, 1);
    assertEquals(0, ac.getCardinality());
  }

  @Test
  public void addInvalidRange() {
    assertThrows(IllegalArgumentException.class, () -> {
      MappeableContainer ac = new MappeableArrayContainer();
      ac.add(13, 1);
    });
  }

  @Test
  public void iaddInvalidRange() {
    assertThrows(IllegalArgumentException.class, () -> {
      MappeableContainer ac = new MappeableArrayContainer();
      ac.iadd(13, 1);
    });
  }

  @Test
  public void iaddSanityTest() {
    MappeableContainer ac = new MappeableArrayContainer();
    ac = ac.iadd(10, 20);
    // insert disjoint at end
    ac = ac.iadd(30, 70);
    // insert disjoint between
    ac = ac.iadd(25, 26);
    // insert disjoint at start
    ac = ac.iadd(1, 2);
    // insert overlap at end
    ac = ac.iadd(60, 80);
    // insert overlap between
    ac = ac.iadd(10, 30);
    // insert overlap at start
    ac = ac.iadd(1, 20);
    assertEquals(79, ac.getCardinality());
  }

  @Test
  public void remove() {
    MappeableContainer ac = new MappeableArrayContainer();
    ac = ac.iadd(1, 3);
    ac = ac.remove((char) 2);
    assertEquals(1, ac.getCardinality());
    assertTrue(ac.contains((char) 1));
  }

  @Test
  public void removeInvalidRange() {
    assertThrows(IllegalArgumentException.class, () -> {
      MappeableContainer ac = new MappeableArrayContainer();
      ac.remove(13, 1);
    });
  }

  @Test
  public void iremoveEmptyRange() {
    MappeableContainer ac = new MappeableArrayContainer();
    ac.remove(1, 1);
    assertEquals(0, ac.getCardinality());
  }

  @Test
  public void iremoveInvalidRange() {
    assertThrows(IllegalArgumentException.class, () -> {
      MappeableContainer ac = new MappeableArrayContainer();
      ac.iremove(13, 1);
    });
  }

  @Test
  public void constructorWithRun() {
    MappeableContainer ac = new MappeableArrayContainer(1, 13);
    assertEquals(12, ac.getCardinality());
    for (int i = 1; i <= 12; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void add() {
    MappeableContainer ac = newArrayContainer(1, 2, 3, 5);
    ac = ac.add((char) 4);
    assertEquals(5, ac.getCardinality());
    for (int i = 1; i <= 5; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void add2() {
    MappeableContainer ac = newArrayContainer(1, 5000);
    ac = ac.add((char) 7000);
    assertEquals(5000, ac.getCardinality());
    for (int i = 1; i < 5000; i++) {
      assertTrue(ac.contains((char) i));
    }
    assertTrue(ac.contains((char) 7000));
  }

  @Test
  public void flip() {
    MappeableContainer ac = newArrayContainer(1, 2, 3, 5);
    ac = ac.flip((char) 4);
    assertEquals(5, ac.getCardinality());
    for (int i = 1; i <= 5; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void flip2() {
    MappeableContainer ac = newArrayContainer(1, 2, 3, 4, 5);
    ac = ac.flip((char) 5);
    assertEquals(4, ac.getCardinality());
    for (int i = 1; i <= 4; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void flip3() {
    MappeableContainer ac = newArrayContainer(1, 5000);
    ac = ac.flip((char) 7000);
    assertEquals(5000, ac.getCardinality());
    for (int i = 1; i < 5000; i++) {
      assertTrue(ac.contains((char) i));
    }
    assertTrue(ac.contains((char) 7000));
  }

  static MappeableArrayContainer newArrayContainer(int... values) {
    CharBuffer buffer = CharBuffer.allocate(values.length);
    for (int value : values) {
      buffer.put((char) value);
    }
    return new MappeableArrayContainer(buffer.asReadOnlyBuffer(), values.length);
  }

  static MappeableArrayContainer newArrayContainer(int firstOfRun, final int lastOfRun) {
    CharBuffer buffer = CharBuffer.allocate(lastOfRun - firstOfRun);
    for (int i = firstOfRun; i < lastOfRun; i++) {
      buffer.put((char) i);
    }
    return new MappeableArrayContainer(buffer.asReadOnlyBuffer(), lastOfRun - firstOfRun);
  }

  @Test
  public void iand() {
    MappeableContainer ac = new MappeableArrayContainer();
    ac = ac.add(10, 20);
    MappeableContainer bc = new MappeableBitmapContainer();
    bc = bc.add(15, 25);
    ac.iand(bc);
    assertEquals(5, ac.getCardinality());
    for (int i = 15; i < 20; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void iandNotArray() {
    MappeableContainer ac = new MappeableArrayContainer();
    ac = ac.add(10, 20);
    MappeableContainer ac2 = newArrayContainer(15, 25);
    ac.iandNot(ac2);
    assertEquals(5, ac.getCardinality());
    for (int i = 10; i < 15; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void iandNotBitmap() {
    MappeableContainer ac = new MappeableArrayContainer();
    ac = ac.add(10, 20);
    MappeableContainer bc = new MappeableBitmapContainer();
    bc = bc.add(15, 25);
    ac.iandNot(bc);
    assertEquals(5, ac.getCardinality());
    for (int i = 10; i < 15; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void intersects() {
    MappeableContainer ac1 = new MappeableArrayContainer();
    ac1 = ac1.add(10, 20);
    MappeableContainer ac2 = new MappeableArrayContainer();
    ac2 = ac2.add(15, 25);
    assertTrue(ac1.intersects(ac2));
  }

  @Test
  public void numberOfRuns() {
    MappeableContainer ac = newArrayContainer(1, 13);
    assertEquals(1, ac.numberOfRuns());
  }

  @Test
  public void roundtrip() throws Exception {
    MappeableContainer ac = new MappeableArrayContainer();
    ac = ac.add(1, 5);
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (ObjectOutputStream oo = new ObjectOutputStream(bos)) {
      ac.writeExternal(oo);
    }
    MappeableContainer ac2 = new MappeableArrayContainer();
    final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    ac2.readExternal(new ObjectInputStream(bis));
    assertEquals(4, ac2.getCardinality());
    for (int i = 1; i < 5; i++) {
      assertTrue(ac2.contains((char) i));
    }
  }

  @Test
  public void orArray() {
    MappeableContainer ac = newArrayContainer(0, 8192);
    MappeableContainer ac2 = newArrayContainer(15, 25);
    ac = ac.or(ac2);
    assertEquals(8192, ac.getCardinality());
    for (int i = 0; i < 8192; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void xorArray() {
    MappeableContainer ac = newArrayContainer(0, 8192);
    MappeableContainer ac2 = newArrayContainer(15, 25);
    ac = ac.xor(ac2);
    assertEquals(8182, ac.getCardinality());
    for (int i = 0; i < 15; i++) {
      assertTrue(ac.contains((char) i));
    }
    for (int i = 25; i < 8192; i++) {
      assertTrue(ac.contains((char) i));
    }
  }

  @Test
  public void foreach() {
    MappeableContainer ac = newArrayContainer(0, 64);
    ac.forEach((char) 0, new IntConsumer() {
      int expected = 0;

      @Override
      public void accept(int value) {
        assertEquals(value, expected++);
      }
    });
  }

  @Test
  public void orFullToRunContainer() {
    MappeableArrayContainer ac = new MappeableArrayContainer(0, 1 << 12);
    MappeableBitmapContainer half = new MappeableBitmapContainer(1 << 12, 1 << 16);
    MappeableContainer result = ac.or(half);
    assertEquals(1 << 16, result.getCardinality());
    assertTrue(result instanceof MappeableRunContainer);
  }

  @Test
  public void orFullToRunContainer2() {
    MappeableArrayContainer ac = new MappeableArrayContainer(0, 1 << 15);
    MappeableArrayContainer half = new MappeableArrayContainer(1 << 15, 1 << 16);
    MappeableContainer result = ac.or(half);
    assertEquals(1 << 16, result.getCardinality());
    assertTrue(result instanceof MappeableRunContainer);
  }

  @Test
  public void testLazyORFull() {
    MappeableArrayContainer ac = new MappeableArrayContainer(0, 1 << 15);
    MappeableArrayContainer ac2 = new MappeableArrayContainer(1 << 15, 1 << 16);
    MappeableContainer rbc = ac.lazyor(ac2);
    assertEquals(-1, rbc.getCardinality());
    MappeableContainer repaired = rbc.repairAfterLazy();
    assertEquals(1 << 16, repaired.getCardinality());
    assertTrue(repaired instanceof MappeableRunContainer);
  }

  @Test
  public void isNotFull() {
    assertFalse(new MappeableArrayContainer().add('a').isFull());
  }

  @Test
  public void testToString() {
    MappeableArrayContainer ac1 = new MappeableArrayContainer(5, 15);
    ac1.add((char) -3);
    ac1.add((char) -17);
    assertEquals("{5,6,7,8,9,10,11,12,13,14,65519,65533}", ac1.toString());
  }

  @Test
  public void iorNotIncreaseCapacity() {
    MappeableArrayContainer ac1 = new MappeableArrayContainer();
    MappeableArrayContainer ac2 = new MappeableArrayContainer();
    ac1.add((char) 128);
    ac1.add((char) 256);
    ac2.add((char) 1024);
    ac1.ior(ac2);
    assertTrue(ac1.contains((char) 128));
    assertTrue(ac1.contains((char) 256));
    assertTrue(ac1.contains((char) 1024));
  }

  @Test
  public void iorIncreaseCapacity() {
    MappeableArrayContainer ac1 = new MappeableArrayContainer();
    MappeableArrayContainer ac2 = new MappeableArrayContainer();
    ac1.add((char) 128);
    ac1.add((char) 256);
    ac1.add((char) 512);
    ac1.add((char) 513);
    ac2.add((char) 1024);
    ac1.ior(ac2);
    assertTrue(ac1.contains((char) 128));
    assertTrue(ac1.contains((char) 256));
    assertTrue(ac1.contains((char) 512));
    assertTrue(ac1.contains((char) 513));
    assertTrue(ac1.contains((char) 1024));
  }

  @Test
  public void iorSanityCheck() {
    MappeableContainer ac = new MappeableArrayContainer().add(0, 10);
    MappeableContainer disjoint = new MappeableArrayContainer().add(20, 40);
    ac.ior(disjoint);
    assertTrue(ac.contains(disjoint));
  }

  @Test
  public void testIntersectsWithRange() {
    MappeableContainer container = new MappeableArrayContainer().add(0, 10);
    assertTrue(container.intersects(0, 1));
    assertTrue(container.intersects(0, 101));
    assertTrue(container.intersects(0, lower16Bits(-1)));
    assertFalse(container.intersects(11, lower16Bits(-1)));
  }

  @Test
  public void testIntersectsWithRange2() {
    MappeableContainer container = new MappeableArrayContainer().add(lower16Bits(-50), lower16Bits(-10));
    assertFalse(container.intersects(0, 1));
    assertTrue(container.intersects(0, lower16Bits(-40)));
    assertFalse(container.intersects(lower16Bits(-100), lower16Bits(-55)));
    assertFalse(container.intersects(lower16Bits(-9), lower16Bits(-1)));
    assertTrue(container.intersects(11, 1 << 16));
  }

  @Test
  public void testIntersectsWithRange3() {
    MappeableContainer container = new MappeableArrayContainer()
        .add((char) 1)
        .add((char) 300)
        .add((char) 1024);
    assertTrue(container.intersects(0, 300));
    assertTrue(container.intersects(1, 300));
    assertFalse(container.intersects(2, 300));
    assertFalse(container.intersects(2, 299));
    assertTrue(container.intersects(0, lower16Bits(-1)));
    assertFalse(container.intersects(1025, 1 << 16));
  }

  @Test
  public void testContainsRange() {
    MappeableContainer ac = new MappeableArrayContainer().add(20, 100);
    assertFalse(ac.contains(1, 21));
    assertFalse(ac.contains(1, 19));
    assertTrue(ac.contains(20, 100));
    assertTrue(ac.contains(20, 99));
    assertTrue(ac.contains(21, 100));
    assertFalse(ac.contains(21, 101));
    assertFalse(ac.contains(19, 99));
    assertFalse(ac.contains(190, 9999));
  }

  @Test
  public void testContainsRange2() {
    MappeableContainer ac = new MappeableArrayContainer()
        .add((char) 1).add((char) 10)
        .add(20, 100);
    assertFalse(ac.contains(1, 21));
    assertFalse(ac.contains(1, 20));
    assertTrue(ac.contains(1, 2));
  }

  @Test
  public void testContainsRangeUnsigned() {
    MappeableContainer ac = new MappeableArrayContainer().add(1 << 15, 1 << 8 | 1 << 15);
    assertTrue(ac.contains(1 << 15, 1 << 8 | 1 << 15));
    assertTrue(ac.contains(1 + (1 << 15), (1 << 8 | 1 << 15) - 1));
    assertFalse(ac.contains(1 + (1 << 15), (1 << 8 | 1 << 15) + 1));
    assertFalse(ac.contains((1 << 15) - 1, (1 << 8 | 1 << 15) - 1));
    assertFalse(ac.contains(0, 1 << 15));
    assertFalse(ac.contains(1 << 8 | 1 << 15 | 1, 1 << 16));
  }

  @Test
  public void testNextValueBeforeStart() {
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(10, container.nextValue((char) 5));
  }

  @Test
  public void testNextValue() {
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(10, container.nextValue((char) 10));
    assertEquals(20, container.nextValue((char) 11));
    assertEquals(30, container.nextValue((char) 30));
  }

  @Test
  public void testNextValueAfterEnd() {
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(-1, container.nextValue((char) 31));
  }

  @Test
  public void testNextValue2() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129);
    assertTrue(container instanceof MappeableArrayContainer);
    assertEquals(64, container.nextValue((char) 0));
    assertEquals(64, container.nextValue((char) 64));
    assertEquals(65, container.nextValue((char) 65));
    assertEquals(128, container.nextValue((char) 128));
    assertEquals(-1, container.nextValue((char) 129));
    assertEquals(-1, container.nextValue((char) 5000));
  }

  @Test
  public void testNextValueBetweenRuns() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129).iadd(256, 321);
    assertTrue(container instanceof MappeableArrayContainer);
    assertEquals(64, container.nextValue((char) 0));
    assertEquals(64, container.nextValue((char) 64));
    assertEquals(65, container.nextValue((char) 65));
    assertEquals(128, container.nextValue((char) 128));
    assertEquals(256, container.nextValue((char) 129));
    assertEquals(-1, container.nextValue((char) 512));
  }

  @Test
  public void testNextValue3() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129).iadd(200, 501).iadd(5000, 5201);
    assertTrue(container instanceof MappeableArrayContainer);
    assertEquals(64, container.nextValue((char) 0));
    assertEquals(64, container.nextValue((char) 63));
    assertEquals(64, container.nextValue((char) 64));
    assertEquals(65, container.nextValue((char) 65));
    assertEquals(128, container.nextValue((char) 128));
    assertEquals(200, container.nextValue((char) 129));
    assertEquals(200, container.nextValue((char) 199));
    assertEquals(200, container.nextValue((char) 200));
    assertEquals(250, container.nextValue((char) 250));
    assertEquals(5000, container.nextValue((char) 2500));
    assertEquals(5000, container.nextValue((char) 5000));
    assertEquals(5200, container.nextValue((char) 5200));
    assertEquals(-1, container.nextValue((char) 5201));
  }

  @Test
  public void testPreviousValue1() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129);
    assertTrue(container instanceof MappeableArrayContainer);
    assertEquals(-1, container.previousValue((char) 0));
    assertEquals(-1, container.previousValue((char) 63));
    assertEquals(64, container.previousValue((char) 64));
    assertEquals(65, container.previousValue((char) 65));
    assertEquals(128, container.previousValue((char) 128));
    assertEquals(128, container.previousValue((char) 129));
  }

  @Test
  public void testPreviousValue2() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129).iadd(200, 501).iadd(5000, 5201);
    assertTrue(container instanceof MappeableArrayContainer);
    assertEquals(-1, container.previousValue((char) 0));
    assertEquals(-1, container.previousValue((char) 63));
    assertEquals(64, container.previousValue((char) 64));
    assertEquals(65, container.previousValue((char) 65));
    assertEquals(128, container.previousValue((char) 128));
    assertEquals(128, container.previousValue((char) 129));
    assertEquals(128, container.previousValue((char) 199));
    assertEquals(200, container.previousValue((char) 200));
    assertEquals(250, container.previousValue((char) 250));
    assertEquals(500, container.previousValue((char) 2500));
    assertEquals(5000, container.previousValue((char) 5000));
    assertEquals(5200, container.previousValue((char) 5200));
  }

  @Test
  public void testPreviousValueBeforeStart() {
    MappeableContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(-1, container.previousValue((char) 5));
  }

  @Test
  public void testPreviousValueSparse() {
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(-1, container.previousValue((char) 9));
    assertEquals(10, container.previousValue((char) 10));
    assertEquals(10, container.previousValue((char) 11));
    assertEquals(20, container.previousValue((char) 21));
    assertEquals(30, container.previousValue((char) 30));
  }

  @Test
  public void testPreviousValueUnsigned() {
    MappeableArrayContainer container =
        new MappeableArrayContainer(CharBuffer.wrap(new char[] {(char) ((1 << 15) | 5), (char) ((1 << 15) | 7)}), 2);
    assertEquals(-1, container.previousValue((char) ((1 << 15) | 4)));
    assertEquals(((1 << 15) | 5), container.previousValue((char) ((1 << 15) | 5)));
    assertEquals(((1 << 15) | 5), container.previousValue((char) ((1 << 15) | 6)));
    assertEquals(((1 << 15) | 7), container.previousValue((char) ((1 << 15) | 7)));
    assertEquals(((1 << 15) | 7), container.previousValue((char) ((1 << 15) | 8)));
  }

  @Test
  public void testNextValueUnsigned() {
    MappeableArrayContainer container =
        new MappeableArrayContainer(CharBuffer.wrap(new char[] {(char) ((1 << 15) | 5), (char) ((1 << 15) | 7)}), 2);
    assertEquals(((1 << 15) | 5), container.nextValue((char) ((1 << 15) | 4)));
    assertEquals(((1 << 15) | 5), container.nextValue((char) ((1 << 15) | 5)));
    assertEquals(((1 << 15) | 7), container.nextValue((char) ((1 << 15) | 6)));
    assertEquals(((1 << 15) | 7), container.nextValue((char) ((1 << 15) | 7)));
    assertEquals(-1, container.nextValue((char) ((1 << 15) | 8)));
  }

  @Test
  public void testPreviousValueAfterEnd() {
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(30, container.previousValue((char) 31));
  }

  @Test
  public void testPreviousAbsentValue1() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129);
    assertEquals(0, container.previousAbsentValue((char) 0));
    assertEquals(63, container.previousAbsentValue((char) 63));
    assertEquals(63, container.previousAbsentValue((char) 64));
    assertEquals(63, container.previousAbsentValue((char) 65));
    assertEquals(63, container.previousAbsentValue((char) 128));
    assertEquals(129, container.previousAbsentValue((char) 129));
  }

  @Test
  public void testPreviousAbsentValue2() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129).iadd(200, 501).iadd(5000, 5201);
    assertEquals(0, container.previousAbsentValue((char) 0));
    assertEquals(63, container.previousAbsentValue((char) 63));
    assertEquals(63, container.previousAbsentValue((char) 64));
    assertEquals(63, container.previousAbsentValue((char) 65));
    assertEquals(63, container.previousAbsentValue((char) 128));
    assertEquals(129, container.previousAbsentValue((char) 129));
    assertEquals(199, container.previousAbsentValue((char) 199));
    assertEquals(199, container.previousAbsentValue((char) 200));
    assertEquals(199, container.previousAbsentValue((char) 250));
    assertEquals(2500, container.previousAbsentValue((char) 2500));
    assertEquals(4999, container.previousAbsentValue((char) 5000));
    assertEquals(4999, container.previousAbsentValue((char) 5200));
  }

  @Test
  public void testPreviousAbsentValueEmpty() {
    MappeableArrayContainer container = new MappeableArrayContainer();
    for (int i = 0; i < 1000; i++) {
      assertEquals(i, container.previousAbsentValue((char) i));
    }
  }

  @Test
  public void testPreviousAbsentValueSparse() {
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(9, container.previousAbsentValue((char) 9));
    assertEquals(9, container.previousAbsentValue((char) 10));
    assertEquals(11, container.previousAbsentValue((char) 11));
    assertEquals(21, container.previousAbsentValue((char) 21));
    assertEquals(29, container.previousAbsentValue((char) 30));
  }

  @Test
  public void testPreviousAbsentValueUnsigned() {
    MappeableArrayContainer container =
        new MappeableArrayContainer(CharBuffer.wrap(new char[] {(char) ((1 << 15) | 5), (char) ((1 << 15) | 7)}), 2);
    assertEquals(((1 << 15) | 4), container.previousAbsentValue((char) ((1 << 15) | 4)));
    assertEquals(((1 << 15) | 4), container.previousAbsentValue((char) ((1 << 15) | 5)));
    assertEquals(((1 << 15) | 6), container.previousAbsentValue((char) ((1 << 15) | 6)));
    assertEquals(((1 << 15) | 6), container.previousAbsentValue((char) ((1 << 15) | 7)));
    assertEquals(((1 << 15) | 8), container.previousAbsentValue((char) ((1 << 15) | 8)));
  }

  @Test
  public void testNextAbsentValue1() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129);
    assertEquals(0, container.nextAbsentValue((char) 0));
    assertEquals(63, container.nextAbsentValue((char) 63));
    assertEquals(129, container.nextAbsentValue((char) 64));
    assertEquals(129, container.nextAbsentValue((char) 65));
    assertEquals(129, container.nextAbsentValue((char) 128));
    assertEquals(129, container.nextAbsentValue((char) 129));
  }

  @Test
  public void testNextAbsentValue2() {
    MappeableContainer container = new MappeableArrayContainer().iadd(64, 129).iadd(200, 501).iadd(5000, 5201);
    assertEquals(0, container.nextAbsentValue((char) 0));
    assertEquals(63, container.nextAbsentValue((char) 63));
    assertEquals(129, container.nextAbsentValue((char) 64));
    assertEquals(129, container.nextAbsentValue((char) 65));
    assertEquals(129, container.nextAbsentValue((char) 128));
    assertEquals(129, container.nextAbsentValue((char) 129));
    assertEquals(199, container.nextAbsentValue((char) 199));
    assertEquals(501, container.nextAbsentValue((char) 200));
    assertEquals(501, container.nextAbsentValue((char) 250));
    assertEquals(2500, container.nextAbsentValue((char) 2500));
    assertEquals(5201, container.nextAbsentValue((char) 5000));
    assertEquals(5201, container.nextAbsentValue((char) 5200));
  }

  @Test
  public void testNextAbsentValueEmpty() {
    MappeableArrayContainer container = new MappeableArrayContainer();
    for (int i = 0; i < 1000; i++) {
      assertEquals(i, container.nextAbsentValue((char) i));
    }
  }

  @Test
  public void testNextAbsentValueSparse() {
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(new char[] {10, 20, 30}), 3);
    assertEquals(9, container.nextAbsentValue((char) 9));
    assertEquals(11, container.nextAbsentValue((char) 10));
    assertEquals(11, container.nextAbsentValue((char) 11));
    assertEquals(21, container.nextAbsentValue((char) 21));
    assertEquals(31, container.nextAbsentValue((char) 30));
  }

  @Test
  public void testNextAbsentValueUnsigned() {
    char[] array = {(char) ((1 << 15) | 5), (char) ((1 << 15) | 7)};
    MappeableArrayContainer container = new MappeableArrayContainer(CharBuffer.wrap(array), 2);
    assertEquals(((1 << 15) | 4), container.nextAbsentValue((char) ((1 << 15) | 4)));
    assertEquals(((1 << 15) | 6), container.nextAbsentValue((char) ((1 << 15) | 5)));
    assertEquals(((1 << 15) | 6), container.nextAbsentValue((char) ((1 << 15) | 6)));
    assertEquals(((1 << 15) | 8), container.nextAbsentValue((char) ((1 << 15) | 7)));
    assertEquals(((1 << 15) | 8), container.nextAbsentValue((char) ((1 << 15) | 8)));
  }

  private static int lower16Bits(int x) {
    return ((char) x);
  }
}
{ "pile_set_name": "Github" }
/***********************************************************************************************************************
 * Copyright (C) 2010-2013 by the Stratosphere project (http://stratosphere.eu)
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 **********************************************************************************************************************/

package eu.stratosphere.runtime.io.serialization.types;

public enum SerializationTestTypeFactory {
  BOOLEAN(new BooleanType()),
  BYTE_ARRAY(new ByteArrayType()),
  BYTE_SUB_ARRAY(new ByteSubArrayType()),
  BYTE(new ByteType()),
  CHAR(new CharType()),
  DOUBLE(new DoubleType()),
  FLOAT(new FloatType()),
  INT(new IntType()),
  LONG(new LongType()),
  SHORT(new ShortType()),
  UNSIGNED_BYTE(new UnsignedByteType()),
  UNSIGNED_SHORT(new UnsignedShortType()),
  STRING(new AsciiStringType());

  private final SerializationTestType factory;

  SerializationTestTypeFactory(SerializationTestType type) {
    this.factory = type;
  }

  public SerializationTestType factory() {
    return this.factory;
  }
}
{ "pile_set_name": "Github" }
//
//  URLSession+Rx.swift
//  RxCocoa
//
//  Created by Krunoslav Zaher on 3/23/15.
//  Copyright © 2015 Krunoslav Zaher. All rights reserved.
//

import struct Foundation.URL
import struct Foundation.URLRequest
import struct Foundation.Data
import struct Foundation.Date
import struct Foundation.TimeInterval
import class Foundation.HTTPURLResponse
import class Foundation.URLSession
import class Foundation.URLResponse
import class Foundation.JSONSerialization
import class Foundation.NSError
import var Foundation.NSURLErrorCancelled
import var Foundation.NSURLErrorDomain

#if os(Linux)
    // don't know why
    import Foundation
#endif

import RxSwift

/// RxCocoa URL errors.
public enum RxCocoaURLError: Swift.Error {
    /// Unknown error occurred.
    case unknown
    /// Response is not NSHTTPURLResponse
    case nonHTTPResponse(response: URLResponse)
    /// Response is not successful. (not in `200 ..< 300` range)
    case httpRequestFailed(response: HTTPURLResponse, data: Data?)
    /// Deserialization error.
    case deserializationError(error: Swift.Error)
}

extension RxCocoaURLError: CustomDebugStringConvertible {
    /// A textual representation of `self`, suitable for debugging.
    public var debugDescription: String {
        switch self {
        case .unknown:
            return "Unknown error has occurred."
        case let .nonHTTPResponse(response):
            return "Response is not NSHTTPURLResponse `\(response)`."
        case let .httpRequestFailed(response, _):
            return "HTTP request failed with `\(response.statusCode)`."
        case let .deserializationError(error):
            return "Error during deserialization of the response: \(error)"
        }
    }
}

fileprivate func escapeTerminalString(_ value: String) -> String {
    return value.replacingOccurrences(of: "\"", with: "\\\"", options: [], range: nil)
}

fileprivate func convertURLRequestToCurlCommand(_ request: URLRequest) -> String {
    let method = request.httpMethod ?? "GET"
    var returnValue = "curl -X \(method) "

    if let httpBody = request.httpBody, request.httpMethod == "POST" {
        let maybeBody = String(data: httpBody, encoding: String.Encoding.utf8)
        if let body = maybeBody {
            returnValue += "-d \"\(escapeTerminalString(body))\" "
        }
    }

    for (key, value) in request.allHTTPHeaderFields ?? [:] {
        let escapedKey = escapeTerminalString(key as String)
        let escapedValue = escapeTerminalString(value as String)
        returnValue += "\n    -H \"\(escapedKey): \(escapedValue)\" "
    }

    let URLString = request.url?.absoluteString ?? "<unknown url>"

    returnValue += "\n\"\(escapeTerminalString(URLString))\""
    returnValue += " -i -v"

    return returnValue
}

fileprivate func convertResponseToString(_ response: URLResponse?, _ error: NSError?, _ interval: TimeInterval) -> String {
    let ms = Int(interval * 1000)

    if let response = response as? HTTPURLResponse {
        if 200 ..< 300 ~= response.statusCode {
            return "Success (\(ms)ms): Status \(response.statusCode)"
        } else {
            return "Failure (\(ms)ms): Status \(response.statusCode)"
        }
    }

    if let error = error {
        if error.domain == NSURLErrorDomain && error.code == NSURLErrorCancelled {
            return "Canceled (\(ms)ms)"
        }
        return "Failure (\(ms)ms): NSError > \(error)"
    }

    return "<Unhandled response from server>"
}

extension Reactive where Base: URLSession {
    /**
    Observable sequence of responses for URL request.

    Performing of request starts after observer is subscribed and not after invoking this method.

    **URL requests will be performed per subscribed observer.**

    Any error during fetching of the response will cause observed sequence to terminate with error.

    - parameter request: URL request.
    - returns: Observable sequence of URL responses.
    */
    public func response(request: URLRequest) -> Observable<(response: HTTPURLResponse, data: Data)> {
        return Observable.create { observer in
            // smart compiler should be able to optimize this out
            let d: Date?

            if Logging.URLRequests(request) {
                d = Date()
            } else {
                d = nil
            }

            let task = self.base.dataTask(with: request) { (data, response, error) in
                if Logging.URLRequests(request) {
                    let interval = Date().timeIntervalSince(d ?? Date())
                    print(convertURLRequestToCurlCommand(request))
                    #if os(Linux)
                        print(convertResponseToString(response, error.flatMap { $0 as? NSError }, interval))
                    #else
                        print(convertResponseToString(response, error.map { $0 as NSError }, interval))
                    #endif
                }

                guard let response = response, let data = data else {
                    observer.on(.error(error ?? RxCocoaURLError.unknown))
                    return
                }

                guard let httpResponse = response as? HTTPURLResponse else {
                    observer.on(.error(RxCocoaURLError.nonHTTPResponse(response: response)))
                    return
                }

                observer.on(.next((httpResponse, data)))
                observer.on(.completed)
            }

            task.resume()

            return Disposables.create(with: task.cancel)
        }
    }

    /**
    Observable sequence of response data for URL request.

    Performing of request starts after observer is subscribed and not after invoking this method.

    **URL requests will be performed per subscribed observer.**

    Any error during fetching of the response will cause observed sequence to terminate with error.

    If response is not HTTP response with status code in the range of `200 ..< 300`, sequence
    will terminate with `(RxCocoaErrorDomain, RxCocoaError.NetworkError)`.

    - parameter request: URL request.
    - returns: Observable sequence of response data.
    */
    public func data(request: URLRequest) -> Observable<Data> {
        return response(request: request).map { pair -> Data in
            if 200 ..< 300 ~= pair.0.statusCode {
                return pair.1
            } else {
                throw RxCocoaURLError.httpRequestFailed(response: pair.0, data: pair.1)
            }
        }
    }

    /**
    Observable sequence of response JSON for URL request.

    Performing of request starts after observer is subscribed and not after invoking this method.

    **URL requests will be performed per subscribed observer.**

    Any error during fetching of the response will cause observed sequence to terminate with error.

    If response is not HTTP response with status code in the range of `200 ..< 300`, sequence
    will terminate with `(RxCocoaErrorDomain, RxCocoaError.NetworkError)`.

    If there is an error during JSON deserialization observable sequence will fail with that error.

    - parameter request: URL request.
    - returns: Observable sequence of response JSON.
    */
    public func json(request: URLRequest, options: JSONSerialization.ReadingOptions = []) -> Observable<Any> {
        return data(request: request).map { (data) -> Any in
            do {
                return try JSONSerialization.jsonObject(with: data, options: options)
            } catch let error {
                throw RxCocoaURLError.deserializationError(error: error)
            }
        }
    }

    /**
    Observable sequence of response JSON for GET request with `URL`.

    Performing of request starts after observer is subscribed and not after invoking this method.

    **URL requests will be performed per subscribed observer.**

    Any error during fetching of the response will cause observed sequence to terminate with error.

    If response is not HTTP response with status code in the range of `200 ..< 300`, sequence
    will terminate with `(RxCocoaErrorDomain, RxCocoaError.NetworkError)`.

    If there is an error during JSON deserialization observable sequence will fail with that error.

    - parameter url: URL of `NSURLRequest` request.
    - returns: Observable sequence of response JSON.
    */
    public func json(url: Foundation.URL) -> Observable<Any> {
        return json(request: URLRequest(url: url))
    }
}
{ "pile_set_name": "Github" }
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)

module Path = PyrePath

type t =
  | Root of Path.t
  | Subdirectory of {
      root: Path.t;
      subdirectory: string;
    }
[@@deriving sexp, compare, hash, show, eq]

type search_result = {
  relative_path: Path.RelativePath.t;  (** The searched path relative to one of the search root *)
  priority: int;  (** Smaller int means higher priority *)
}

val get_root : t -> Path.t

val to_path : t -> Path.t

(* Create search path from its string representation. This operation does NOT have filesystem
   side-effect. *)
val create : string -> t

(* Create a normalized search path from its string representation. Normalizing a path means to
   expand its relativized root and follow symlinks. This operation DOES have filesystem
   side-effect. *)
val create_normalized : string -> t

(* Turn a potentially un-normalized search path into a normalized one. This operation DOES have
   filesystem side-effect. *)
val normalize : t -> t

val search_for_path : search_paths:t list -> Path.t -> search_result option
{ "pile_set_name": "Github" }
<h1 class="heading">单表访问方法</h1> <p>标签: MySQL 是怎样运行的</p> <hr> <p>对于我们这些<code>MySQL</code>的使用者来说,<code>MySQL</code>其实就是一个软件,平时用的最多的就是查询功能。DBA时不时丢过来一些慢查询语句让优化,我们如果连查询是怎么执行的都不清楚还优化个毛线,所以是时候掌握真正的技术了。我们在第一章的时候就曾说过,<code>MySQL Server</code>有一个称为<code>查询优化器</code>的模块,一条查询语句进行语法解析之后就会被交给查询优化器来进行优化,优化的结果就是生成一个所谓的<code>执行计划</code>,这个执行计划表明了应该使用哪些索引进行查询,表之间的连接顺序是啥样的,最后会按照执行计划中的步骤调用存储引擎提供的方法来真正的执行查询,并将查询结果返回给用户。不过查询优化这个主题有点儿大,在学会跑之前还得先学会走,所以本章先来瞅瞅<code>MySQL</code>怎么执行单表查询(就是<code>FROM</code>子句后边只有一个表,最简单的那种查询~)。不过需要强调的一点是,在学习本章前务必看过前边关于记录结构、数据页结构以及索引的部分,如果你不能保证这些东西已经完全掌握,那么本章不适合你。</p> <p>为了故事的顺利发展,我们先得有个表:</p> <pre><code class="hljs bash" lang="bash">CREATE TABLE single_table ( id INT NOT NULL AUTO_INCREMENT, key1 VARCHAR(100), key2 INT, key3 VARCHAR(100), key_part1 VARCHAR(100), key_part2 VARCHAR(100), key_part3 VARCHAR(100), common_field VARCHAR(100), PRIMARY KEY (id), KEY idx_key1 (key1), UNIQUE KEY idx_key2 (key2), KEY idx_key3 (key3), KEY idx_key_part(key_part1, key_part2, key_part3) ) Engine=InnoDB CHARSET=utf8; </code></pre><p>我们为这个<code>single_table</code>表建立了1个聚簇索引和4个二级索引,分别是:</p> <ul> <li> <p>为<code>id</code>列建立的聚簇索引。</p> </li> <li> <p>为<code>key1</code>列建立的<code>idx_key1</code>二级索引。</p> </li> <li> <p>为<code>key2</code>列建立的<code>idx_key2</code>二级索引,而且该索引是唯一二级索引。</p> </li> <li> <p>为<code>key3</code>列建立的<code>idx_key3</code>二级索引。</p> </li> <li> <p>为<code>key_part1</code>、<code>key_part2</code>、<code>key_part3</code>列建立的<code>idx_key_part</code>二级索引,这也是一个联合索引。</p> </li> </ul> <p>然后我们需要为这个表插入 10000 行记录,除<code>id</code>列外其余的列都插入随机值就好了,具体的插入语句我就不写了,自己写个程序插入吧(id列是自增主键列,不需要我们手动插入)。</p> <h2 class="heading">访问方法(access method)的概念</h2> <p>想必各位都用过高德地图来查找到某个地方的路线吧(此处没有为高德地图打广告的意思,他们没给我钱,大家用百度地图也可以啊),如果我们搜西安钟楼到大雁塔之间的路线的话,地图软件会给出 n 种路线供我们选择,如果我们实在闲的没事儿干并且足够有钱的话,还可以用南辕北辙的方式绕地球一圈到达目的地。也就是说,不论采用哪一种方式,我们最终的目标就是到达大雁塔这个地方。回到<code>MySQL</code>中来,我们平时所写的那些查询语句本质上只是一种声明式的语法,只是告诉<code>MySQL</code>我们要获取的数据符合哪些规则,至于<code>MySQL</code>背地里是怎么把查询结果搞出来的那是<code>MySQL</code>自己的事儿。对于单个表的查询来说,设计 MySQL 的大叔把查询的执行方式大致分为下边两种:</p> <ul> <li> <p>使用全表扫描进行查询</p> <p>这种执行方式很好理解,就是把表的每一行记录都扫一遍嘛,把符合搜索条件的记录加入到结果集就完了。不管是啥查询都可以使用这种方式执行,当然,这种也是最笨的执行方式。</p> </li> <li> <p>使用索引进行查询</p> <p>因为直接使用全表扫描的方式执行查询要遍历好多记录,所以代价可能太大了。如果查询语句中的搜索条件可以使用到某个索引,那直接使用索引来执行查询可能会加快查询执行的时间。使用索引来执行查询的方式五花八门,又可以细分为许多种类:</p> <ul> <li> <p>针对主键或唯一二级索引的等值查询</p> </li> <li> <p>针对普通二级索引的等值查询</p> </li> <li> <p>针对索引列的范围查询</p> </li> <li> <p>直接扫描整个索引</p> </li> </ul> </li> </ul> <p>设计<code>MySQL</code>的大叔把<code>MySQL</code>执行查询语句的方式称之为<code>访问方法</code>或者<code>访问类型</code>。同一个查询语句可能可以使用多种不同的访问方法来执行,虽然最后的查询结果都是一样的,但是执行的时间可能差老鼻子远了,就像是从钟楼到大雁塔,你可以坐火箭去,也可以坐飞机去,当然也可以坐乌龟去。下边细细道来各种<code>访问方法</code>的具体内容。</p> <h2 class="heading">const</h2> <p>有的时候我们可以通过主键列来定位一条记录,比方说这个查询:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE id = 1438; </code></pre><p><code>MySQL</code>会直接利用主键值在聚簇索引中定位对应的用户记录,就像这样:</p> <p></p><figure><img alt="image_1ctendl4319v659s1dfoj6lssl16.png-36.4kB" src="https://user-gold-cdn.xitu.io/2018/12/21/167cece536c16a14?w=600&amp;h=491&amp;f=png&amp;s=37259"><figcaption></figcaption></figure><p></p> <p>原谅我把聚簇索引对应的复杂的<code>B+</code>树结构搞了一个极度精简版,为了突出重点,我们忽略掉了<code>页</code>的结构,直接把所有的叶子节点的记录都放在一起展示,而且记录中只展示我们关心的索引列,对于<code>single_table</code>表的聚簇索引来说,展示的就是<code>id</code>列。我们想突出的重点就是:<code>B+</code>树叶子节点中的记录是按照索引列排序的,对于的聚簇索引来说,它对应的<code>B+</code>树叶子节点中的记录就是按照<code>id</code>列排序的。<code>B+</code>树本来就是一个矮矮的大胖子,所以这样根据主键值定位一条记录的速度贼快。类似的,我们根据唯一二级索引列来定位一条记录的速度也是贼快的,比如下边这个查询:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE key2 = 3841; 
<h2 class="heading">The Concept of an Access Method</h2>
<p>You have surely used a map app to find a route somewhere (no advertising money changed hands here; any map app will do). If we search for routes from the Bell Tower in Xi'an to the Giant Wild Goose Pagoda, the app offers n routes to choose from, and if we were idle enough and rich enough we could even travel the wrong way around the globe and still arrive. Whichever way we go, the destination is the same. Back in <code>MySQL</code>: the query statements we write are essentially declarative. They only tell <code>MySQL</code> what rules the data we want must satisfy; how <code>MySQL</code> actually produces the result behind the scenes is its own business. For a single-table query, the designers of MySQL divide the ways of executing it into two broad kinds:</p>
<ul>
<li><p>Execute the query with a full table scan</p>
<p>Easy to understand: scan every row of the table and add the ones that match the search conditions to the result set. Any query whatsoever can be executed this way; it is also the dumbest way.</p></li>
<li><p>Execute the query using an index</p>
<p>A full table scan has to walk a great many rows, so its cost may be too high. If some search condition in the query can use an index, going through that index may speed the query up considerably. Index-based execution comes in several flavors:</p>
<ul>
<li><p>an equality search on the primary key or a unique secondary index;</p></li>
<li><p>an equality search on an ordinary secondary index;</p></li>
<li><p>a range search on an index column;</p></li>
<li><p>a scan of an entire index.</p></li>
</ul>
</li>
</ul>
<p>The MySQL designers call the way a query statement is executed its <code>access method</code> or <code>access type</code>. The same query can often be executed with several different access methods; the final result is identical, but the execution times can differ enormously, like going from the Bell Tower to the Pagoda by rocket, by plane, or by tortoise. The rest of this chapter spells out each access method in detail.</p>
<h2 class="heading">const</h2>
<p>Sometimes we can locate a single record through the primary key, for example:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE id = 1438;
</code></pre><p><code>MySQL</code> simply uses the primary key value to locate the matching record in the clustered index, like this:</p>
<figure><img alt="image_1ctendl4319v659s1dfoj6lssl16.png-36.4kB" src="https://user-gold-cdn.xitu.io/2018/12/21/167cece536c16a14?w=600&amp;h=491&amp;f=png&amp;s=37259"></figure>
<p>Forgive me for reducing the clustered index's rather involved <code>B+</code> tree to an extremely simplified picture: to keep the focus, the <code>page</code> structure is omitted, all leaf-node records are drawn in one row, and each record shows only the index column we care about, which for the clustered index of <code>single_table</code> is the <code>id</code> column. The point to take away is that the records in the leaf nodes of a <code>B+</code> tree are sorted by the index column; for the clustered index, that means sorted by <code>id</code>. A <code>B+</code> tree is a short, fat thing, so pinning down one record by its primary key value this way is blazingly fast. Locating a single record through a unique secondary index column is just as fast, for example:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 = 3841;
</code></pre><p>The execution of this query looks like this:</p>
<figure><img alt="image_1cthurrlpbhlotsjru1dsjrrl30.png-110.2kB" src="https://user-gold-cdn.xitu.io/2018/12/21/167cece53760be3d?w=845&amp;h=618&amp;f=png&amp;s=112848"></figure>
<p>As the figure shows, the query runs in two steps. First, the equality comparison between <code>key2</code> and the constant locates one secondary index record in the <code>B+</code> tree of <code>idx_key2</code>; then the <code>id</code> value in that record is used to fetch the complete user record from the clustered index.</p>
<p>The MySQL designers consider locating one record through an equality comparison between a constant and the primary key or a unique secondary index column to be rocket-fast, so they defined this access method as <code>const</code>: constant-level, with a cost that can essentially be ignored. Note that <code>const</code> applies only when a primary key column or unique secondary index column is compared for equality with a constant; if the primary key or unique secondary index consists of several columns, every one of those columns must be equality-compared with a constant for <code>const</code> to apply (only then is at most a single record pinned down).</p>
<p>For a unique secondary index, querying for <code>NULL</code> values is a special case:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 IS NULL;
</code></pre><p>A unique secondary index places no limit on the number of <code>NULL</code> values it stores, so the statement above may match several records and therefore cannot use the <code>const</code> access method.</p>
<h2 class="heading">ref</h2>
<p>Sometimes we equality-compare an ordinary (non-unique) secondary index column with a constant, like this:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key1 = 'abc';
</code></pre><p>We could of course run this as a full table scan, checking the condition row by row. But we can also use the secondary index to find the matching records' <code>id</code> values first and then look up the complete user records in the clustered index. Because an ordinary secondary index does not enforce unique values, several records may match, so the cost of executing through the secondary index depends on how many secondary index records the equality match hits. If only a few match, the table lookups stay cheap, and <code>MySQL</code> may well choose the index over a full table scan. The designers named this access method, an equality comparison between a secondary index column and a constant executed through that secondary index, <code>ref</code> (a code sketch of the underlying locate-and-scan idea follows the list below). Here is what executing a query with <code>ref</code> looks like:</p>
<figure><img alt="image_1ctf14vso11cdclsmc6ac8pru9h.png-109.5kB" src="https://user-gold-cdn.xitu.io/2018/12/21/167cece5377ba4d7?w=762&amp;h=606&amp;f=png&amp;s=112165"></figure>
<p>As the figure shows, an equality match on an ordinary secondary index may hit several consecutive records, not at most one as with the primary key or a unique secondary index. So <code>ref</code> is a small notch below <code>const</code>, but it is still very efficient when few secondary index records match (if too many match, the table lookups become too expensive): roughly high-speed rail rather than a rocket. Two situations deserve attention:</p>
<ul>
<li><p>The secondary index column value is <code>NULL</code></p>
<p>Whether the secondary index is ordinary or unique, the number of <code>NULL</code>s in its index column is unrestricted, so a search condition of the form <code>key IS NULL</code> can use at most the <code>ref</code> access method, never <code>const</code>.</p></li>
<li><p>For a secondary index spanning several columns, the access method can be <code>ref</code> as long as the leftmost consecutive index columns are each equality-compared with constants, for example:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key_part1 = 'god like';

SELECT * FROM single_table WHERE key_part1 = 'god like' AND key_part2 = 'legendary';

SELECT * FROM single_table WHERE key_part1 = 'god like' AND key_part2 = 'legendary' AND key_part3 = 'penta kill';
</code></pre><p>But if the leftmost consecutive index columns are not all equality comparisons, the access method can no longer be called <code>ref</code>:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key_part1 = 'god like' AND key_part2 &gt; 'legendary';
</code></pre></li>
</ul>
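<p>Since the leaf records of a secondary index are kept sorted by the index column, the locate step behind <code>const</code> and <code>ref</code> can be pictured as a lower-bound binary search followed by a forward scan over equal values. A toy Java sketch of that idea over plain sorted arrays (illustrative only; the names are invented, and InnoDB of course does this inside B+ tree pages rather than flat arrays):</p>
<pre><code class="hljs java" lang="java">import java.util.ArrayList;
import java.util.List;

public class RefLookupSketch {
  // Returns the primary keys of all entries whose key equals target, assuming
  // keys[] is sorted and ids[i] is the primary key stored with keys[i].
  static List&lt;Integer&gt; refLookup(String[] keys, int[] ids, String target) {
    int lo = 0, hi = keys.length;          // lower-bound binary search
    while (lo &lt; hi) {
      int mid = (lo + hi) &gt;&gt;&gt; 1;
      if (keys[mid].compareTo(target) &lt; 0) {
        lo = mid + 1;
      } else {
        hi = mid;
      }
    }
    List&lt;Integer&gt; matches = new ArrayList&lt;&gt;();
    // Equal values sit next to each other, so scan forward while they match.
    for (int i = lo; i &lt; keys.length &amp;&amp; keys[i].equals(target); i++) {
      matches.add(ids[i]);                 // each id then goes to the clustered index lookup
    }
    return matches;
  }

  public static void main(String[] args) {
    String[] keys = {"abc", "abc", "abc", "def", "xyz"};
    int[] ids = {17, 42, 99, 3, 8};
    System.out.println(refLookup(keys, ids, "abc")); // [17, 42, 99]
  }
}
</code></pre>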
src="https://user-gold-cdn.xitu.io/2018/12/21/167cece5375a11e7?w=757&amp;h=597&amp;f=png&amp;s=125422"><figcaption></figcaption></figure><p></p> <p>可以看到,上边的查询相当于先分别从<code>idx_key1</code>索引对应的<code>B+</code>树中找出<code>key1 IS NULL</code>和<code>key1 = 'abc'</code>的两个连续的记录范围,然后根据这些二级索引记录中的<code>id</code>值再回表查找完整的用户记录。</p> <h2 class="heading">range</h2> <p>我们之前介绍的几种访问方法都是在对索引列与某一个常数进行等值比较的时候才可能使用到(<code>ref_or_null</code>比较奇特,还计算了值为<code>NULL</code>的情况),但是有时候我们面对的搜索条件更复杂,比如下边这个查询:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE key2 IN (1438, 6328) OR (key2 &gt;= 38 AND key2 &lt;= 79); </code></pre><p>我们当然还可以使用全表扫描的方式来执行这个查询,不过也可以使用<code>二级索引 + 回表</code>的方式执行,如果采用<code>二级索引 + 回表</code>的方式来执行的话,那么此时的搜索条件就不只是要求索引列与常数的等值匹配了,而是索引列需要匹配某个或某些范围的值,在本查询中<code>key2</code>列的值只要匹配下列3个范围中的任何一个就算是匹配成功了:</p> <ul> <li> <p><code>key2</code>的值是<code>1438</code></p> </li> <li> <p><code>key2</code>的值是<code>6328</code></p> </li> <li> <p><code>key2</code>的值在<code>38</code>和<code>79</code>之间。</p> </li> </ul> <p>设计<code>MySQL</code>的大叔把这种利用索引进行范围匹配的访问方法称之为:<code>range</code>。</p> <pre><code class="hljs bash" lang="bash">小贴士: 此处所说的使用索引进行范围匹配中的 `索引` 可以是聚簇索引,也可以是二级索引。 </code></pre><p>如果把这几个所谓的<code>key2</code>列的值需要满足的<code>范围</code>在数轴上体现出来的话,那应该是这个样子:</p> <p></p><figure><img alt="image_1cth9mkf41li1dad1tnd6dm5139.png-9.2kB" src="https://user-gold-cdn.xitu.io/2018/12/21/167cece53770ba99?w=654&amp;h=150&amp;f=png&amp;s=9381"><figcaption></figcaption></figure><p></p> <p>也就是从数学的角度看,每一个所谓的范围都是数轴上的一个<code>区间</code>,3个范围也就对应着3个区间:</p> <ul> <li> <p>范围1:<code>key2 = 1438</code></p> </li> <li> <p>范围2:<code>key2 = 6328</code></p> </li> <li> <p>范围3:<code>key2 ∈ [38, 79]</code>,注意这里是闭区间。</p> </li> </ul> <p>我们可以把那种索引列等值匹配的情况称之为<code>单点区间</code>,上边所说的<code>范围1</code>和<code>范围2</code>都可以被称为单点区间,像<code>范围3</code>这种的我们可以称为连续范围区间。</p> <h2 class="heading">index</h2> <p>看下边这个查询:</p> <pre><code class="hljs bash" lang="bash">SELECT key_part1, key_part2, key_part3 FROM single_table WHERE key_part2 = <span class="hljs-string">'abc'</span>; </code></pre><p>由于<code>key_part2</code>并不是联合索引<code>idx_key_part</code>最左索引列,所以我们无法使用<code>ref</code>或者<code>range</code>访问方法来执行这个语句。但是这个查询符合下边这两个条件:</p> <ul> <li> <p>它的查询列表只有3个列:<code>key_part1</code>, <code>key_part2</code>, <code>key_part3</code>,而索引<code>idx_key_part</code>又包含这三个列。</p> </li> <li> <p>搜索条件中只有<code>key_part2</code>列。这个列也包含在索引<code>idx_key_part</code>中。</p> </li> </ul> <p>也就是说我们可以直接通过遍历<code>idx_key_part</code>索引的叶子节点的记录来比较<code>key_part2 = 'abc'</code>这个条件是否成立,把匹配成功的二级索引记录的<code>key_part1</code>, <code>key_part2</code>, <code>key_part3</code>列的值直接加到结果集中就行了。由于二级索引记录比聚簇索记录小的多(聚簇索引记录要存储所有用户定义的列以及所谓的隐藏列,而二级索引记录只需要存放索引列和主键),而且这个过程也不用进行回表操作,所以直接遍历二级索引比直接遍历聚簇索引的成本要小很多,设计<code>MySQL</code>的大叔就把这种采用遍历二级索引记录的执行方式称之为:<code>index</code>。</p> <h2 class="heading">all</h2> <p>最直接的查询执行方式就是我们已经提了无数遍的全表扫描,对于<code>InnoDB</code>表来说也就是直接扫描聚簇索引,设计<code>MySQL</code>的大叔把这种使用全表扫描执行查询的方式称之为:<code>all</code>。</p> <h2 class="heading">注意事项</h2> <h3 class="heading">重温 二级索引 + 回表</h3> <p><span style="color:red">一般情况下</span>只能利用单个二级索引执行查询,比方说下边的这个查询:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE key1 = <span class="hljs-string">'abc'</span> AND key2 &gt; 1000; </code></pre><p>查询优化器会识别到这个查询中的两个搜索条件:</p> <ul> <li> <p><code>key1 = 'abc'</code></p> </li> <li> <p><code>key2 &gt; 1000</code></p> </li> </ul> 
<h2 class="heading">Caveats</h2>
<h3 class="heading">Secondary index + table lookup, revisited</h3>
<p><span style="color:red">In general</span>, only a single secondary index can be used to execute a query, for example:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key1 = 'abc' AND key2 &gt; 1000;
</code></pre><p>The query optimizer recognizes the two search conditions in this query:</p>
<ul>
<li><p><code>key1 = 'abc'</code></p></li>
<li><p><code>key2 &gt; 1000</code></p></li>
</ul>
<p>The optimizer generally consults the statistics on <code>single_table</code> to estimate which condition would scan fewer rows through its corresponding secondary index, and picks that one (the details of this comparison come in a later chapter). The results from that secondary index are turned into complete user records via table lookups, and the remaining <code>WHERE</code> conditions are then applied to those rows. Generally, an equality search needs to scan fewer rows than a range search (that is, <code>ref</code> usually beats <code>range</code>, though not always: the particular value matched by <code>ref</code> may occur in an unusually large number of rows). So suppose the optimizer decides to use <code>idx_key1</code>; the whole query then runs in two steps:</p>
<ul>
<li><p>Step 1: locate records through the secondary index, i.e. find the secondary index records satisfying <code>key1 = 'abc'</code> in the <code>B+</code> tree of <code>idx_key1</code>.</p></li>
<li><p>Step 2: the table lookup phase, i.e. use the primary key values found in step 1 to fetch the complete user records from the clustered index, then filter those records against the remaining condition <code>key2 &gt; 1000</code>. The rows that survive are returned to the user.</p></li>
</ul>
<p>One point deserves special emphasis: <span style="color:red">because secondary index records contain only the index columns and the primary key, step 1, which uses <code>idx_key1</code>, can only use the search conditions involving <code>key1</code>; other conditions such as <code>key2 &gt; 1000</code> are useless in step 1 and can only be applied after the table lookups in step 2 produce complete user records</span>.</p>
<blockquote class="warning"><p>Tip: note that we said a query generally uses only one secondary index when executing; there are exceptions, which we will discuss in detail below.</p></blockquote>
<h3 class="heading">Pinning down the intervals used by the range access method</h3>
<p>For a <code>B+</code> tree index, whenever an index column is connected to a constant by <code>=</code>, <code>&lt;=&gt;</code>, <code>IN</code>, <code>NOT IN</code>, <code>IS NULL</code>, <code>IS NOT NULL</code>, <code>&gt;</code>, <code>&lt;</code>, <code>&gt;=</code>, <code>&lt;=</code>, <code>BETWEEN</code>, <code>!=</code> (also written <code>&lt;&gt;</code>) or <code>LIKE</code>, a so-called <code>interval</code> is produced.</p>
<blockquote class="warning"><p>Tip: LIKE is special: it can use the index only when matching a complete string or a string prefix, for reasons covered in an earlier chapter. The IN operator behaves like several equality comparisons with <code>=</code> joined by <code>OR</code>, i.e. it produces several single-point intervals; these two statements are equivalent:
SELECT * FROM single_table WHERE key2 IN (1438, 6328);
SELECT * FROM single_table WHERE key2 = 1438 OR key2 = 6328;</p></blockquote>
<p>In everyday work, however, a query's <code>WHERE</code> clause may contain many small search conditions joined by <code>AND</code> or <code>OR</code>. Everyone knows what these operators do, but to repeat:</p>
<ul>
<li><p><code>cond1 AND cond2</code>: the whole expression is <code>TRUE</code> only when both <code>cond1</code> and <code>cond2</code> are <code>TRUE</code>;</p></li>
<li><p><code>cond1 OR cond2</code>: the whole expression is <code>TRUE</code> as soon as either <code>cond1</code> or <code>cond2</code> is <code>TRUE</code>.</p></li>
</ul>
<p>When we want to execute a query with the <code>range</code> access method, the crucial task is to find the usable indexes and the intervals on them. Below we look at how to extract the correct intervals from complex <code>AND</code>/<code>OR</code> conditions in two situations.</p>
<h4 class="heading">All search conditions can use the index</h4>
<p>Sometimes every search condition can use a given index, as in:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 &gt; 100 AND key2 &gt; 200;
</code></pre><p>Both conditions in this query can use <code>key2</code>, i.e. each corresponds to an interval on <code>idx_key2</code>. The two conditions are joined by <code>AND</code>, so we take the intersection of the two intervals; when executing with the <code>range</code> access method, the interval on <code>idx_key2</code> is determined like this:</p>
<figure><img alt="image_1ctia5p09rqss4413qq16gdbbj3q.png-44kB" src="https://user-gold-cdn.xitu.io/2018/12/21/167cece53fac75d6?w=937&amp;h=306&amp;f=png&amp;s=45034"></figure>
<p>The intersection of <code>key2 &gt; 100</code> and <code>key2 &gt; 200</code> is of course <code>key2 &gt; 200</code>, so the query uses the interval <code>(200, +∞)</code> on <code>idx_key2</code>. Primary school material; middle school at the very worst. Now consider joining conditions with <code>OR</code>:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 &gt; 100 OR key2 &gt; 200;
</code></pre><p><code>OR</code> means taking the union of the intervals, so when this query is executed with the <code>range</code> access method, the interval on <code>idx_key2</code> is determined like this:</p>
<figure><img alt="image_1ctia94i617ihr5ncku4ed1gg247.png-49.1kB" src="https://user-gold-cdn.xitu.io/2018/12/21/167cece5630e89ef?w=912&amp;h=313&amp;f=png&amp;s=50244"></figure>
<p>That is, this query uses the interval <code>(100, +∞)</code> on <code>idx_key2</code>.</p>
<h4 class="heading">Some search conditions cannot use the index</h4>
<p>For example:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 &gt; 100 AND common_field = 'abc';
</code></pre><p>Note that the only usable index here is <code>idx_key2</code>, and the records of this secondary index do not contain the <code>common_field</code> column, so the condition <code>common_field = 'abc'</code> is of no use during the phase where records are located through <code>idx_key2</code>; it can only be applied after the table lookups have produced complete user records. Since the notion of an <code>interval</code> exists solely for fetching records from the index, conditions that cannot use the index in question are simply replaced by <code>TRUE</code> when the intervals are determined.</p>
<blockquote class="warning"><p>Tip: we replace conditions that cannot use the index with TRUE because we do not intend to filter on them within this index: every index record is selected regardless of whether it satisfies them, and they are applied later, after the table lookups.</p></blockquote>
<p>After replacing the condition that cannot use <code>idx_key2</code>, the query above becomes:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 &gt; 100 AND TRUE;
</code></pre><p>which simplifies to:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 &gt; 100;
</code></pre><p>So the original query uses the interval <code>(100, +∞)</code> on <code>idx_key2</code>.</p>
<p>Now look at the <code>OR</code> case:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 &gt; 100 OR common_field = 'abc';
</code></pre><p>Likewise, replace the condition that cannot use <code>idx_key2</code> with <code>TRUE</code>:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE key2 &gt; 100 OR TRUE;
</code></pre><p>and simplify:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE TRUE;
</code></pre><p>Er. This means that if we forced the query through <code>idx_key2</code>, the interval would be <code>(-∞, +∞)</code>: every record of the secondary index would have to be fetched and then looked up in the table, which certainly costs more than a plain full table scan. In other words, once a condition that can use an index is joined by <code>OR</code> with a condition that cannot, the index cannot be used at all.</p>
<h4 class="heading">Extracting the matching intervals from complex search conditions</h4>
<p>Some queries have search conditions so complex that merely finding the matching intervals is a chore, for instance:</p>
<pre><code class="hljs sql" lang="sql">SELECT * FROM single_table WHERE
        (key1 &gt; 'xyz' AND key2 = 748) OR
        (key1 &lt; 'abc' AND key1 &gt; 'lmn') OR
        (key1 LIKE '%suf' AND key1 &gt; 'zzz' AND (key2 &lt; 8000 OR common_field = 'abc'));
</code></pre><p>Good grief, that search condition is really something. But do not be dazzled by the complicated surface; analyze it with this routine:</p>
<ul>
<li><p>First see which columns the <code>WHERE</code> conditions touch, and which of those columns might use an index.</p>
<p>The conditions involve the three columns <code>key1</code>, <code>key2</code> and <code>common_field</code>; <code>key1</code> has the ordinary secondary index <code>idx_key1</code>, and <code>key2</code> has the unique secondary index <code>idx_key2</code>.</p></li>
<li><p>For each index that might be used, work out its intervals.</p>
<ul>
<li><p>Suppose we execute the query with <code>idx_key1</code></p>
<ul>
<li><p>Temporarily remove the conditions that cannot use this index by replacing them with <code>TRUE</code>. Besides the conditions on <code>key2</code> and <code>common_field</code>, the condition <code>key1 LIKE '%suf'</code> also cannot use the index, so after the replacement we have:</p>
<pre><code class="hljs sql" lang="sql">(key1 &gt; 'xyz' AND TRUE) OR
(key1 &lt; 'abc' AND key1 &gt; 'lmn') OR
(TRUE AND key1 &gt; 'zzz' AND (TRUE OR TRUE))
</code></pre><p>which simplifies to:</p>
<pre><code class="hljs sql" lang="sql">(key1 &gt; 'xyz') OR
(key1 &lt; 'abc' AND key1 &gt; 'lmn') OR
(key1 &gt; 'zzz')
</code></pre></li>
<li><p>Replace conditions that are always <code>TRUE</code> or <code>FALSE</code></p>
<p>Since <code>key1 &lt; 'abc' AND key1 &gt; 'lmn'</code> can never hold, the search condition reduces to:</p>
<pre><code class="hljs sql" lang="sql">(key1 &gt; 'xyz') OR (key1 &gt; 'zzz')
</code></pre></li>
<li><p>Continue merging the intervals</p>
<p><code>key1 &gt; 'xyz'</code> and <code>key1 &gt; 'zzz'</code> are joined by <code>OR</code>, which means taking their union, so the final merged interval is <code>key1 &gt; 'xyz'</code>. In other words: <span style="color:red">if that pile of search conditions is executed through idx_key1, all secondary index records satisfying <code>key1 &gt; 'xyz'</code> are fetched, their ids are used for table lookups, and after the complete user records are obtained, the remaining search conditions are used to filter them</span>.</p></li>
</ul>
</li>
<li><p>Suppose we execute the query with <code>idx_key2</code></p>
<ul>
<li><p>Temporarily replace the conditions that cannot use this index, namely everything touching <code>key1</code> and <code>common_field</code>, with <code>TRUE</code>:</p>
<pre><code class="hljs sql" lang="sql">(TRUE AND key2 = 748) OR (TRUE AND TRUE) OR (TRUE AND TRUE AND (key2 &lt; 8000 OR TRUE))
</code></pre><p>Well, <code>key2 &lt; 8000 OR TRUE</code> is obviously always <code>TRUE</code>, so the condition simplifies to:</p>
<pre><code class="hljs sql" lang="sql">key2 = 748 OR TRUE
</code></pre><p>which simplifies even further to just:</p>
<pre><code class="hljs sql" lang="sql">TRUE
</code></pre><p>This result means that executing the query through <code>idx_key2</code> would require scanning all of that secondary index's records and then doing a table lookup for each. The loss clearly outweighs the gain, so <code>idx_key2</code> will not be used in this situation.</p></li>
</ul>
</li>
</ul>
</li>
</ul>
<span class="hljs-string">'zzz'</span>) </code></pre></li> <li> <p>替换掉永远为<code>TRUE</code>或<code>FALSE</code>的条件</p> <p>因为符合<code>key1 &lt; 'abc' AND key1 &gt; 'lmn'</code>永远为<code>FALSE</code>,所以上边的搜索条件可以被写成这样:</p> <pre><code class="hljs bash" lang="bash">(key1 &gt; <span class="hljs-string">'xyz'</span>) OR (key1 &gt; <span class="hljs-string">'zzz'</span>) </code></pre></li> <li> <p>继续化简区间</p> <p><code>key1 &gt; 'xyz'</code>和<code>key1 &gt; 'zzz'</code>之间使用<code>OR</code>操作符连接起来的,意味着要取并集,所以最终的结果化简的到的区间就是:<code>key1 &gt; xyz</code>。也就是说:<span style="color:red">上边那个有一坨搜索条件的查询语句如果使用 idx_key1 索引执行查询的话,需要把满足<code>key1 &gt; xyz</code>的二级索引记录都取出来,然后拿着这些记录的id再进行回表,得到完整的用户记录之后再使用其他的搜索条件进行过滤</span>。</p> </li> </ul> </li> <li> <p>假设我们使用<code>idx_key2</code>执行查询</p> <ul> <li> <p>我们需要把那些用不到该索引的搜索条件暂时使用<code>TRUE</code>条件替换掉,其中有关<code>key1</code>和<code>common_field</code>的搜索条件都需要被替换掉,替换结果就是:</p> <pre><code class="hljs bash" lang="bash">(TRUE AND key2 = 748 ) OR (TRUE AND TRUE) OR (TRUE AND TRUE AND (key2 &lt; 8000 OR TRUE)) </code></pre><p>哎呀呀,<code>key2 &lt; 8000 OR TRUE</code>的结果肯定是<code>TRUE</code>呀,也就是说化简之后的搜索条件成这样了:</p> <pre><code class="hljs bash" lang="bash">key2 = 748 OR TRUE </code></pre><p>这个化简之后的结果就更简单了:</p> <pre><code class="hljs bash" lang="bash">TRUE </code></pre><p>这个结果也就意味着如果我们要使用<code>idx_key2</code>索引执行查询语句的话,需要扫描<code>idx_key2</code>二级索引的所有记录,然后再回表,这不是得不偿失么,所以这种情况下不会使用<code>idx_key2</code>索引的。</p> </li> </ul> </li> </ul> </li> </ul> <h3 class="heading">索引合并</h3> <p>我们前边说过<code>MySQL</code>在一般情况下执行一个查询时最多只会用到单个二级索引,但不是还有特殊情况么,在这些特殊情况下也可能在一个查询中使用到多个二级索引,设计<code>MySQL</code>的大叔把这种使用到多个索引来完成一次查询的执行方法称之为:<code>index merge</code>,具体的索引合并算法有下边三种。</p> <h4 class="heading">Intersection合并</h4> <p><code>Intersection</code>翻译过来的意思是<code>交集</code>。这里是说某个查询可以使用多个二级索引,将从多个二级索引中查询到的结果取交集,比方说下边这个查询:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE key1 = <span class="hljs-string">'a'</span> AND key3 = <span class="hljs-string">'b'</span>; </code></pre><p>假设这个查询使用<code>Intersection</code>合并的方式执行的话,那这个过程就是这样的:</p> <ul> <li> <p>从<code>idx_key1</code>二级索引对应的<code>B+</code>树中取出<code>key1 = 'a'</code>的相关记录。</p> </li> <li> <p>从<code>idx_key3</code>二级索引对应的<code>B+</code>树中取出<code>key3 = 'b'</code>的相关记录。</p> </li> <li> <p>二级索引的记录都是由<code>索引列 + 主键</code>构成的,所以我们可以计算出这两个结果集中<code>id</code>值的交集。</p> </li> <li> <p>按照上一步生成的<code>id</code>值列表进行回表操作,也就是从聚簇索引中把指定<code>id</code>值的完整用户记录取出来,返回给用户。</p> </li> </ul> <p>这里有同学会思考:为啥不直接使用<code>idx_key1</code>或者<code>idx_key2</code>只根据某个搜索条件去读取一个二级索引,然后回表后再过滤另外一个搜索条件呢?这里要分析一下两种查询执行方式之间需要的成本代价。</p> <p>只读取一个二级索引的成本:</p> <ul> <li> <p>按照某个搜索条件读取一个二级索引</p> </li> <li> <p>根据从该二级索引得到的主键值进行回表操作,然后再过滤其他的搜索条件</p> </li> </ul> <p>读取多个二级索引之后取交集成本:</p> <ul> <li> <p>按照不同的搜索条件分别读取不同的二级索引</p> </li> <li> <p>将从多个二级索引得到的主键值取交集,然后进行回表操作</p> </li> </ul> <p>虽然读取多个二级索引比读取一个二级索引消耗性能,但是读取二级索引的操作是<code>顺序I/O</code>,而回表操作是<code>随机I/O</code>,所以如果只读取一个二级索引时需要回表的记录数特别多,而读取多个二级索引之后取交集的记录数非常少,当节省的因为<code>回表</code>而造成的性能损耗比访问多个二级索引带来的性能损耗更高时,读取多个二级索引后取交集比只读取一个二级索引的成本更低。</p> <p><code>MySQL</code>在某些特定的情况下才可能会使用到<code>Intersection</code>索引合并:</p> <ul> <li> <p>情况一:二级索引列是等值匹配的情况,对于联合索引来说,在联合索引中的每个列都必须等值匹配,不能出现只出现匹配部分列的情况。</p> <p>比方说下边这个查询可能用到<code>idx_key1</code>和<code>idx_key_part</code>这两个二级索引进行<code>Intersection</code>索引合并的操作:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE key1 = <span class="hljs-string">'a'</span> AND key_part1 = <span class="hljs-string">'a'</span> AND key_part2 = <span class="hljs-string">'b'</span> AND key_part3 = <span 
class="hljs-string">'c'</span>; </code></pre><p>而下边这两个查询就不能进行<code>Intersection</code>索引合并:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE key1 &gt; <span class="hljs-string">'a'</span> AND key_part1 = <span class="hljs-string">'a'</span> AND key_part2 = <span class="hljs-string">'b'</span> AND key_part3 = <span class="hljs-string">'c'</span>; SELECT * FROM single_table WHERE key1 = <span class="hljs-string">'a'</span> AND key_part1 = <span class="hljs-string">'a'</span>; </code></pre><p>第一个查询是因为对<code>key1</code>进行了范围匹配,第二个查询是因为联合索引<code>idx_key_part</code>中的<code>key_part2</code>列并没有出现在搜索条件中,所以这两个查询不能进行<code>Intersection</code>索引合并。</p> </li> <li> <p>情况二:主键列可以是范围匹配</p> <p>比方说下边这个查询可能用到主键和<code>idx_key_part</code>进行<code>Intersection</code>索引合并的操作:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE id &gt; 100 AND key1 = <span class="hljs-string">'a'</span>; </code></pre></li> </ul> <p>为啥呢?凭啥呀?突然冒出这么两个规定让大家一脸懵逼,下边我们慢慢品一品这里头的玄机。这话还得从<code>InnoDB</code>的索引结构说起,你要是记不清麻烦再回头看看。对于<code>InnoDB</code>的二级索引来说,记录先是按照索引列进行排序,如果该二级索引是一个联合索引,那么会按照联合索引中的各个列依次排序。而二级索引的用户记录是由<code>索引列 + 主键</code>构成的,二级索引列的值相同的记录可能会有好多条,这些索引列的值相同的记录又是按照<code>主键</code>的值进行排序的。所以重点来了,之所以在二级索引列都是等值匹配的情况下才可能使用<code>Intersection</code>索引合并,是因为<span style="color:red">只有在这种情况下根据二级索引查询出的结果集是按照主键值排序的</span>。</p> <p>so?还是没看懂根据二级索引查询出的结果集是按照主键值排序的对使用<code>Intersection</code>索引合并有啥好处?小伙子,别忘了<code>Intersection</code>索引合并会把从多个二级索引中查询出的主键值求交集,如果从各个二级索引中查询的到的结果集本身就是已经按照主键排好序的,那么求交集的过程就很easy啦。假设某个查询使用<code>Intersection</code>索引合并的方式从<code>idx_key1</code>和<code>idx_key2</code>这两个二级索引中获取到的主键值分别是:</p> <ul> <li> <p>从<code>idx_key1</code>中获取到已经排好序的主键值:1、3、5</p> </li> <li> <p>从<code>idx_key2</code>中获取到已经排好序的主键值:2、3、4</p> </li> </ul> <p>那么求交集的过程就是这样:逐个取出这两个结果集中最小的主键值,如果两个值相等,则加入最后的交集结果中,否则丢弃当前较小的主键值,再取该丢弃的主键值所在结果集的后一个主键值来比较,直到某个结果集中的主键值用完了,如果还是觉得不太明白那继续往下看:</p> <ul> <li> <p>先取出这两个结果集中较小的主键值做比较,因为<code>1 &lt; 2</code>,所以把<code>idx_key1</code>的结果集的主键值<code>1</code>丢弃,取出后边的<code>3</code>来比较。</p> </li> <li> <p>因为<code>3 &gt; 2</code>,所以把<code>idx_key2</code>的结果集的主键值<code>2</code>丢弃,取出后边的<code>3</code>来比较。</p> </li> <li> <p>因为<code>3 = 3</code>,所以把<code>3</code>加入到最后的交集结果中,继续两个结果集后边的主键值来比较。</p> </li> <li> <p>后边的主键值也不相等,所以最后的交集结果中只包含主键值<code>3</code>。</p> </li> </ul> <p>别看我们写的啰嗦,这个过程其实可快了,时间复杂度是<code>O(n)</code>,但是如果从各个二级索引中查询出的结果集并不是按照主键排序的话,那就要先把结果集中的主键值排序完再来做上边的那个过程,就比较耗时了。</p> <blockquote class="warning"><p>小贴士: 按照有序的主键值去回表取记录有个专有名词儿,叫:Rowid Ordered Retrieval,简称ROR,以后大家在某些地方见到这个名词儿就眼熟了。 </p></blockquote><p>另外,不仅是多个二级索引之间可以采用<code>Intersection</code>索引合并,索引合并也可以有聚簇索引参加,也就是我们上边写的<code>情况二</code>:在搜索条件中有主键的范围匹配的情况下也可以使用<code>Intersection</code>索引合并索引合并。为啥主键这就可以范围匹配了?还是得回到应用场景里,比如看下边这个查询:</p> <pre><code class="hljs bash" lang="bash">SELECT * FROM single_table WHERE key1 = <span class="hljs-string">'a'</span> AND id &gt; 100; </code></pre><p>假设这个查询可以采用<code>Intersection</code>索引合并,我们理所当然的以为这个查询会分别按照<code>id &gt; 100</code>这个条件从聚簇索引中获取一些记录,在通过<code>key1 = 'a'</code>这个条件从<code>idx_key1</code>二级索引中获取一些记录,然后再求交集,其实这样就把问题复杂化了,没必要从聚簇索引中获取一次记录。别忘了二级索引的记录中都带有主键值的,所以可以在从<code>idx_key1</code>中获取到的主键值上直接运用条件<code>id &gt; 100</code>过滤就行了,这样多简单。所以涉及主键的搜索条件只不过是为了从别的二级索引得到的结果集中过滤记录罢了,是不是等值匹配不重要。</p> <p>当然,上边说的<code>情况一</code>和<code>情况二</code>只是发生<code>Intersection</code>索引合并的必要条件,不是充分条件。也就是说即使情况一、情况二成立,也不一定发生<code>Intersection</code>索引合并,这得看优化器的心情。优化器在下边两个条件满足的情况下才趋向于使用<code>Intersection</code>索引合并:</p> <ul> <li> <p>单独根据搜索条件从某个二级索引中获取的记录数太多,导致回表开销太大</p> </li> <li> 
Verbose as that description is, the process itself is very fast — O(n) time. But if the result sets fetched from the secondary indexes were *not* sorted by primary key, they would have to be sorted first before running the procedure above, and that is much more expensive.

> Tip: fetching records by a sorted list of primary key values has its own name — Rowid Ordered Retrieval, ROR for short. Now the term will look familiar when you run into it.

Also, Intersection merge is not limited to combinations of secondary indexes; the clustered index can take part as well, which is exactly `situation two` above: Intersection merge can also be used when the search conditions contain a range match on the primary key. Why does the primary key suddenly get to use a range? Again, it comes back to the use case. Look at this query:

```sql
SELECT * FROM single_table WHERE key1 = 'a' AND id > 100;
```

Assuming the query can use Intersection merge, we naturally imagine it fetching one batch of records from the clustered index for `id > 100`, another batch from the `idx_key1` secondary index for `key1 = 'a'`, and then intersecting the two. That actually over-complicates things — there is no need to read the clustered index at all. Don't forget that every secondary index record already carries the primary key value, so the condition `id > 100` can be applied directly to the primary key values fetched from `idx_key1`. Much simpler. A search condition on the primary key is merely used to filter the result set obtained from some other secondary index, so whether it is an equality match or a range doesn't matter.

Of course, `situation one` and `situation two` above are only *necessary* conditions for Intersection merge, not *sufficient* ones. Even when they hold, the merge may not happen — it depends on the optimizer's mood. The optimizer leans towards Intersection merge only when both of the following are true:

- Reading a single secondary index for one of the search conditions would return too many records, making the table lookups too expensive.
- Intersecting the indexes greatly reduces the number of records that need table lookups.

#### Union merge

When writing queries we often want the records that match one search condition as well as the records that match some other condition — the conditions are in an `OR` relationship. Sometimes the different `OR`-connected conditions fall on different indexes, like this:

```sql
SELECT * FROM single_table WHERE key1 = 'a' OR key3 = 'b'
```

`Intersection` means intersection and applies when search conditions on different indexes are connected with `AND`; `Union` means union and applies when search conditions on different indexes are connected with `OR`. Like Intersection merge, Union merge is only possible for MySQL in certain specific situations:

- Situation one: the secondary index columns are matched by equality; for a composite index, every column of the composite index must appear in an equality match — matching only some of the columns doesn't qualify.

  For example, this query may use the two secondary indexes `idx_key1` and `idx_key_part` for Union merge:

  ```sql
  SELECT * FROM single_table WHERE key1 = 'a' OR ( key_part1 = 'a' AND key_part2 = 'b' AND key_part3 = 'c');
  ```

  while these two queries cannot use Union merge:

  ```sql
  SELECT * FROM single_table WHERE key1 > 'a' OR (key_part1 = 'a' AND key_part2 = 'b' AND key_part3 = 'c');
  SELECT * FROM single_table WHERE key1 = 'a' OR key_part1 = 'a';
  ```

  The first query does a range match on `key1`; in the second, the `key_part2` column of the composite index `idx_key_part` doesn't appear in the search conditions, so neither query can use Union merge.

- Situation two: the primary key column may be matched by a range.

- Situation three: search conditions that themselves use Intersection merge.

  This one is easy to understand: some parts of the search condition produce a primary key set via Intersection merge, and that set is then unioned with the primary key sets obtained in other ways. For example:

  ```sql
  SELECT * FROM single_table WHERE key_part1 = 'a' AND key_part2 = 'b' AND key_part3 = 'c' OR (key1 = 'a' AND key3 = 'b');
  ```

  The optimizer may execute this query as follows:

  - First use Intersection merge on the indexes `idx_key1` and `idx_key3` to obtain one primary key set for the condition `key1 = 'a' AND key3 = 'b'`.
  - Then obtain another primary key set from the composite index `idx_key_part` for the condition `key_part1 = 'a' AND key_part2 = 'b' AND key_part3 = 'c'`.
  - Finally use Union merge to take the union of the two primary key sets, do the table lookups, and return the result to the user.

Naturally, even when a query's conditions fit these situations, Union merge is not guaranteed — again, it depends on the optimizer's mood. The optimizer leans towards Union merge only when both of the following hold:

- Each search condition on its own fetches relatively few records from its secondary index.
- The merge greatly reduces the number of records that need table lookups.

#### Sort-Union merge

The conditions for Union merge are rather harsh: each secondary index column has to be matched by equality before the index can be used. This query, for example, cannot use Union merge:

```sql
SELECT * FROM single_table WHERE key1 < 'a' OR key3 > 'z'
```

That's because the primary key values of the secondary index records fetched for `key1 < 'a'` from `idx_key1` are not sorted, and the primary key values fetched for `key3 > 'z'` from `idx_key3` aren't sorted either. But the conditions `key1 < 'a'` and `key3 > 'z'` are just too tempting, so we can do this instead:

- First fetch the records matching `key1 < 'a'` from the `idx_key1` secondary index and sort them by primary key value.
- Then fetch the records matching `key3 > 'z'` from the `idx_key3` secondary index and sort them by primary key value.
- Now that both sets of primary key values are sorted, the remaining steps are exactly the same as Union merge.

This scheme — sort the primary key values of the secondary index records first, then proceed as in Union merge — is called *Sort-Union* merge. Clearly, Sort-Union merge differs from plain Union merge by the extra step of sorting the secondary index records' primary key values.

> Tip: if there is a Sort-Union merge, why is there no Sort-Intersection merge? Right — no such thing exists. Sort-Union suits the case where each search condition fetches relatively few records from its secondary index, so sorting those records by primary key is cheap. Intersection merge suits the opposite case, where each condition alone fetches so many records that the table lookups would be too expensive and merging visibly cuts the lookup cost; bolting a sort onto that case would mean sorting a huge pile of secondary index records by primary key, which could easily cost more than the table lookups themselves — hence no Sort-Intersection.
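For symmetry with the intersection sketch above, here is the union step that Union and Sort-Union merge share — again only an illustration under names of our choosing, not server code. Sort-Union simply sorts each input by primary key first and then runs the same merge:

```cpp
#include <algorithm>
#include <cstdint>
#include <iterator>
#include <vector>

// Merge two ascending primary-key lists into their union without
// duplicates, the way a Union merge combines per-index result sets.
std::vector<std::uint64_t> unionSorted(const std::vector<std::uint64_t>& a,
                                       const std::vector<std::uint64_t>& b) {
  std::vector<std::uint64_t> out;
  std::set_union(a.begin(), a.end(), b.begin(), b.end(),
                 std::back_inserter(out));
  return out;
}

// Sort-Union: range scans hand us primary keys in index-column order, not
// primary-key order, so sort both sides first, then reuse the same merge.
std::vector<std::uint64_t> sortUnion(std::vector<std::uint64_t> a,
                                     std::vector<std::uint64_t> b) {
  std::sort(a.begin(), a.end());
  std::sort(b.begin(), b.end());
  return unionSorted(a, b);
}
```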
#### Notes on index merge

#### Replacing Intersection merge with a composite index

```sql
SELECT * FROM single_table WHERE key1 = 'a' AND key3 = 'b';
```

The only reason this query can be executed with Intersection merge at all is that `idx_key1` and `idx_key3` are two separate B+ tree indexes. If you build one composite index over the two columns, that index alone settles the matter — no merge needed:

```sql
ALTER TABLE single_table drop index idx_key1, idx_key3, add index idx_key1_key3(key1, key3);
```

Drop the no-longer-needed `idx_key1` and `idx_key3` and add the composite index `idx_key1_key3`; querying through it is fast and clean — no extra B+ tree to read and no result sets to merge. Why not?

> Tip: be careful, though — if some business scenario queries on the `key3` column alone, you will have to add the single-column index on `key3` right back.
{ "pile_set_name": "Github" }
package com.tencent.mm.protocal; import com.tencent.mm.protocal.b.ami; import com.tencent.mm.protocal.b.vr; import com.tencent.mm.sdk.platformtools.be; public final class s$a extends k.c implements k.a { public vr jsA = new vr(); public final byte[] tZ() { jsg = ac.aYu(); jsA.jzd = new ami().aV(be.baN()); jsA.kfq = k.a(this); return jsA.toByteArray(); } public final int ua() { return 572; } } /* Location: * Qualified Name: com.tencent.mm.protocal.s.a * Java Class Version: 6 (50.0) * JD-Core Version: 0.7.1 */
{ "pile_set_name": "Github" }
import Rand from "../../util/Rand.js"; export default class BenchmarkGenerator { constructor() { this.lastLat = Rand.rollFromZero(500); this.lastThr = Rand.rollFromZero(1000); } hasNext() { return true; } next() { let latency, throughput; const time = Date.now(); if (Rand.rolledByOdds(0.1)) { latency = Rand.rollFromZero(500); throughput = Rand.rollFromZero(100000); } else { const latDiff = Rand.rollFromZero(200) - 100; const thrDiff = Rand.rollFromZero(1000) - 500; latency = this.lastLat + latDiff; if (latency < 1) latency = 1; if (latency > 500) latency = 500; throughput = this.lastThr + thrDiff; if (throughput < 1000) throughput = 1000; if (throughput > 1000000) throughput = 1000000; } this.lastLat = latency; this.lastThr = throughput; return { time: time, latency: latency, throughput: throughput } } }
{ "pile_set_name": "Github" }
## DESCRIPTION
## ENTER DESCRIPTION HERE
## ENDDESCRIPTION

## DBsubject(Electricity)
## DBchapter(Heat and Heat Transfer)
## DBsection(Hearing)
## Date(December 2017)
## Institution(Brock University)
## Author(Caroline Promnitz)
## Edited (Sara Hesse, May 23 2018)
## TitleText1('College Physics')
## AuthorText1('Urone et. al')
## EditionText1('2017')
## Section1('17.6')
## Problem1('63')
## KEYWORDS('intensity','decibel')

DOCUMENT();

loadMacros(
"PGbasicmacros.pl",
"MathObjects.pl",
"PGauxiliaryFunctions.pl",
"PGchoicemacros.pl",
"BrockPhysicsMacros.pl",
"PGanswermacros.pl",
"PG_CAPAmacros.pl",
"answerHints.pl"
);

TEXT(beginproblem());
$showPartialCorrectAnswers = 1;
$showHint = 3;

$dB20 = 23;
$dB70 = 70;

BEGIN_TEXT
<strong>If you don't solve this problem correctly in $showHint tries, you can get a hint.</strong>
$PAR
What is the approximate sound intensity level in decibels of a \(600 \, \textrm{Hz}\) tone if it has a loudness of \(20\) phons?
$PAR
\{ans_rule(40)\} \(\textrm{dB}\)
$PAR
END_TEXT

ANS(num_cmp("$dB20"));

BEGIN_TEXT
If it has a loudness of \(70\) phons?
$PAR
\{ans_rule(40)\} \(\textrm{dB}\)
$PAR
END_TEXT

ANS(num_cmp("$dB70"));

BEGIN_HINT
Refer to the graph which relates phons to sound intensity (in decibels) in College Physics.
END_HINT

Context()->normalStrings;

ENDDOCUMENT()
{ "pile_set_name": "Github" }
--- namespace: Titanium.UI.UserWindow.getDOMWindow type: method description: | Return the WebKit DOMWindow of the page loaded in this window if one exists, otherwise return null. A DOMWindow object will not be available until a UI.UserWindow's PAGE_INITIALIZED event has fired. since: 0.5.0 platforms: [osx, linux, win32] returns: DOMWindow|null parameters: []
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!-- *** GENERATED FROM project.xml - DO NOT EDIT *** *** EDIT ../build.xml INSTEAD *** --> <project name="com.jme3.gde.nmgen-impl" basedir=".."> <fail message="Please build using Ant 1.7.1 or higher."> <condition> <not> <antversion atleast="1.7.1"/> </not> </condition> </fail> <property file="nbproject/private/suite-private.properties"/> <property file="nbproject/suite.properties"/> <fail unless="suite.dir">You must set 'suite.dir' to point to your containing module suite</fail> <property file="${suite.dir}/nbproject/private/platform-private.properties"/> <property file="${suite.dir}/nbproject/platform.properties"/> <macrodef name="property" uri="http://www.netbeans.org/ns/nb-module-project/2"> <attribute name="name"/> <attribute name="value"/> <sequential> <property name="@{name}" value="${@{value}}"/> </sequential> </macrodef> <macrodef name="evalprops" uri="http://www.netbeans.org/ns/nb-module-project/2"> <attribute name="property"/> <attribute name="value"/> <sequential> <property name="@{property}" value="@{value}"/> </sequential> </macrodef> <property file="${user.properties.file}"/> <nbmproject2:property name="harness.dir" value="nbplatform.${nbplatform.active}.harness.dir" xmlns:nbmproject2="http://www.netbeans.org/ns/nb-module-project/2"/> <nbmproject2:property name="nbplatform.active.dir" value="nbplatform.${nbplatform.active}.netbeans.dest.dir" xmlns:nbmproject2="http://www.netbeans.org/ns/nb-module-project/2"/> <nbmproject2:evalprops property="cluster.path.evaluated" value="${cluster.path}" xmlns:nbmproject2="http://www.netbeans.org/ns/nb-module-project/2"/> <fail message="Path to 'platform' cluster missing in $${cluster.path} property or using corrupt Netbeans Platform (missing harness)."> <condition> <not> <contains string="${cluster.path.evaluated}" substring="platform"/> </not> </condition> </fail> <import file="${harness.dir}/build.xml"/> </project>
{ "pile_set_name": "Github" }
package com.onlyxiahui.im.message.data.chat;

import java.util.List;

/**
 * Description: chat message content
 *
 * @author XiaHui
 * @date 2015-04-16 08:09:32
 * @version 0.0.1
 */
public class Content {

    private Font font = new Font();
    private List<Section> sections;
    private long timestamp;

    public Font getFont() {
        return font;
    }

    public void setFont(Font font) {
        this.font = font;
    }

    public List<Section> getSections() {
        return sections;
    }

    public void setSections(List<Section> sections) {
        this.sections = sections;
    }

    public long getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }
}
{ "pile_set_name": "Github" }
msgid "" msgstr "" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2014-03-19 17:59+0700\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" msgid "Action" msgstr "" msgid "All queues have been removed" msgstr "" msgid "Are you sure you want to delete ALL queues" msgstr "" msgid "Are you sure you want to delete queue" msgstr "" msgid "Count" msgstr "" msgid "Delete ALL queues" msgstr "" msgid "Message" msgstr "" msgid "Queue Code" msgstr "" msgid "Queue has been removed" msgstr "" msgid "Scheduled" msgstr "" msgid "User" msgstr "" msgid "View SMS queue" msgstr ""
{ "pile_set_name": "Github" }
<?php namespace Drupal\Tests\bar\Functional; use Drupal\dcg_test\TestTrait; use Drupal\Tests\BrowserTestBase; /** * Controller test. * * @group DCG */ final class ControllerTest extends BrowserTestBase { use TestTrait; /** * {@inheritdoc} */ public static $modules = ['bar', 'node']; /** * {@inheritdoc} */ protected $defaultTheme = 'stark'; /** * Test callback. */ public function testController(): void { $user = $this->drupalCreateUser(['access content']); $this->drupalLogin($user); $this->drupalGet('bar/example'); $this->assertPageTitle('Example'); $this->assertSession()->responseMatches('#It works!#s'); } }
{ "pile_set_name": "Github" }
(; Context map ; Tuesday Feb 12, 2013 at 4:30am is the "now" for the tests {:reference-time (time/t -2 2013 2 12 4 30 0) :min (time/t -2 1900) :max (time/t -2 2100)} "sad" "sada" "upravo sad" "ovaj tren" (datetime 2013 2 12 4 30 00) "danas" (datetime 2013 2 12) "jucer" "jučer" (datetime 2013 2 11) "sutra" (datetime 2013 2 13) "ponedjeljak" "pon." "ovaj ponedjeljak" (datetime 2013 2 18 :day-of-week 1) "ponedjeljak, 18. veljace" "ponedjeljak, 18. veljače" (datetime 2013 2 18 :day-of-week 1 :day 18 :month 2) "utorak" "utorak 19." (datetime 2013 2 19) "cetvrtak" "četvrtak" "čet" "cet." (datetime 2013 2 14) "petak" "pet" "pet." (datetime 2013 2 15) "subota" "sub" "sub." (datetime 2013 2 16) "nedjelja" "ned" "ned." (datetime 2013 2 17) "1. ozujak" "1. ožujak" "prvi ozujka" (datetime 2013 3 1 :day 1 :month 3) "treci ozujka" "treci ožujka" (datetime 2013 3 3 :day 3 :month 3) ; "martovske ide" ; (datetime 2013 3 15 :month 3) "3. ozujka 2015" "treci ozujka 2015" "3/3/2015" "3/3/15" "2015-3-3" "2015-03-03" (datetime 2015 3 3 :day 3 :month 3 :year 2015) "15ti drugi" (datetime 2013 2 15 :day 15) "15. veljace" "15. veljače" "15/02" (datetime 2013 2 15 :day 15 :month 2) "8. kolovoza" "8. kolovoz" (datetime 2013 8 8 :day 8 :month 8) "listopad 2014" (datetime 2014 10 :year 2014 :month 10) "31/10/1974" "31/10/74" "74-10-31" (datetime 1974 10 31 :day 31 :month 10 :year 1974) "14travanj 2015" "14. travnja, 2015" "14. travanj 15" (datetime 2015 4 14 :day 14 :month 4 :years 2015) "sljedeci utorak" "sljedeceg utorka" (datetime 2013 2 19 :day-of-week 2) "petak nakon sljedeceg" (datetime 2013 2 22 :day-of-week 2) "sljedeci ozujak" (datetime 2013 3) "ozujak nakon sljedeceg" (datetime 2014 3) "nedjelja, 10. veljace" "nedjelja, 10. veljače" (datetime 2013 2 10 :day-of-week 7 :day 10 :month 2) "Sri, 13. velj" (datetime 2013 2 13 :day-of-week 3 :day 13 :month 2) "ponedjeljak, veljaca 18." "Pon, 18. veljace" (datetime 2013 2 18 :day-of-week 1 :day 18 :month 2) ; ;; Cycles "ovaj tjedan" (datetime 2013 2 11 :grain :week) "prosli tjedan" "prošli tjedan" "prethodni tjedan" (datetime 2013 2 4 :grain :week) "sljedeci tjedan" (datetime 2013 2 18 :grain :week) "prethodni mjesec" (datetime 2013 1) "sljedeci mjesec" (datetime 2013 3) "ovaj kvartal" "ovo tromjesecje" (datetime 2013 1 1 :grain :quarter) "sljedeci kvartal" (datetime 2013 4 1 :grain :quarter) "treci kvartal" "3. kvartal" "trece tromjesecje" "3. tromjesečje" (datetime 2013 7 1 :grain :quarter) "4. 
kvartal 2018" "četvrto tromjesečje 2018" (datetime 2018 10 1 :grain :quarter) "prošla godina" "prethodna godina" (datetime 2012) "ova godina" (datetime 2013) "sljedece godina" (datetime 2014) "prosle nedjelje" "prosli tjedan u nedjelju" (datetime 2013 2 10 :day-of-week 7) "prosli utorak" (datetime 2013 2 5 :day-of-week 2) "sljedeci utorak" ; when today is Tuesday, "mardi prochain" is a week from now (datetime 2013 2 19 :day-of-week 2) "sljedecu srijedu" ; when today is Tuesday, "mercredi prochain" is tomorrow (datetime 2013 2 13 :day-of-week 3) "sljedeci tjedan u srijedu" "srijeda sljedeci tjedan" (datetime 2013 2 20 :day-of-week 3) "sljedeci petak" (datetime 2013 2 15 :day-of-week 5) "ovaj tjedan u ponedjeljak" (datetime 2013 2 11 :day-of-week 1) "ovaj utorak" (datetime 2013 2 19 :day-of-week 2) "ova srijeda" "ovaj tjedan u srijedu" (datetime 2013 2 13 :day-of-week 3) "prekosutra" (datetime 2013 2 14) "prekosutra u 5 popodne" "prekosutra u 17" (datetime 2013 2 14 17) "prekjucer" "prekjučer" (datetime 2013 2 10) "prekjučer u 8" "prekjučer u 8 sati" (datetime 2013 2 10 8) "zadnji ponedjeljak u ozujku" (datetime 2013 3 25 :day-of-week 1) "zadnja nedjelja u ozujku 2014" (datetime 2014 3 30 :day-of-week 7) "treci dan u listopadu" (datetime 2013 10 3) "prvi tjedan u listopadu 2014" (datetime 2014 10 6 :grain :week) "zadnji dan u listopadu 2015" (datetime 2015 10 31) "zadnji tjedan u rujnu 2014" (datetime 2014 9 22 :grain :week) ;; nth of "prvi utorak u listopadu" (datetime 2013 10 1) "treci utorak u rujnu 2014" (datetime 2014 9 16) "prva srijeda u listopadu 2014" (datetime 2014 10 1) "druga srijeda u listopadu 2014" (datetime 2014 10 8) ;; nth after "treci utorak poslije Bozica 2014" (datetime 2015 1 13) ;; Hours "3 u noci" "u 3 ujutro" "u tri sata u noci" (datetime 2013 2 13 3) "3:18 rano" (datetime 2013 2 12 3 18) "u 3 poslijepodne" "@ 15" "15" "15 sati poslijepodne" (datetime 2013 2 12 15 :hour 3 :meridiem :pm) "oko 3 poslijepodne" ;; FIXME pm overrides precision "otprilike u 3 poslijepodne" "cca 3 poslijepodne" "cca 15" (datetime 2013 2 12 15 :hour 3 :meridiem :pm) ;; :precision "approximate" "15 i 15" "3:15 poslijepodne" "15:15" (datetime 2013 2 12 15 15 :hour 3 :minute 15 :meridiem :pm) "cetvrt nakon 3 poslijepodne" (datetime 2013 2 12 15 15 :hour 3 :minute 15 :meridiem :pm :grain :second) "3 i 20 popodne" "3:20 poslijepodne" "3:20 popodne" "dvadeset nakon 3 popodne" "15:20" (datetime 2013 2 12 15 20 :hour 3 :minute 20 :meridiem :pm) "tri i po popodne" "pola 4 popodne" "15:30" "pola cetiri popodne" (datetime 2013 2 12 15 30 :hour 3 :minute 30) "15:23:24" (datetime 2013 2 12 15 23 24 :hour 15 :minute 23 :second 24) "petnaest do podne" "11:45" "četvrt do podneva" ; Ambiguous with interval (datetime 2013 2 12 11 45 :hour 11 :minute 45) "8 navecer" "osam sati navecer" "danas 8 navecer" (datetime 2013 2 12 20) ;; Mixing date and time "u 7:30 popodne u pet, 20. 
rujna" (datetime 2013 9 20 19 30 :hour 7 :minute 30 :meridiem :pm) "9 ujutro u subotu" "u subotu u 9 sati ujutro" (datetime 2013 2 16 9 :day-of-week 6 :hour 9 :meridiem :am) "pet, srp 18., 2014, 19:00" "pet, srp 18., 2014 u 19:00" (datetime 2014 7 18 19 0 :day-of-week 5 :hour 7 :meridiem :pm) ; TODO reported as not found even tough it passes ; "pet, srp 18., 2014, 19 sati 10 minuta" ; (datetime 2014 7 18 19 10 :day-of-week 5 :hour 7 :meridiem :pm) ; ;; Involving periods "za jednu sekundu" (datetime 2013 2 12 4 30 1) "za jednu minutu" (datetime 2013 2 12 4 31 0) "za 2 minute" "za jos 2 minute" "2 minute od sad" (datetime 2013 2 12 4 32 0) "za 60 minuta" (datetime 2013 2 12 5 30 0) "oko cetvrt sata" "oko 1/4h" "oko 1/4 h" "oko 1/4 sata" (datetime 2013 2 12 4 45 0) "za pola sata" "za pol sata" "za 1/2h" "za 1/2 h" "za 1/2 sata" (datetime 2013 2 12 5 0 0) "za tri-cetvrt sata" "za 3/4h" "za 3/4 h" "za 3/4 sata" (datetime 2013 2 12 5 15 0) ; TODO reported as not found, ; "za dva i pol sata" "za 2.5 sata" (datetime 2013 2 12 7 0 0) "za jedan sat" "za 1h" (datetime 2013 2 12 5 30) "za par sati" (datetime 2013 2 12 6 30) "za nekoliko sati" (datetime 2013 2 12 7 30) "za 24 sata" "za 24h" (datetime 2013 2 13 4 30) "za 1 dan" "za jedan dan" (datetime 2013 2 13 4) "3 godine od danasnjeg dana" (datetime 2016 2) "za 7 dana" (datetime 2013 2 19 4) "za 1 tjedan" (datetime 2013 2 19) "za oko pola sata" ;; FIXME precision is lost (datetime 2013 2 12 5 0 0) ;; :precision "approximate" "prije 7 dana" (datetime 2013 2 5 4) "prije 14 dana" (datetime 2013 1 29 4) "prije jedan tjedan" "prije jednog tjedna" (datetime 2013 2 5) "prije tri tjedna" (datetime 2013 1 22) "prije tri mjeseca" (datetime 2012 11 12) "prije dvije godine" (datetime 2011 2) "1954" (datetime 1954) "za 7 dana" (datetime 2013 2 19 4) "za 14 dana" (datetime 2013 2 26 4) "za jedan tjedan" (datetime 2013 2 19) "za tri tjedna" (datetime 2013 3 5) "za tri mjeseca" (datetime 2013 5 12) "za dvije godine" (datetime 2015 2) "jednu godinu poslije Bozica" (datetime 2013 12) ; resolves as after last Xmas... ; Seasons "ovog ljeta" "ovo ljeto" "ljetos" (datetime-interval [2013 6 21] [2013 9 24]) "ove zime" "zimus" (datetime-interval [2012 12 21] [2013 3 21]) ; US holidays (http://www.timeanddate.com/holidays/us/) "Bozic" "zicbo" (datetime 2013 12 25) "stara godina" (datetime 2013 12 31) "nova godina" (datetime 2014 1 1) "valentinovo" (datetime 2013 2 14) "majcin dan" (datetime 2013 5 12) "dan oceva" (datetime 2013 6 16) "noc vjestica" (datetime 2013 10 31) ; Part of day (morning, afternoon...) "veceras" "ove veceri" "danas navecer" (datetime-interval [2013 2 12 18] [2013 2 13 00]) "prosli vikend" (datetime-interval [2013 2 8 18] [2013 2 11 00]) "sutra navecer" ;"Wednesday evening" (datetime-interval [2013 2 13 18] [2013 2 14 00]) "sutra rucak" (datetime-interval [2013 2 13 12] [2013 2 13 14]) "jucer navecer" "prethodne veceri" (datetime-interval [2013 2 11 18] [2013 2 12 00]) "ovaj vikend" "ovog vikenda" (datetime-interval [2013 2 15 18] [2013 2 18 00]) "ponedjeljak ujutro" (datetime-interval [2013 2 18 4] [2013 2 18 12]) "ponedjeljak rano ujutro" "ponedjeljak rano" "ponedjeljak u rane jutarnje sate" (datetime-interval [2013 2 18 3] [2013 2 18 9]) "15. 
veljace ujutro" (datetime-interval [2013 2 15 4] [2013 2 15 12]) ; Intervals involving cycles "prosle 2 sekunde" "prethodne dvije sekunde" (datetime-interval [2013 2 12 4 29 58] [2013 2 12 4 30 00]) "sljedece 3 sekunde" "sljedece tri sekunde" (datetime-interval [2013 2 12 4 30 01] [2013 2 12 4 30 04]) "prosle 2 minute" "prethodne dvije minute" (datetime-interval [2013 2 12 4 28] [2013 2 12 4 30]) "sljedece 3 minute" "sljedece tri minute" (datetime-interval [2013 2 12 4 31] [2013 2 12 4 34]) "prethodni jedan sat" (datetime-interval [2013 2 12 3] [2013 2 12 4]) "prethodna 24 sata" "prethodna dvadeset i cetiri sata" "prethodna dvadeset i cetiri sata" "prethodna 24h" (datetime-interval [2013 2 11 4] [2013 2 12 4]) "sljedeca 3 sata" "sljedeca tri sata" (datetime-interval [2013 2 12 5] [2013 2 12 8]) "prethodna dva dana" "prethodna 2 dana" "prosla 2 dana" (datetime-interval [2013 2 10] [2013 2 12]) "sljedeca 3 dana" "sljedeca tri dana" (datetime-interval [2013 2 13] [2013 2 16]) "sljedecih nekoliko dana" (datetime-interval [2013 2 13] [2013 2 16]) "prethodna 2 tjedna" "prethodna dva tjedna" "prosla 2 tjedna" (datetime-interval [2013 1 28 :grain :week] [2013 2 11 :grain :week]) "sljedeca 3 tjedna" "sljedeca tri tjedna" (datetime-interval [2013 2 18 :grain :week] [2013 3 11 :grain :week]) "prethodna 2 mjeseca" "prethodna dva mjeseca" (datetime-interval [2012 12] [2013 02]) "sljedeca 3 mjeseca" "sljedeca tri mjeseca" (datetime-interval [2013 3] [2013 6]) "prethodne 2 godine" "prethodne dvije godine" (datetime-interval [2011] [2013]) "sljedece 3 godine" "sljedece tri godine" (datetime-interval [2014] [2017]) ; Explicit intervals "srpanj 13-15" "srpanj 13 do 15" "srpanj 13 - srpanj 15" (datetime-interval [2013 7 13] [2013 7 16]) "kol 8 - kol 12" (datetime-interval [2013 8 8] [2013 8 13]) "9:30 - 11:00" (datetime-interval [2013 2 12 9 30] [2013 2 12 11 1]) "od 9:30 - 11:00 u cetvrtak" "između 9:30 i 11:00 u cetvrtak" "9:30 - 11:00 u cetvrtak" "izmedju 9:30 i 11:00 u cetvrtak" "cetvrtak od 9:30 do 11:00" "od 9:30 do 11:00 u cetvrtak" "cetvrtak od 9:30 do 11:00" (datetime-interval [2013 2 14 9 30] [2013 2 14 11 1]) "cetvrtak od 9 do 11 ujutro" (datetime-interval [2013 2 14 9] [2013 2 14 12]) "11:30-1:30" ; go train this rule! "11:30-1:30" "11:30-1:30" "11:30-1:30" "11:30-1:30" "11:30-1:30" "11:30-1:30" (datetime-interval [2013 2 12 11 30] [2013 2 12 13 31]) "1:30 poslijepodne u sub, ruj 21." 
(datetime 2013 9 21 13 30) "sljedeca 2 tjedna" (datetime-interval [2013 2 18 :grain :week] [2013 3 4 :grain :week]) "do 2 poslijepodne" (datetime 2013 2 12 14 :direction :before) "do kraja ovog dana" (datetime-interval [2013 2 12 4 30 0] [2013 2 13 0]) "do kraja dana" (datetime 2013 2 13 0) "do kraja ovog mjeseca" (datetime-interval [2013 2 12 4 30 0] [2013 3 1 0]) "do kraja sljedeceg mjeseca" (datetime-interval [2013 2 12 4 30 0] [2013 4 1 0]) ; Timezones "4 poslijepodne CET" (datetime 2013 2 12 16 :hour 4 :meridiem :pm :timezone "CET") "cetvrtak 8:00 GMT" (datetime 2013 2 14 8 00 :timezone "GMT") ;; Bookface tests "danas u 14" "u 2 poslijepodne" (datetime 2013 2 12 14) "25/4 U 16 sati" (datetime 2013 4 25 16) "15 sati sutra" (datetime 2013 2 13 15) ; winner is picked but has a different hash, strange ; Expected {:start #object[org.joda.time.DateTime 0x701d4ca1 "2013-02-12T14:00:00.000-02:00"], :grain :hour} ; Got {:start #object[org.joda.time.DateTime 0x78ce5efb "2013-02-12T14:00:00.000-02:00"], :grain :hour} ; "nakon 14 sati" ; "iza 14 sati" ; (datetime 2013 2 12 14 :direction :after) "nakon 5 dana" (datetime 2013 2 17 4 :direction :after) "prije 11" (datetime 2013 2 12 11 :direction :before) "poslijepodne" "popodne" (datetime-interval [2013 2 12 12] [2013 2 12 20]) "u 13:30" "13:30" (datetime 2013 2 12 13 30) "za 15 minuta" (datetime 2013 2 12 4 45 0) "poslije rucka" (datetime-interval [2013 2 12 13] [2013 2 12 17]) "10:30" (datetime 2013 2 12 10 30) "jutro" ;; how should we deal with fb mornings? (datetime-interval [2013 2 12 4] [2013 2 12 12]) "sljedeci ponedjeljak" (datetime 2013 2 18 :day-of-week 1) "u 12" "u podne" (datetime 2013 2 12 12) "u 12 u noci" "u ponoc" (datetime 2013 2 13 0) "ozujak" "u ozujku" (datetime 2013 3))
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1 import ( "fmt" "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/labels" ) // ReplicationControllerListerExpansion allows custom methods to be added to // ReplicationControllerLister. type ReplicationControllerListerExpansion interface { GetPodControllers(pod *v1.Pod) ([]*v1.ReplicationController, error) } // ReplicationControllerNamespaceListerExpansion allows custom methods to be added to // ReplicationControllerNamespaceLister. type ReplicationControllerNamespaceListerExpansion interface{} // GetPodControllers returns a list of ReplicationControllers that potentially match a pod. // Only the one specified in the Pod's ControllerRef will actually manage it. // Returns an error only if no matching ReplicationControllers are found. func (s *replicationControllerLister) GetPodControllers(pod *v1.Pod) ([]*v1.ReplicationController, error) { if len(pod.Labels) == 0 { return nil, fmt.Errorf("no controllers found for pod %v because it has no labels", pod.Name) } items, err := s.ReplicationControllers(pod.Namespace).List(labels.Everything()) if err != nil { return nil, err } var controllers []*v1.ReplicationController for i := range items { rc := items[i] selector := labels.Set(rc.Spec.Selector).AsSelectorPreValidated() // If an rc with a nil or empty selector creeps in, it should match nothing, not everything. if selector.Empty() || !selector.Matches(labels.Set(pod.Labels)) { continue } controllers = append(controllers, rc) } if len(controllers) == 0 { return nil, fmt.Errorf("could not find controller for pod %s in namespace %s with labels: %v", pod.Name, pod.Namespace, pod.Labels) } return controllers, nil }
{ "pile_set_name": "Github" }
From 3fa237b3afabc293e563292b8d89265a871626ad Mon Sep 17 00:00:00 2001 From: Martin Kelly <[email protected]> Date: Mon, 22 May 2017 17:00:05 -0700 Subject: [PATCH] add #include <sys/sysmacros.h> In newer glibc versions, the definition for major() has been moved to sys/sysmacros.h, and using the older version in <sys/types.h> has been deprecated. So, add an include for <sys/sysmacros.h>. Upstream-Status: Pending Signed-off-by: Martin Kelly <[email protected]> --- open-vm-tools/lib/wiper/wiperPosix.c | 3 +++ 1 file changed, 3 insertions(+) diff --git a/open-vm-tools/lib/wiper/wiperPosix.c b/open-vm-tools/lib/wiper/wiperPosix.c index bd542410..ccf06293 100644 --- a/open-vm-tools/lib/wiper/wiperPosix.c +++ b/open-vm-tools/lib/wiper/wiperPosix.c @@ -43,6 +43,9 @@ # include <libgen.h> # endif /* __FreeBSD_version >= 500000 */ #endif +#if defined(__linux__) +#include <sys/sysmacros.h> +#endif #include <unistd.h> #include "vmware.h"
{ "pile_set_name": "Github" }
/**
 * P6Spy
 *
 * Copyright (C) 2002 - 2020 P6Spy
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.p6spy.engine.spy.appender;

import com.p6spy.engine.common.P6Util;

/**
 * @author Quinton McCombs
 * @since 09/2013
 */
public class SingleLineFormat implements MessageFormattingStrategy {

  /**
   * Formats a log message for the logging module
   *
   * @param connectionId the id of the connection
   * @param now the current time, expressed in milliseconds
   * @param elapsed the time in milliseconds that the operation took to complete
   * @param category the category of the operation
   * @param prepared the SQL statement with all bind variables replaced with actual values
   * @param sql the sql statement executed
   * @param url the database url where the sql statement executed
   * @return the formatted log message
   */
  @Override
  public String formatMessage(final int connectionId, final String now, final long elapsed, final String category, final String prepared, final String sql, final String url) {
    return now + "|" + elapsed + "|" + category + "|connection " + connectionId + "|url " + url + "|" + P6Util.singleLine(prepared) + "|" + P6Util.singleLine(sql);
  }
}
{ "pile_set_name": "Github" }
.. title:: clang-tidy - cppcoreguidelines-pro-bounds-array-to-pointer-decay cppcoreguidelines-pro-bounds-array-to-pointer-decay =================================================== This check flags all array to pointer decays. Pointers should not be used as arrays. ``span<T>`` is a bounds-checked, safe alternative to using pointers to access arrays. This rule is part of the "Bounds safety" profile of the C++ Core Guidelines, see https://github.com/isocpp/CppCoreGuidelines/blob/master/CppCoreGuidelines.md#Pro-bounds-decay.
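A minimal illustration of the kind of code this check flags, together with a ``span``-based rewrite — assuming a C++20 ``std::span`` (or the Guidelines Support Library's ``gsl::span``) is available:

.. code-block:: c++

  #include <cstddef>
  #include <span>

  int sum(const int *p, std::size_t n) { // pointer used as an array
    int s = 0;
    for (std::size_t i = 0; i != n; ++i)
      s += p[i];
    return s;
  }

  int sumSpan(std::span<const int> xs) { // bounds-aware alternative
    int s = 0;
    for (int x : xs)
      s += x;
    return s;
  }

  void f() {
    int a[3] = {1, 2, 3};
    sum(a, 3);  // flagged: 'a' decays to 'int *' at the call site
    sumSpan(a); // no decay: the span is built from the array with its extent
  }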
{ "pile_set_name": "Github" }
SOURCES = main.cpp
CONFIG += console
{ "pile_set_name": "Github" }
--- -api-id: P:Windows.UI.ViewManagement.UISettings.AdvancedEffectsEnabled -api-type: winrt property --- <!-- Property syntax. public bool AdvancedEffectsEnabled { get; } --> # Windows.UI.ViewManagement.UISettings.AdvancedEffectsEnabled ## -description Gets a value that indicates whether the system Transparency effects setting is enabled. ## -property-value **true** if Transparency UI effects are enabled; otherwise, **false**. ## -remarks ## -see-also ## -examples
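A minimal C++/WinRT sketch of reading the property (illustrative app code, not taken from the API reference):

```cppwinrt
#include <winrt/Windows.UI.ViewManagement.h>

using namespace winrt::Windows::UI::ViewManagement;

// Query whether the user has turned on the system Transparency effects setting.
bool AreAdvancedEffectsEnabled()
{
    UISettings settings;
    return settings.AdvancedEffectsEnabled();
}
```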
{ "pile_set_name": "Github" }
/*============================================================================= Copyright (c) 2001-2008 Joel de Guzman Copyright (c) 2001-2008 Hartmut Kaiser http://spirit.sourceforge.net/ Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) =============================================================================*/ #ifndef BOOST_SPIRIT_DEPRECATED_INCLUDE_COMMON_FWD #define BOOST_SPIRIT_DEPRECATED_INCLUDE_COMMON_FWD #include <boost/version.hpp> #if BOOST_VERSION >= 103800 #if defined(_MSC_VER) || defined(__BORLANDC__) || defined(__DMC__) # pragma message ("Warning: This header is deprecated. Please use: boost/spirit/include/classic_common_fwd.hpp") #elif defined(__GNUC__) || defined(__HP_aCC) || defined(__SUNPRO_CC) || defined(__IBMCPP__) # warning "This header is deprecated. Please use: boost/spirit/include/classic_common_fwd.hpp" #endif #endif #if !defined(BOOST_SPIRIT_USE_OLD_NAMESPACE) #define BOOST_SPIRIT_USE_OLD_NAMESPACE #endif #include <boost/spirit/include/classic_common_fwd.hpp> #endif
{ "pile_set_name": "Github" }
context("dropdown") test_that("Default", { tagdrop <- dropdown( "Content goes here", style = "unite", icon = shiny::icon("gear"), status = "danger", width = "300px", animate = animateOptions( enter = animations$fading_entrances$fadeInLeftBig, exit = animations$fading_exits$fadeOutRightBig ) ) expect_identical(tagdrop$attribs$class, "sw-dropdown") }) test_that("inputId", { tagdrop <- dropdown( "Content goes here", style = "default", icon = shiny::icon("gear"), status = "danger", width = "300px", inputId = "MYID" ) expect_identical(tagdrop$attribs$id, "sw-drop-MYID") })
{ "pile_set_name": "Github" }
import os,sys import exifread import helper import hashlib import magic from PIL import Image, ImageChops, ImageEnhance from PIL.ExifTags import TAGS, GPSTAGS import imagehash import nude from nude import Nude import datetime from geopy.geocoders import Nominatim def basic_info(filename): print ("Extraction of basic information: %s" % (filename,)) statinfo = os.stat(filename) mime = magic.from_file(filename, mime=True) helper.sqlite_insert("MIME",mime,os.path.basename(filename)) helper.sqlite_insert("Size_Bytes",str(statinfo.st_size),os.path.basename(filename)) helper.sqlite_insert("Last_Modification_Time_UTC",str(datetime.datetime.utcfromtimestamp(statinfo.st_mtime).strftime("%Y-%m-%d %H:%M:%S")),os.path.basename(filename)) helper.sqlite_insert("Last_Access_Time_UTC",str(datetime.datetime.utcfromtimestamp(statinfo.st_atime).strftime("%Y-%m-%d %H:%M:%S")),os.path.basename(filename)) helper.sqlite_insert("Creation_Time_UTC",str(datetime.datetime.utcfromtimestamp(statinfo.st_ctime).strftime("%Y-%m-%d %H:%M:%S")),os.path.basename(filename)) return statinfo, mime # Extraction of all exif data def exif_info(filename): print ("Extraction of EXIF data from: %s" % (filename,)) f = open(filename,'rb') tags = exifread.process_file(f) for tag in tags.keys(): if tag not in ('JPEGThumbnail', 'TIFFThumbnail', 'Filename', 'EXIF MakerNote'): type_tag = tag.split(" ", 1)[0] tag_key = tag.split(" ", 1)[1] helper.sqlite_insert(tag_key,tags[tag],os.path.basename(filename)) return filename def md5(filename): print ("Calculating md5 of: %s" % (filename,)) hash_md5 = hashlib.md5() with open(filename, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hash_md5.update(chunk) md5 = hash_md5.hexdigest() helper.sqlite_insert("md5",md5,os.path.basename(filename)) return md5 def sha256(filename): print ("Calculating sha256 of: %s" % (filename,)) hash_sha256 = hashlib.sha256() with open(filename, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hash_sha256.update(chunk) sha256 = hash_sha256.hexdigest() helper.sqlite_insert("sha256",sha256,os.path.basename(filename)) return sha256 def sha512(filename): print ("Calculating sha512 of: %s" % (filename,)) hash_sha512 = hashlib.sha512() with open(filename, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): hash_sha512.update(chunk) sha512 = hash_sha512.hexdigest() helper.sqlite_insert("sha512",sha512,os.path.basename(filename)) return sha512 #modified version of a gist by: https://github.com/ewencp ##BETA## def ela(filename, output_path): print "****ELA is in BETA****" if magic.from_file(filename, mime=True) == "image/jpeg": quality_level = 85 tmp_img = os.path.join(output_path,os.path.basename(filename)+".tmp.jpg") ela = os.path.join(output_path,os.path.basename(filename)+".ela.jpg") image = Image.open(filename) image.save(tmp_img, 'JPEG', quality=quality_level) tmp_img_file = Image.open(tmp_img) ela_image = ImageChops.difference(image, tmp_img_file) extrema = ela_image.getextrema() max_diff = max([ex[1] for ex in extrema]) scale = 255.0/max_diff ela_image = ImageEnhance.Brightness(ela_image).enhance(scale) ela_image.save(ela) os.remove(tmp_img) else: print "ELA works only with JPEG" #Modified version of a gist by: https://github.com/erans def PIL_exif_data_GPS(filename): if magic.from_file(filename, mime=True) == "image/jpeg": print ("Extraction of GPS data from: %s" % (filename,)) image = Image.open(filename) exif_data = {} exif = image._getexif() latitude = None longitude = None if exif: for tag, value in exif.items(): decoded = TAGS.get(tag, tag) 
if decoded == "GPSInfo": gps_data = {} for t in value: sub_decoded = GPSTAGS.get(t, t) gps_data[sub_decoded] = value[t] exif_data[decoded] = gps_data else: exif_data[decoded] = value if "GPSInfo" in exif_data: gps_info = exif_data["GPSInfo"] gps_longitude = None gps_latitude = None if "GPSLatitude" in gps_info: gps_latitude = gps_info["GPSLatitude"] if "GPSLatitudeRef" in gps_info: gps_latitude_ref = gps_info["GPSLatitudeRef"] if "GPSLongitude" in gps_info: gps_longitude = gps_info["GPSLongitude"] if "GPSLongitudeRef" in gps_info: gps_longitude_ref = gps_info["GPSLongitudeRef"] if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref: latitude = helper.to_degress(gps_latitude) if gps_latitude_ref != "N": latitude = 0 - latitude longitude = helper.to_degress(gps_longitude) if gps_longitude_ref != "E": longitude = 0 - longitude helper.sqlite_insert("Parsed_GPS_Latitude",str(latitude),os.path.basename(filename)) helper.sqlite_insert("Parsed_GPS_Langitude",str(longitude),os.path.basename(filename)) try: if latitude != None and longitude != None: geolocator = Nominatim(user_agent="imago-forensics") ls = str(latitude)+","+str(longitude) location = geolocator.reverse(ls) address = location.raw["address"] for a in address.keys(): helper.sqlite_insert(a,str(address[a]),os.path.basename(filename)) except: print "Problem during geopy decode" return latitude, longitude else: print "GPS works only with JPEG" return None # based on nude.py # https://github.com/hhatto/nude.py # BETA def detect_nudity(filename): if magic.from_file(filename, mime=True) == "image/jpeg": print ("Check if the image contains nudity: %s" % (filename,)) n = Nude(filename) n.parse() nudity = str(n.result) helper.sqlite_insert("Nudity",nudity,os.path.basename(filename)) return nudity else: print "Nudity Detection works only with JPEG" return None #based on JohannesBuchner imagehash #https://github.com/JohannesBuchner/imagehash def ahash(filename): if "image" in magic.from_file(filename, mime=True): print ("Calculating aHash of: %s" % (filename,)) hash = imagehash.average_hash(Image.open(filename)) helper.sqlite_insert("aHash",str(hash),os.path.basename(filename)) return hash else: print "aHash works only with images" return None #based on JohannesBuchner imagehash #https://github.com/JohannesBuchner/imagehash def phash(filename): if "image" in magic.from_file(filename, mime=True): print ("Calculating pHash of: %s" % (filename,)) hash = imagehash.phash(Image.open(filename)) helper.sqlite_insert("pHash",str(hash),os.path.basename(filename)) return hash else: print "pHash works only with images" return None #based on JohannesBuchner imagehash #https://github.com/JohannesBuchner/imagehash def whash(filename): if "image" in magic.from_file(filename, mime=True): print ("Calculating wHash of: %s" % (filename,)) hash = imagehash.whash(Image.open(filename)) helper.sqlite_insert("wHash",str(hash),os.path.basename(filename)) return hash else: print "wHash works only with image images" return None #based on JohannesBuchner imagehash #https://github.com/JohannesBuchner/imagehash def dhash(filename): if "image" in magic.from_file(filename, mime=True): print ("Calculating dHash Vertical of: %s" % (filename,)) hash = imagehash.average_hash(Image.open(filename)) helper.sqlite_insert("dHash",str(hash),os.path.basename(filename)) return hash else: print "dHash vertical works only with image images" return None
{ "pile_set_name": "Github" }
'use strict'; /* jshint quotmark: double */ window.SwaggerTranslator.learn({ "Warning: Deprecated":"Предупреждение: Устарело", "Implementation Notes":"Заметки", "Response Class":"Пример ответа", "Status":"Статус", "Parameters":"Параметры", "Parameter":"Параметр", "Value":"Значение", "Description":"Описание", "Parameter Type":"Тип параметра", "Data Type":"Тип данных", "HTTP Status Code":"HTTP код", "Reason":"Причина", "Response Model":"Структура ответа", "Request URL":"URL запроса", "Response Body":"Тело ответа", "Response Code":"HTTP код ответа", "Response Headers":"Заголовки ответа", "Hide Response":"Спрятать ответ", "Headers":"Заголовки", "Response Messages":"Что может прийти в ответ", "Try it out!":"Попробовать!", "Show/Hide":"Показать/Скрыть", "List Operations":"Операции кратко", "Expand Operations":"Операции подробно", "Raw":"В сыром виде", "can't parse JSON. Raw result":"Не удается распарсить ответ:", "Example Value":"Пример", "Model Schema":"Структура", "Model":"Описание", "Click to set as parameter value":"Нажмите, чтобы испльзовать в качестве значения параметра", "apply":"применить", "Username":"Имя пользователя", "Password":"Пароль", "Terms of service":"Условия использования", "Created by":"Разработано", "See more at":"Еще тут", "Contact the developer":"Связаться с разработчиком", "api version":"Версия API", "Response Content Type":"Content Type ответа", "Parameter content type:":"Content Type параметра:", "fetching resource":"Получение ресурса", "fetching resource list":"Получение ресурсов", "Explore":"Показать", "Show Swagger Petstore Example Apis":"Показать примеры АПИ", "Can't read from server. It may not have the appropriate access-control-origin settings.":"Не удается получить ответ от сервера. Возможно, проблема с настройками доступа", "Please specify the protocol for":"Пожалуйста, укажите протокол для", "Can't read swagger JSON from":"Не получается прочитать swagger json из", "Finished Loading Resource Information. Rendering Swagger UI":"Загрузка информации о ресурсах завершена. Рендерим", "Unable to read api":"Не удалось прочитать api", "from path":"по адресу", "server returned":"сервер сказал" });
{ "pile_set_name": "Github" }
#!/usr/bin/env python3 from utils import * with TemporaryDirectory() as tmp, \ Memo() as memo_1, Memo() as memo_2: memo_1.run(['user', 'create', 'user']) memo_1.run([ 'silo', 'create', 'filesystem', 'storage', '--path', tmp.dir, ]) network_name = memo_1.run([ 'network', 'create', 'network', '--silo', 'storage', '--kelips', '--k', '1', '--as', 'user', ]) network = memo_1.run_json([ 'network', 'export', 'network', '--as', 'user', ]) # Device 2 memo_2.run(['user', 'create', 'user2']) memo_2.run([ 'silo', 'create', 'filesystem', 'storage', '--path', tmp.dir, ]) memo_2.run(['network', 'import'], input = network) user2 = memo_2.run_json ([ 'user', 'export', 'user2' ]) memo_1.run(['user', 'import'], input = user2) passport = memo_1.run_json(['passport', 'create', '--user', 'user2', '--network', 'network', '--as', 'user', '--deny-write', '--deny-storage', '--output', '-' ]) memo_2.run(['passport', 'import'], input = passport) try: memo_2.run(['network', 'link', 'user/network', '--silo', 'storage', '--as', 'user2']) except: pass else: raise Exception('link with storage should have failed') memo_2.run(['network', 'link', 'user/network', '--as', 'user2'])
{ "pile_set_name": "Github" }
Given a singly linked list where the elements are sorted in ascending order, convert it to a height-balanced binary search tree.

For this problem, a height-balanced binary tree is defined as a binary tree in which the depth of the two subtrees of every node never differs by more than 1.

Example:

Given the sorted linked list: [-10, -3, 0, 5, 9],

one possible answer is [0, -3, 9, -10, null, 5], which represents the following height-balanced binary search tree:

```
      0
     / \
   -3   9
   /   /
 -10  5
```

```java
/**
 * Definition for singly-linked list.
 * public class ListNode {
 *     int val;
 *     ListNode next;
 *     ListNode(int x) { val = x; }
 * }
 */
/**
 * Definition for a binary tree node.
 * public class TreeNode {
 *     int val;
 *     TreeNode left;
 *     TreeNode right;
 *     TreeNode(int x) { val = x; }
 * }
 */
class Solution {

    public TreeNode sortedListToBST(ListNode head) {
        if (head == null) {
            return null;
        }
        return sortedListToBST(head, null);
    }

    // Build the subtree for the half-open range [head, end): locate the middle
    // node with fast/slow pointers, make it the root, then recurse on each half.
    public TreeNode sortedListToBST(ListNode head, ListNode end) {
        if (head == end) {
            return null;
        }
        ListNode fast = head;
        ListNode slow = head;
        while (fast != end && fast.next != end) {
            fast = fast.next.next;
            slow = slow.next;
        }
        TreeNode node = new TreeNode(slow.val);
        node.left = sortedListToBST(head, slow);
        node.right = sortedListToBST(slow.next, end);
        return node;
    }
}
```
{ "pile_set_name": "Github" }
import { findAPortNotInUse } from 'portscanner'; import { webFrame, remote } from 'electron'; import React from 'react'; import { render } from 'react-dom'; import { Provider } from 'react-redux'; import launchEditor from 'react-dev-utils/launchEditor'; import './setup'; import App from './containers/App'; import configureStore from './store/configureStore'; import { beforeWindowClose } from './actions/debugger'; import { invokeDevMethod } from './utils/devMenu'; import { client, tryADBReverse } from './utils/adb'; import config from './utils/config'; import { toggleOpenInEditor, isOpenInEditorEnabled } from './utils/devtools'; const currentWindow = remote.getCurrentWindow(); webFrame.setZoomFactor(1); webFrame.setVisualZoomLevelLimits(1, 1); // Prevent dropped file document.addEventListener('drop', e => { e.preventDefault(); e.stopPropagation(); }); document.addEventListener('dragover', e => { e.preventDefault(); e.stopPropagation(); }); const store = configureStore(); // Provide for user window.adb = client; window.adb.reverseAll = tryADBReverse; window.adb.reversePackager = () => tryADBReverse(store.getState().debugger.location.port); window.checkWindowInfo = () => { const debuggerState = store.getState().debugger; return { isWorkerRunning: !!debuggerState.worker, location: debuggerState.location, isPortSettingRequired: debuggerState.isPortSettingRequired, }; }; window.beforeWindowClose = () => new Promise(resolve => (store.dispatch(beforeWindowClose()) ? setTimeout(resolve, 200) : resolve()) ); // For security, we should disable nodeIntegration when user use this open a website const originWindowOpen = window.open; window.open = (url, frameName, features = '') => { const featureList = features.split(','); featureList.push('nodeIntegration=0'); return originWindowOpen.call(window, url, frameName, featureList.join(',')); }; window.openInEditor = (file, lineNumber) => launchEditor(file, lineNumber); window.toggleOpenInEditor = () => { const { host, port } = store.getState().debugger.location; return toggleOpenInEditor(currentWindow, host, port); }; window.isOpenInEditorEnabled = () => isOpenInEditorEnabled(currentWindow); window.invokeDevMethod = name => invokeDevMethod(name)(); // Package will missing /usr/local/bin, // we need fix it for ensure child process work // (like launchEditor of react-devtools) if ( process.env.NODE_ENV === 'production' && process.platform === 'darwin' && process.env.PATH.indexOf('/usr/local/bin') === -1 ) { process.env.PATH = `${process.env.PATH}:/usr/local/bin`; } const { defaultReactDevToolsPort = 19567 } = config; findAPortNotInUse(Number(defaultReactDevToolsPort)).then(port => { window.reactDevToolsPort = port; render( <Provider store={store}> <App /> </Provider>, document.getElementById('root') ); });
{ "pile_set_name": "Github" }
// Copyright 2017 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build go1.9 package ipv6 import ( "net" "runtime" "syscall" "golang.org/x/net/internal/socket" ) // BUG(mikio): On Windows, the ReadBatch and WriteBatch methods of // PacketConn are not implemented. // A Message represents an IO message. // // type Message struct { // Buffers [][]byte // OOB []byte // Addr net.Addr // N int // NN int // Flags int // } // // The Buffers fields represents a list of contiguous buffers, which // can be used for vectored IO, for example, putting a header and a // payload in each slice. // When writing, the Buffers field must contain at least one byte to // write. // When reading, the Buffers field will always contain a byte to read. // // The OOB field contains protocol-specific control or miscellaneous // ancillary data known as out-of-band data. // It can be nil when not required. // // The Addr field specifies a destination address when writing. // It can be nil when the underlying protocol of the endpoint uses // connection-oriented communication. // After a successful read, it may contain the source address on the // received packet. // // The N field indicates the number of bytes read or written from/to // Buffers. // // The NN field indicates the number of bytes read or written from/to // OOB. // // The Flags field contains protocol-specific information on the // received message. type Message = socket.Message // ReadBatch reads a batch of messages. // // The provided flags is a set of platform-dependent flags, such as // syscall.MSG_PEEK. // // On a successful read it returns the number of messages received, up // to len(ms). // // On Linux, a batch read will be optimized. // On other platforms, this method will read only a single message. func (c *payloadHandler) ReadBatch(ms []Message, flags int) (int, error) { if !c.ok() { return 0, syscall.EINVAL } switch runtime.GOOS { case "linux": n, err := c.RecvMsgs([]socket.Message(ms), flags) if err != nil { err = &net.OpError{Op: "read", Net: c.PacketConn.LocalAddr().Network(), Source: c.PacketConn.LocalAddr(), Err: err} } return n, err default: n := 1 err := c.RecvMsg(&ms[0], flags) if err != nil { n = 0 err = &net.OpError{Op: "read", Net: c.PacketConn.LocalAddr().Network(), Source: c.PacketConn.LocalAddr(), Err: err} } return n, err } } // WriteBatch writes a batch of messages. // // The provided flags is a set of platform-dependent flags, such as // syscall.MSG_DONTROUTE. // // It returns the number of messages written on a successful write. // // On Linux, a batch write will be optimized. // On other platforms, this method will write only a single message. func (c *payloadHandler) WriteBatch(ms []Message, flags int) (int, error) { if !c.ok() { return 0, syscall.EINVAL } switch runtime.GOOS { case "linux": n, err := c.SendMsgs([]socket.Message(ms), flags) if err != nil { err = &net.OpError{Op: "write", Net: c.PacketConn.LocalAddr().Network(), Source: c.PacketConn.LocalAddr(), Err: err} } return n, err default: n := 1 err := c.SendMsg(&ms[0], flags) if err != nil { n = 0 err = &net.OpError{Op: "write", Net: c.PacketConn.LocalAddr().Network(), Source: c.PacketConn.LocalAddr(), Err: err} } return n, err } }
{ "pile_set_name": "Github" }
package com.neu.his.cloud.service.dms.mapper; import com.neu.his.cloud.service.dms.model.DmsDrug; import com.neu.his.cloud.service.dms.model.DmsDrugExample; import java.util.List; import org.apache.ibatis.annotations.Param; public interface DmsDrugMapper { int countByExample(DmsDrugExample example); int deleteByExample(DmsDrugExample example); int deleteByPrimaryKey(Long id); int insert(DmsDrug record); int insertSelective(DmsDrug record); List<DmsDrug> selectByExample(DmsDrugExample example); DmsDrug selectByPrimaryKey(Long id); int updateByExampleSelective(@Param("record") DmsDrug record, @Param("example") DmsDrugExample example); int updateByExample(@Param("record") DmsDrug record, @Param("example") DmsDrugExample example); int updateByPrimaryKeySelective(DmsDrug record); int updateByPrimaryKey(DmsDrug record); }
{ "pile_set_name": "Github" }
// // NSString+Stripe_CardBrands.h // Stripe // // Created by Jack Flintermann on 1/15/16. // Copyright © 2016 Stripe, Inc. All rights reserved. // #import <Foundation/Foundation.h> #import "STPCardBrand.h" @interface NSString (Stripe_CardBrands) + (nonnull instancetype)stp_stringWithCardBrand:(STPCardBrand)brand; @end void linkNSStringCardBrandsCategory(void);
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 7ec8d075b610046c495df1093f81a054 timeCreated: 1499442348 licenseType: Pro TextureImporter: fileIDToRecycleName: {} serializedVersion: 4 mipmaps: mipMapMode: 0 enableMipMap: 1 sRGBTexture: 1 linearTexture: 0 fadeOut: 0 borderMipMap: 1 mipMapFadeDistanceStart: 1 mipMapFadeDistanceEnd: 3 bumpmap: convertToNormalMap: 0 externalNormalMap: 0 heightScale: 0.25 normalMapFilter: 0 isReadable: 0 grayScaleToAlpha: 0 generateCubemap: 6 cubemapConvolution: 0 seamlessCubemap: 0 textureFormat: 1 maxTextureSize: 2048 textureSettings: filterMode: 1 aniso: 16 mipBias: -1 wrapMode: 1 nPOTScale: 0 lightmap: 0 compressionQuality: 50 spriteMode: 1 spriteExtrude: 1 spriteMeshType: 1 alignment: 2 spritePivot: {x: 0.5, y: 0.5} spriteBorder: {x: 0, y: 0, z: 0, w: 0} spritePixelsToUnits: 200 alphaUsage: 1 alphaIsTransparency: 1 spriteTessellationDetail: -1 textureType: 8 textureShape: 1 maxTextureSizeSet: 0 compressionQualitySet: 0 textureFormatSet: 0 platformSettings: - buildTarget: DefaultTexturePlatform maxTextureSize: 2048 textureFormat: -1 textureCompression: 1 compressionQuality: 50 crunchedCompression: 0 allowsAlphaSplitting: 0 overridden: 0 - buildTarget: Standalone maxTextureSize: 2048 textureFormat: -1 textureCompression: 1 compressionQuality: 50 crunchedCompression: 0 allowsAlphaSplitting: 0 overridden: 0 - buildTarget: Android maxTextureSize: 2048 textureFormat: -1 textureCompression: 1 compressionQuality: 50 crunchedCompression: 0 allowsAlphaSplitting: 0 overridden: 0 spriteSheet: serializedVersion: 2 sprites: [] outline: [] spritePackingTag: userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
#! /bin/bash set -e FILETOSIGN=$1 [ -z $2 ] && SIGFILE="$FILETOSIGN.asc" || SIGFILE="$2" [ ! -z "$3" ] && CLRSIGNEDFILE="$3" if [ -z "$FILETOSIGN" -o ! -r "$FILETOSIGN" ]; then echo "No file to sign provided" >&2 exit 1 fi if [ ! -r /etc/rhn/signing.conf ]; then echo "No config file found: /etc/rhn/signing.conf" >&2 exit 1 fi source /etc/rhn/signing.conf if [ -z "$KEYID" ]; then echo "Unable to find GPG KEYID in config" >&2 exit 1 fi if [ -z "$GPGPASS" ]; then echo "Unable to find GPG PASSWORD in config" >&2 exit 1 fi KEYFILE="$FILETOSIGN.key" # Debian systems require the hashing algorithm to be at least SHA256 if [ ! -z "$DIGESTPREF" ]; then DIGESTOPT="--personal-digest-preferences $DIGESTPREF" else DIGESTOPT="--personal-digest-preferences SHA256" fi rm -f $SIGFILE echo "$GPGPASS" | gpg -sab --batch -u $KEYID --passphrase-fd 0 --pinentry-mode loopback $DIGESTOPT -o $SIGFILE $FILETOSIGN rm -f $KEYFILE gpg --batch --export -a -o $KEYFILE $KEYID if [ ! -z "$CLRSIGNEDFILE" ]; then rm -f $CLRSIGNEDFILE echo "$GPGPASS" | gpg --batch -u $KEYID --passphrase-fd 0 --pinentry-mode loopback $DIGESTOPT --clearsign -o $CLRSIGNEDFILE $FILETOSIGN fi exit 0
{ "pile_set_name": "Github" }
/* * BSD 3-Clause License * * Copyright (c) 2019, NTT Ltd. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ import axios from "axios"; export default class Connector { constructor(name, params, env){ this.version = env.version; this.config = env.config; this.logger = env.logger; this.pubSub = env.pubSub; this.params = params; this.name = name; this.messageCallback = null; this.connectCallback = null; this.errorCallback = null; this.disconnectCallback = null; if (!this.params.noProxy && env.agent) { axios.defaults.httpsAgent = env.agent; } this.axios = axios; } connect = () => new Promise((resolve, reject) => reject(new Error('The method connect MUST be implemented'))); _error = (error) => { if (this.errorCallback) this.errorCallback(error); }; subscribe = (input) => { throw new Error('The method subscribe MUST be implemented'); }; _disconnect = (message) => { this.connected = false; if (this.disconnectCallback) this.disconnectCallback(message); }; _message = (message) => { if (this.messageCallback) this.messageCallback(message); }; _connect = (message) => { this.connected = true; if (this.connectCallback) this.connectCallback(message); }; static transform = (message) => { throw new Error('The method transform (STATIC) MUST be implemented'); }; onConnect = (callback) => { this.connectCallback = callback; }; onMessage = (callback) => { this.messageCallback = callback; }; onError = (callback) => { this.errorCallback = callback; }; onDisconnect = (callback) => { this.disconnectCallback = callback; }; }
{ "pile_set_name": "Github" }
//----------------------------------------------------------------------------- // Copyright (c) 2012 GarageGames, LLC // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. //----------------------------------------------------------------------------- #ifdef TORQUE_TESTS_ENABLED #include "testing/unitTesting.h" #include "platform/platform.h" #include "core/fileio.h" #include "core/util/tVector.h" #include "console/console.h" TEST(Platform, ExcludedDirectories) { // Just dump everything under the current directory. We should // find at least one file. // Exclude .svn and CVS Platform::clearExcludedDirectories(); Platform::addExcludedDirectory(".svn"); Platform::addExcludedDirectory("CVS"); EXPECT_TRUE(Platform::isExcludedDirectory(".svn")) << "On list, should be excluded."; EXPECT_TRUE(Platform::isExcludedDirectory("CVS")) << "On list, should be excluded."; EXPECT_FALSE(Platform::isExcludedDirectory("foo")) << "Doesn't match list, shouldn't be excluded."; EXPECT_FALSE(Platform::isExcludedDirectory(".svnCVS")) << "Looks like a duck, but it shouldn't be excluded cuz it's distinct from all entries on the exclusion list."; // Ok, now our exclusion list is setup, so let's dump some paths. Vector<Platform::FileInfo> pathInfo; Platform::dumpPath(Platform::getCurrentDirectory(), pathInfo, 2); EXPECT_GT(pathInfo.size(), 0) << "Should find at least SOMETHING in the current directory!"; // This'll nuke info if we run it in a live situation... so don't run unit // tests in a live situation. ;) Platform::clearExcludedDirectories(); }; TEST(File, TouchAndTime) { FileTime create[2], modify[2]; // Create a file and sleep for a second. File f; f.open("testTouch.file", File::WriteAppend); f.close(); // Touch a file and note its last-modified. dFileTouch("testTouch.file"); EXPECT_TRUE(Platform::isFile("testTouch.file")) << "We just touched this file - it should exist."; EXPECT_TRUE(Platform::getFileTimes("testTouch.file", &create[0], &modify[0])) << "Failed to get filetimes for a file we just created."; // Sleep for a tick Platform::sleep(10); // Touch it again, and compare the last-modifieds. EXPECT_TRUE(Platform::isFile("testTouch.file")) << "We just touched this file - it should exist."; dFileTouch("testTouch.file"); EXPECT_TRUE(Platform::isFile("testTouch.file")) << "We just touched this file - it should exist."; EXPECT_TRUE(Platform::getFileTimes("testTouch.file", &create[1], &modify[1])) << "Failed to get filetimes for a file we just created."; // Now compare the times... 
EXPECT_LT(Platform::compareFileTimes(modify[0], modify[1]), 0) << "Timestamps are wrong - modify[0] should be before modify[1]!"; EXPECT_EQ(Platform::compareFileTimes(create[0], create[1]), 0) << "Create timestamps should match - we didn't delete the file during this test."; // Clean up.. dFileDelete("testTouch.file"); EXPECT_FALSE(Platform::isFile("testTouch.file")) << "Somehow failed to delete our test file."; }; // Mac/Linux have no implementations for these functions, so we 'def it out for now. #ifdef WIN32 TEST(Platform, Volumes) { Vector<const char*> names; Platform::getVolumeNamesList(names); EXPECT_GT(names.size(), 0) << "We should have at least one volume..."; Vector<Platform::VolumeInformation> info; Platform::getVolumeInformationList(info); EXPECT_EQ(names.size(), info.size()) << "Got inconsistent number of volumes back from info vs. name list functions!"; }; #endif #endif
{ "pile_set_name": "Github" }
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Batching dataset transformations.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.data.ops import dataset_ops from tensorflow.python.data.util import nest from tensorflow.python.data.util import sparse from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import tensor_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import gen_dataset_ops from tensorflow.python.ops import math_ops def dense_to_sparse_batch(batch_size, row_shape): """A transformation that batches ragged elements into `tf.SparseTensor`s. Like `Dataset.padded_batch()`, this transformation combines multiple consecutive elements of the dataset, which might have different shapes, into a single element. The resulting element has three components (`indices`, `values`, and `dense_shape`), which comprise a `tf.SparseTensor` that represents the same data. The `row_shape` represents the dense shape of each row in the resulting `tf.SparseTensor`, to which the effective batch size is prepended. For example: ```python # NOTE: The following examples use `{ ... }` to represent the # contents of a dataset. a = { ['a', 'b', 'c'], ['a', 'b'], ['a', 'b', 'c', 'd'] } a.apply(tf.contrib.data.dense_to_sparse_batch(batch_size=2, row_shape=[6])) == { ([[0, 0], [0, 1], [0, 2], [1, 0], [1, 1]], # indices ['a', 'b', 'c', 'a', 'b'], # values [2, 6]), # dense_shape ([[0, 0], [0, 1], [0, 2], [0, 3]], ['a', 'b', 'c', 'd'], [1, 6]) } ``` Args: batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of consecutive elements of this dataset to combine in a single batch. row_shape: A `tf.TensorShape` or `tf.int64` vector tensor-like object representing the equivalent dense shape of a row in the resulting `tf.SparseTensor`. Each element of this dataset must have the same rank as `row_shape`, and must have size less than or equal to `row_shape` in each dimension. Returns: A `Dataset` transformation function, which can be passed to @{tf.data.Dataset.apply}. """ def _apply_fn(dataset): return DenseToSparseBatchDataset(dataset, batch_size, row_shape) return _apply_fn def unbatch(): """A Transformation which splits the elements of a dataset. For example, if elements of the dataset are shaped `[B, a0, a1, ...]`, where `B` may vary from element to element, then for each element in the dataset, the unbatched dataset will contain `B` consecutive elements of shape `[a0, a1, ...]`. Returns: A `Dataset` transformation function, which can be passed to @{tf.data.Dataset.apply}. 
""" def _apply_fn(dataset): def unbatch_map(arg, *rest): if rest: return dataset_ops.Dataset.from_tensor_slices((arg,) + rest) else: return dataset_ops.Dataset.from_tensor_slices(arg) return dataset.flat_map(map_func=unbatch_map) return _apply_fn def filter_irregular_batches(batch_size): """Transformation that filters out batches that are not of size batch_size.""" def _apply_fn(dataset): """Function from `Dataset` to `Dataset` that applies the transformation.""" tensor_batch_size = ops.convert_to_tensor( batch_size, dtype=dtypes.int64, name="batch_size") flattened = _RestructuredDataset( dataset, tuple(nest.flatten(dataset.output_types)), output_classes=tuple(nest.flatten(dataset.output_classes))) def _predicate(*xs): """Return `True` if this element is a full batch.""" # Extract the dynamic batch size from the first component of the flattened # batched element. first_component = xs[0] first_component_batch_size = array_ops.shape( first_component, out_type=dtypes.int64)[0] return math_ops.equal(first_component_batch_size, tensor_batch_size) filtered = flattened.filter(_predicate) maybe_constant_batch_size = tensor_util.constant_value(tensor_batch_size) def _set_first_dimension(shape): return shape.merge_with( tensor_shape.vector(maybe_constant_batch_size).concatenate(shape[1:])) known_shapes = nest.map_structure(_set_first_dimension, dataset.output_shapes) return _RestructuredDataset( filtered, dataset.output_types, known_shapes, output_classes=dataset.output_classes) return _apply_fn def batch_and_drop_remainder(batch_size): """A batching transformation that omits the final small batch (if present). Like @{tf.data.Dataset.batch}, this transformation combines consecutive elements of this dataset into batches. However, if the batch size does not evenly divide the input dataset size, this transformation will drop the final smaller element. The following example illustrates the difference between this transformation and `Dataset.batch()`: ```python dataset = tf.data.Dataset.range(200) batched = dataset.apply(tf.contrib.data.batch_and_drop_remainder(128)) print(batched.output_shapes) # ==> "(128,)" (the batch dimension is known) ``` By contrast, `dataset.batch(128)` would yield a two-element dataset with shapes `(128,)` and `(72,)`, so the batch dimension would not be statically known. Args: batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of consecutive elements of this dataset to combine in a single batch. Returns: A `Dataset` transformation function, which can be passed to @{tf.data.Dataset.apply} """ def _apply_fn(dataset): """Function from `Dataset` to `Dataset` that applies the transformation.""" batched = dataset.batch(batch_size) return filter_irregular_batches(batch_size)(batched) return _apply_fn def padded_batch_and_drop_remainder(batch_size, padded_shapes, padding_values=None): """A batching and padding transformation that omits the final small batch. Like @{tf.data.Dataset.padded_batch}, this transformation combines consecutive elements of this dataset into batches. However, if the batch size does not evenly divide the input dataset size, this transformation will drop the final smaller element. See `@{tf.contrib.data.batch_and_drop_remainder}` for more details. Args: batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of consecutive elements of this dataset to combine in a single batch. padded_shapes: A nested structure of `tf.TensorShape` or `tf.int64` vector tensor-like objects. See @{tf.data.Dataset.padded_batch} for details. 
padding_values: (Optional.) A nested structure of scalar-shaped `tf.Tensor`. See @{tf.data.Dataset.padded_batch} for details. Returns: A `Dataset` transformation function, which can be passed to @{tf.data.Dataset.apply} """ def _apply_fn(dataset): """Function from `Dataset` to `Dataset` that applies the transformation.""" batched = dataset.padded_batch( batch_size, padded_shapes=padded_shapes, padding_values=padding_values) return filter_irregular_batches(batch_size)(batched) return _apply_fn class DenseToSparseBatchDataset(dataset_ops.Dataset): """A `Dataset` that batches ragged dense elements into `tf.SparseTensor`s.""" def __init__(self, input_dataset, batch_size, row_shape): """See `Dataset.dense_to_sparse_batch()` for more details.""" super(DenseToSparseBatchDataset, self).__init__() if not isinstance(input_dataset.output_types, dtypes.DType): raise TypeError("DenseToSparseDataset requires an input whose elements " "have a single component, whereas the input has %r." % input_dataset.output_types) self._input_dataset = input_dataset self._batch_size = batch_size self._row_shape = row_shape def _as_variant_tensor(self): return gen_dataset_ops.dense_to_sparse_batch_dataset( self._input_dataset._as_variant_tensor(), # pylint: disable=protected-access self._batch_size, row_shape=dataset_ops._partial_shape_to_tensor(self._row_shape), # pylint: disable=protected-access output_shapes=nest.flatten( sparse.as_dense_shapes(self.output_shapes, self.output_classes)), output_types=nest.flatten( sparse.as_dense_types(self.output_types, self.output_classes))) @property def output_classes(self): return sparse_tensor.SparseTensor @property def output_shapes(self): return tensor_shape.vector(None).concatenate(self._row_shape) @property def output_types(self): return self._input_dataset.output_types class _RestructuredDataset(dataset_ops.Dataset): """An internal helper for changing the structure and shape of a dataset.""" def __init__(self, dataset, output_types, output_shapes=None, output_classes=None): """Creates a new dataset with the given output types and shapes. The given `dataset` must have a structure that is convertible: * `dataset.output_types` must be the same as `output_types` module nesting. * Each shape in `dataset.output_shapes` must be compatible with each shape in `output_shapes` (if given). Note: This helper permits "unsafe casts" for shapes, equivalent to using `tf.Tensor.set_shape()` where domain-specific knowledge is available. Args: dataset: A `Dataset` object. output_types: A nested structure of `tf.DType` objects. output_shapes: (Optional.) A nested structure of `tf.TensorShape` objects. If omitted, the shapes will be inherited from `dataset`. output_classes: (Optional.) A nested structure of class types. If omitted, the class types will be inherited from `dataset`. Raises: ValueError: If either `output_types` or `output_shapes` is not compatible with the structure of `dataset`. """ super(_RestructuredDataset, self).__init__() self._dataset = dataset # Validate that the types are compatible. output_types = nest.map_structure(dtypes.as_dtype, output_types) flat_original_types = nest.flatten(dataset.output_types) flat_new_types = nest.flatten(output_types) if flat_original_types != flat_new_types: raise ValueError( "Dataset with output types %r cannot be restructured to have output " "types %r" % (dataset.output_types, output_types)) self._output_types = output_types if output_shapes is None: # Inherit shapes from the original `dataset`. 
self._output_shapes = nest.pack_sequence_as(output_types, nest.flatten( dataset.output_shapes)) else: # Validate that the shapes are compatible. nest.assert_same_structure(output_types, output_shapes) flat_original_shapes = nest.flatten(dataset.output_shapes) flat_new_shapes = nest.flatten_up_to(output_types, output_shapes) for original_shape, new_shape in zip(flat_original_shapes, flat_new_shapes): if not original_shape.is_compatible_with(new_shape): raise ValueError( "Dataset with output shapes %r cannot be restructured to have " "incompatible output shapes %r" % (dataset.output_shapes, output_shapes)) self._output_shapes = nest.map_structure_up_to( output_types, tensor_shape.as_shape, output_shapes) if output_classes is None: # Inherit class types from the original `dataset`. self._output_classes = nest.pack_sequence_as(output_types, nest.flatten( dataset.output_classes)) else: self._output_classes = output_classes def _as_variant_tensor(self): return self._dataset._as_variant_tensor() # pylint: disable=protected-access @property def output_classes(self): return self._output_classes @property def output_types(self): return self._output_types @property def output_shapes(self): return self._output_shapes class _MapAndBatchDataset(dataset_ops.MapDataset): """A `Dataset` that maps a function over a batch of elements.""" def __init__(self, input_dataset, map_func, batch_size, num_parallel_batches): """See `Dataset.map()` for details.""" super(_MapAndBatchDataset, self).__init__(input_dataset, map_func) self._batch_size = ops.convert_to_tensor( batch_size, dtype=dtypes.int64, name="batch_size") self._num_parallel_batches = ops.convert_to_tensor( num_parallel_batches, dtype=dtypes.int64, name="num_parallel_batches") def _as_variant_tensor(self): # pylint: disable=protected-access input_resource = self._input_dataset._as_variant_tensor() return gen_dataset_ops.map_and_batch_dataset( input_resource, self._map_func.captured_inputs, f=self._map_func, batch_size=self._batch_size, num_parallel_batches=self._num_parallel_batches, output_types=nest.flatten( sparse.as_dense_types(self.output_types, self.output_classes)), output_shapes=nest.flatten( sparse.as_dense_shapes(self.output_shapes, self.output_classes))) # pylint: enable=protected-access @property def output_shapes(self): return nest.pack_sequence_as(self._output_shapes, [ tensor_shape.vector(tensor_util.constant_value( self._batch_size)).concatenate(s) for s in nest.flatten(self._output_shapes) ]) @property def output_types(self): return self._output_types def map_and_batch(map_func, batch_size, num_parallel_batches=1): """Fused implementation of `map` and `batch`. Maps `map_func` across `batch_size` consecutive elements of this dataset and then combines them into a batch. Functionally, it is equivalent to `map` followed by `batch`. However, by fusing the two transformations together, the implementation can be more efficient. Surfacing this transformation in the API is temporary. Once automatic input pipeline optimization is implemented, the fusing of `map` and `batch` will happen automatically and this API will be deprecated. Args: map_func: A function mapping a nested structure of tensors to another nested structure of tensors. batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of consecutive elements of this dataset to combine in a single batch. num_parallel_batches: A `tf.int64` scalar `tf.Tensor`, representing the number of batches to create in parallel. 
      On the one hand, higher values can help mitigate the effect of stragglers.
      On the other hand, higher values can increase contention if CPU is scarce.

  Returns:
    A `Dataset` transformation function, which can be passed to
    @{tf.contrib.data.Dataset.apply}.
  """

  def _apply_fn(dataset):
    return _MapAndBatchDataset(dataset, map_func, batch_size,
                               num_parallel_batches)

  return _apply_fn
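A short usage sketch tying these transformations together, assuming they are exported under `tf.contrib.data` as the docstrings above suggest (the dataset contents mirror the `range(200)` example from the `batch_and_drop_remainder` docstring):

```
import tensorflow as tf

dataset = tf.data.Dataset.range(200)

# Drop the final 72-element batch so every batch has the static shape (128,).
batched = dataset.apply(tf.contrib.data.batch_and_drop_remainder(128))

# Fused map+batch: square each element, then batch, in one pipeline stage,
# producing two batches in parallel.
fused = dataset.apply(
    tf.contrib.data.map_and_batch(lambda x: x * x, batch_size=128,
                                  num_parallel_batches=2))
```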
{ "pile_set_name": "Github" }
// stylelint-disable selector-no-qualifying-type // // Base styles // .input-group { position: relative; display: flex; flex-wrap: wrap; // For form validation feedback align-items: stretch; width: 100%; > .form-control, > .custom-select, > .custom-file { position: relative; // For focus state's z-index flex: 1 1 auto; // Add width 1% and flex-basis auto to ensure that button will not wrap out // the column. Applies to IE Edge+ and Firefox. Chrome does not require this. width: 1%; margin-bottom: 0; // Bring the "active" form control to the top of surrounding elements &:focus { z-index: 3; } + .form-control, + .custom-select, + .custom-file { margin-left: -$input-border-width; } } > .form-control, > .custom-select { &:not(:last-child) { @include border-right-radius(0); } &:not(:first-child) { @include border-left-radius(0); } } // Custom file inputs have more complex markup, thus requiring different // border-radius overrides. > .custom-file { display: flex; align-items: center; &:not(:last-child) .custom-file-label, &:not(:last-child) .custom-file-label::before { @include border-right-radius(0); } &:not(:first-child) .custom-file-label, &:not(:first-child) .custom-file-label::before { @include border-left-radius(0); } } } // Prepend and append // // While it requires one extra layer of HTML for each, dedicated prepend and // append elements allow us to 1) be less clever, 2) simplify our selectors, and // 3) support HTML5 form validation. .input-group-prepend, .input-group-append { display: flex; // Ensure buttons are always above inputs for more visually pleasing borders. // This isn't needed for `.input-group-text` since it shares the same border-color // as our inputs. .btn { position: relative; z-index: 2; } .btn + .btn, .btn + .input-group-text, .input-group-text + .input-group-text, .input-group-text + .btn { margin-left: -$input-border-width; } } .input-group-prepend { margin-right: -$input-border-width; } .input-group-append { margin-left: -$input-border-width; } // Textual addons // // Serves as a catch-all element for any text or radio/checkbox input you wish // to prepend or append to an input. .input-group-text { display: flex; align-items: center; padding: $input-padding-y $input-padding-x; margin-bottom: 0; // Allow use of <label> elements by overriding our default margin-bottom font-size: $font-size-base; // Match inputs font-weight: $font-weight-normal; line-height: $input-line-height; color: $input-group-addon-color; text-align: center; white-space: nowrap; background-color: $input-group-addon-bg; border: $input-border-width solid $input-group-addon-border-color; @include border-radius($input-border-radius); // Nuke default margins from checkboxes and radios to vertically center within. input[type="radio"], input[type="checkbox"] { margin-top: 0; } } // Sizing // // Remix the default form control sizing classes into new ones for easier // manipulation. 
.input-group-lg > .form-control, .input-group-lg > .input-group-prepend > .input-group-text, .input-group-lg > .input-group-append > .input-group-text, .input-group-lg > .input-group-prepend > .btn, .input-group-lg > .input-group-append > .btn { @extend .form-control-lg; } .input-group-sm > .form-control, .input-group-sm > .input-group-prepend > .input-group-text, .input-group-sm > .input-group-append > .input-group-text, .input-group-sm > .input-group-prepend > .btn, .input-group-sm > .input-group-append > .btn { @extend .form-control-sm; } // Prepend and append rounded corners // // These rulesets must come after the sizing ones to properly override sm and lg // border-radius values when extending. They're more specific than we'd like // with the `.input-group >` part, but without it, we cannot override the sizing. .input-group > .input-group-prepend > .btn, .input-group > .input-group-prepend > .input-group-text, .input-group > .input-group-append:not(:last-child) > .btn, .input-group > .input-group-append:not(:last-child) > .input-group-text, .input-group > .input-group-append:last-child > .btn:not(:last-child):not(.dropdown-toggle), .input-group > .input-group-append:last-child > .input-group-text:not(:last-child) { @include border-right-radius(0); } .input-group > .input-group-append > .btn, .input-group > .input-group-append > .input-group-text, .input-group > .input-group-prepend:not(:first-child) > .btn, .input-group > .input-group-prepend:not(:first-child) > .input-group-text, .input-group > .input-group-prepend:first-child > .btn:not(:first-child), .input-group > .input-group-prepend:first-child > .input-group-text:not(:first-child) { @include border-left-radius(0); }
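For context, the markup these rules target looks roughly like the following minimal sketch, built only from class names that appear in the styles above (the placeholder and button text are arbitrary):

```
<div class="input-group input-group-sm">
  <div class="input-group-prepend">
    <span class="input-group-text">@</span>
  </div>
  <input type="text" class="form-control" placeholder="Username">
  <div class="input-group-append">
    <button class="btn" type="button">Go</button>
  </div>
</div>
```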
{ "pile_set_name": "Github" }
/* Copyright (c) 2011 Arduino. All right reserved. This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ #ifndef _VARIANT_ARDUINO_DUE_X_ #define _VARIANT_ARDUINO_DUE_X_ /*---------------------------------------------------------------------------- * Definitions *----------------------------------------------------------------------------*/ /** Frequency of the board main oscillator */ #define VARIANT_MAINOSC 12000000 /** Master clock frequency */ #define VARIANT_MCK 84000000 /*---------------------------------------------------------------------------- * Headers *----------------------------------------------------------------------------*/ #include "Arduino.h" #ifdef __cplusplus #include "UARTClass.h" #include "USARTClass.h" #endif #ifdef __cplusplus extern "C"{ #endif // __cplusplus /** * Libc porting layers */ #if defined ( __GNUC__ ) /* GCC CS3 */ # include <syscalls.h> /** RedHat Newlib minimal stub */ #endif /*---------------------------------------------------------------------------- * Pins *----------------------------------------------------------------------------*/ // Number of pins defined in PinDescription array #define PINS_COUNT (100u) #define NUM_DIGITAL_PINS (85u) #define NUM_ANALOG_INPUTS (12u) #define digitalPinToPort(P) ( g_APinDescription[P].pPort ) #define digitalPinToBitMask(P) ( g_APinDescription[P].ulPin ) //#define digitalPinToTimer(P) ( ) //#define analogInPinToBit(P) ( ) #define portOutputRegister(port) ( &(port->PIO_ODSR) ) #define portInputRegister(port) ( &(port->PIO_PDSR) ) //#define portModeRegister(P) ( ) #define digitalPinHasPWM(P) ( g_APinDescription[P].ulPWMChannel != NOT_ON_PWM || g_APinDescription[P].ulTCChannel != NOT_ON_TIMER ) // Interrupts #define digitalPinToInterrupt(p) ((p) < NUM_DIGITAL_PINS ? (p) : -1) // LEDs #define PIN_LED_13 (36u) #define PIN_LED_RXL (72u) #define PIN_LED_TXL (73u) #define PIN_LED PIN_LED_13 #define PIN_LED2 PIN_LED_RXL #define PIN_LED3 PIN_LED_TXL #define LED_BUILTIN 36 /* * SPI Interfaces */ #define SPI_INTERFACES_COUNT 1 #define SPI_INTERFACE SPI0 #define SPI_INTERFACE_ID ID_SPI0 #define SPI_CHANNELS_NUM 4 #define PIN_SPI_SS0 (77u) #define PIN_SPI_SS1 (87u) #define PIN_SPI_SS2 (86u) #define PIN_SPI_SS3 (78u) #define PIN_SPI_MOSI (75u) #define PIN_SPI_MISO (74u) #define PIN_SPI_SCK (76u) #define BOARD_SPI_SS0 (10u) #define BOARD_SPI_SS1 (4u) #define BOARD_SPI_SS2 (52u) #define BOARD_SPI_SS3 PIN_SPI_SS3 #define BOARD_SPI_DEFAULT_SS BOARD_SPI_SS3 #define BOARD_PIN_TO_SPI_PIN(x) \ (x==BOARD_SPI_SS0 ? PIN_SPI_SS0 : \ (x==BOARD_SPI_SS1 ? PIN_SPI_SS1 : \ (x==BOARD_SPI_SS2 ? PIN_SPI_SS2 : PIN_SPI_SS3 ))) #define BOARD_PIN_TO_SPI_CHANNEL(x) \ (x==BOARD_SPI_SS0 ? 0 : \ (x==BOARD_SPI_SS1 ? 1 : \ (x==BOARD_SPI_SS2 ? 
2 : 3))) static const uint8_t SS = BOARD_SPI_SS0; static const uint8_t SS1 = BOARD_SPI_SS1; static const uint8_t SS2 = BOARD_SPI_SS2; static const uint8_t SS3 = BOARD_SPI_SS3; static const uint8_t MOSI = PIN_SPI_MOSI; static const uint8_t MISO = PIN_SPI_MISO; static const uint8_t SCK = PIN_SPI_SCK; /* * Wire Interfaces */ #define WIRE_INTERFACES_COUNT 2 #define PIN_WIRE_SDA (20u) #define PIN_WIRE_SCL (21u) #define WIRE_INTERFACE TWI1 #define WIRE_INTERFACE_ID ID_TWI1 #define WIRE_ISR_HANDLER TWI1_Handler #define WIRE_ISR_ID TWI1_IRQn #define PIN_WIRE1_SDA (70u) #define PIN_WIRE1_SCL (71u) #define WIRE1_INTERFACE TWI0 #define WIRE1_INTERFACE_ID ID_TWI0 #define WIRE1_ISR_HANDLER TWI0_Handler #define WIRE1_ISR_ID TWI0_IRQn /* * UART/USART Interfaces */ // Serial #define PINS_UART (81u) // Serial1 #define PINS_USART0 (82u) // Serial2 #define PINS_USART1 (83u) // Serial3 #define PINS_USART3 (84u) /* * USB Interfaces */ #define PINS_USB (85u) /* * Analog pins */ static const uint8_t A0 = 54; static const uint8_t A1 = 55; static const uint8_t A2 = 56; static const uint8_t A3 = 57; static const uint8_t A4 = 58; static const uint8_t A5 = 59; static const uint8_t A6 = 60; static const uint8_t A7 = 61; static const uint8_t A8 = 62; static const uint8_t A9 = 63; static const uint8_t A10 = 64; static const uint8_t A11 = 65; static const uint8_t DAC0 = 66; static const uint8_t DAC1 = 67; //static const uint8_t CANRX = 68; //static const uint8_t CANTX = 69; #define ADC_RESOLUTION 12 /* * Complementary CAN pins */ //static const uint8_t CAN1RX = 88; //static const uint8_t CAN1TX = 89; // CAN0 //#define PINS_CAN0 (90u) // CAN1 //#define PINS_CAN1 (91u) /* * DACC */ #define DACC_INTERFACE DACC #define DACC_INTERFACE_ID ID_DACC #define DACC_RESOLUTION 12 #define DACC_ISR_HANDLER DACC_Handler #define DACC_ISR_ID DACC_IRQn /* * PWM */ #define PWM_INTERFACE PWM #define PWM_INTERFACE_ID ID_PWM #define PWM_FREQUENCY 1000 #define PWM_MAX_DUTY_CYCLE 255 #define PWM_MIN_DUTY_CYCLE 0 #define PWM_RESOLUTION 8 /* * TC */ #define TC_INTERFACE TC0 #define TC_INTERFACE_ID ID_TC0 #define TC_FREQUENCY 1000 #define TC_MAX_DUTY_CYCLE 255 #define TC_MIN_DUTY_CYCLE 0 #define TC_RESOLUTION 8 #ifdef __cplusplus } #endif /*---------------------------------------------------------------------------- * Arduino objects - C++ only *----------------------------------------------------------------------------*/ #ifdef __cplusplus extern UARTClass Serial; extern USARTClass Serial1; extern USARTClass Serial2; extern USARTClass Serial3; #endif // These serial port names are intended to allow libraries and architecture-neutral // sketches to automatically default to the correct port name for a particular type // of use. For example, a GPS module would normally connect to SERIAL_PORT_HARDWARE_OPEN, // the first hardware serial port whose RX/TX pins are not dedicated to another use. // // SERIAL_PORT_MONITOR Port which normally prints to the Arduino Serial Monitor // // SERIAL_PORT_USBVIRTUAL Port which is USB virtual serial // // SERIAL_PORT_LINUXBRIDGE Port which connects to a Linux system via Bridge library // // SERIAL_PORT_HARDWARE Hardware serial port, physical RX & TX pins. // // SERIAL_PORT_HARDWARE_OPEN Hardware serial ports which are open for use. Their RX & TX // pins are NOT connected to anything by default. 
#define SERIAL_PORT_MONITOR Serial #define SERIAL_PORT_USBVIRTUAL SerialUSB #define SERIAL_PORT_HARDWARE_OPEN Serial1 #define SERIAL_PORT_HARDWARE_OPEN1 Serial2 #define SERIAL_PORT_HARDWARE_OPEN2 Serial3 #define SERIAL_PORT_HARDWARE Serial #define SERIAL_PORT_HARDWARE1 Serial1 #define SERIAL_PORT_HARDWARE2 Serial2 #define SERIAL_PORT_HARDWARE3 Serial3 #endif /* _VARIANT_ARDUINO_DUE_X_ */
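As an illustration of how these aliases are meant to be used, a small sketch (hypothetical, not part of the variant file) could forward data from a GPS module on the first free hardware port to the monitor port, as the comment block above describes:

```
// Hypothetical Arduino sketch using the serial aliases defined above
void setup() {
  SERIAL_PORT_MONITOR.begin(115200);      // maps to Serial on the Due
  SERIAL_PORT_HARDWARE_OPEN.begin(9600);  // maps to Serial1: RX/TX pins not dedicated elsewhere
}

void loop() {
  // Forward anything the GPS sends to the serial monitor
  if (SERIAL_PORT_HARDWARE_OPEN.available()) {
    SERIAL_PORT_MONITOR.write(SERIAL_PORT_HARDWARE_OPEN.read());
  }
}
```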
{ "pile_set_name": "Github" }
/* === This file is part of Tomahawk Player - <http://tomahawk-player.org> === * * Copyright 2010-2011, Christian Muehlhaeuser <[email protected]> * Copyright 2010-2011, Leo Franchi <[email protected]> * Copyright 2013, Teo Mrnjavac <[email protected]> * * Tomahawk is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Tomahawk is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Tomahawk. If not, see <http://www.gnu.org/licenses/>. */ #include "DatabaseCollection.h" #include "database/Database.h" #include "database/DatabaseCommand_AllArtists.h" #include "database/DatabaseCommand_AllAlbums.h" #include "database/DatabaseCommand_AllTracks.h" #include "database/DatabaseCommand_AddFiles.h" #include "database/DatabaseCommand_DeleteFiles.h" #include "database/DatabaseCommand_LoadAllPlaylists.h" #include "database/DatabaseCommand_LoadAllAutoPlaylists.h" #include "database/DatabaseCommand_LoadAllStations.h" #include "utils/Logger.h" #include "PlaylistEntry.h" using namespace Tomahawk; DatabaseCollection::DatabaseCollection( const source_ptr& src, QObject* parent ) : Collection( src, QString( "dbcollection:%1" ).arg( src->nodeId() ), parent ) { m_browseCapabilities << CapabilityBrowseArtists << CapabilityBrowseAlbums << CapabilityBrowseTracks; connect( source().data(), SIGNAL( online() ), SIGNAL( online() ) ); connect( source().data(), SIGNAL( offline() ), SIGNAL( offline() ) ); } bool DatabaseCollection::isOnline() const { return source()->isOnline(); } void DatabaseCollection::loadPlaylists() { DatabaseCommand_LoadAllPlaylists* cmd = new DatabaseCommand_LoadAllPlaylists( source() ); connect( cmd, SIGNAL( done( const QList<Tomahawk::playlist_ptr>& ) ), SLOT( setPlaylists( const QList<Tomahawk::playlist_ptr>& ) ) ); Database::instance()->enqueue( Tomahawk::dbcmd_ptr( cmd ) ); } void DatabaseCollection::loadAutoPlaylists() { DatabaseCommand_LoadAllAutoPlaylists* cmd = new DatabaseCommand_LoadAllAutoPlaylists( source() ); connect( cmd, SIGNAL( autoPlaylistLoaded( Tomahawk::source_ptr, QVariantList ) ), SLOT( autoPlaylistCreated( const Tomahawk::source_ptr&, const QVariantList& ) ) ); Database::instance()->enqueue( Tomahawk::dbcmd_ptr( cmd ) ); } void DatabaseCollection::loadStations() { DatabaseCommand_LoadAllStations* cmd = new DatabaseCommand_LoadAllStations( source() ); connect( cmd, SIGNAL( stationLoaded( Tomahawk::source_ptr, QVariantList ) ), SLOT( stationCreated( const Tomahawk::source_ptr&, const QVariantList& ) ) ); Database::instance()->enqueue( Tomahawk::dbcmd_ptr( cmd ) ); } void DatabaseCollection::addTracks( const QList<QVariant>& newitems ) { qDebug() << Q_FUNC_INFO << newitems.length(); DatabaseCommand_AddFiles* cmd = new DatabaseCommand_AddFiles( newitems, source() ); Database::instance()->enqueue( Tomahawk::dbcmd_ptr( cmd ) ); } void DatabaseCollection::removeTracks( const QDir& dir ) { qDebug() << Q_FUNC_INFO << dir; DatabaseCommand_DeleteFiles* cmd = new DatabaseCommand_DeleteFiles( dir, source() ); Database::instance()->enqueue( Tomahawk::dbcmd_ptr( cmd ) ); } QList< Tomahawk::playlist_ptr > DatabaseCollection::playlists() { if ( 
Collection::playlists().isEmpty() ) { loadPlaylists(); } return Collection::playlists(); } QList< dynplaylist_ptr > DatabaseCollection::autoPlaylists() { // echonest is dead, disable all echonest code /* if ( Collection::autoPlaylists().isEmpty() ) { loadAutoPlaylists(); } */ return Collection::autoPlaylists(); } QList< dynplaylist_ptr > DatabaseCollection::stations() { // echonest is dead, disable all echonest code /* if ( Collection::stations().isEmpty() ) { loadStations(); } */ return Collection::stations(); } Tomahawk::ArtistsRequest* DatabaseCollection::requestArtists() { //FIXME: assuming there's only one dbcollection per source, and that this is the one Tomahawk::collection_ptr thisCollection = source()->dbCollection(); if ( thisCollection->name() != this->name() ) return 0; Tomahawk::ArtistsRequest* cmd = new DatabaseCommand_AllArtists( thisCollection ); return cmd; } Tomahawk::AlbumsRequest* DatabaseCollection::requestAlbums( const Tomahawk::artist_ptr& artist ) { //FIXME: assuming there's only one dbcollection per source, and that this is the one Tomahawk::collection_ptr thisCollection = source()->dbCollection(); if ( thisCollection->name() != this->name() ) return 0; Tomahawk::AlbumsRequest* cmd = new DatabaseCommand_AllAlbums( thisCollection, artist ); return cmd; } Tomahawk::TracksRequest* DatabaseCollection::requestTracks( const Tomahawk::album_ptr& album ) { //FIXME: assuming there's only one dbcollection per source, and that this is the one Tomahawk::collection_ptr thisCollection = source()->dbCollection(); if ( thisCollection->name() != this->name() ) return 0; DatabaseCommand_AllTracks* cmd = new DatabaseCommand_AllTracks( thisCollection ); if ( album ) { cmd->setAlbum( album->weakRef() ); cmd->setSortOrder( DatabaseCommand_AllTracks::AlbumPosition ); } return cmd; } int DatabaseCollection::trackCount() const { return source()->trackCount(); } QPixmap DatabaseCollection::icon( const QSize& size ) const { return source()->avatar( TomahawkUtils::RoundedCorners, size, true ); } void DatabaseCollection::autoPlaylistCreated( const source_ptr& source, const QVariantList& data ) { dynplaylist_ptr p( new DynamicPlaylist( source, //src data[0].toString(), //current rev data[1].toString(), //title data[2].toString(), //info data[3].toString(), //creator data[4].toUInt(), // createdOn data[5].toString(), // dynamic type static_cast<GeneratorMode>(data[6].toInt()), // dynamic mode data[7].toBool(), //shared data[8].toInt(), //lastmod data[9].toString() ), &QObject::deleteLater ); //GUID p->setWeakSelf( p.toWeakRef() ); addAutoPlaylist( p ); } void DatabaseCollection::stationCreated( const source_ptr& source, const QVariantList& data ) { dynplaylist_ptr p( new DynamicPlaylist( source, //src data[0].toString(), //current rev data[1].toString(), //title data[2].toString(), //info data[3].toString(), //creator data[4].toUInt(), // createdOn data[5].toString(), // dynamic type static_cast<GeneratorMode>(data[6].toInt()), // dynamic mode data[7].toBool(), //shared data[8].toInt(), //lastmod data[9].toString() ), &QObject::deleteLater ); //GUID p->setWeakSelf( p.toWeakRef() ); addStation( p ); } /* * Resolver interface * * We implement searching the database in the DatabaseResolver which avoids a n+1 query here. * We can't simply let ScriptCollection inherit Collection and Resolver because both are QObjects, * although Resolver doesn't need to be a QObject atm, blocking adding signals/slots to Resolver * in future seems to me worse than violating Liskov's law here. 
~ domme */ unsigned int DatabaseCollection::timeout() const { return 0; } unsigned int DatabaseCollection::weight() const { return 0; } void DatabaseCollection::resolve( const Tomahawk::query_ptr& query ) { Q_UNUSED( query ); Q_ASSERT(false); }
{ "pile_set_name": "Github" }
---
id: readme
title: Overview
category: Getting Started
---

This document covers two main topics:

- How the css components are organized
- How the build process and architecture work

## Solar framework css overview

Solar framework includes four concepts of code:

- Base
- Basics
- Components
- Utilities (still under design)

### Solar Base

The solar base is a set of styles baked in to the solar-core to make the overall css development experience better. It includes css normalize and other extremely basic things such as configuring border-box. It does not come with any mixins or classes that developers can use.

### Solar Basics (non-themable)

Solar provides "basics" which are **non-themable** reusable building blocks. They come in the form of sass mixins along with classes for convenience and are namespaced with `S-`. Think of them as the screws and motors in a robot--these are standard parts that aren't painted/themed.

### Solar Components (themable)

Solar provides components which are **themable** reusable building blocks of a website. They are only available as classes. Components should be as generic as possible so that they can be reused in many different places.

When using a solar module, use it alongside your own defined class. For example, the following uses the module class alongside your own class:

```
<ul class="s-webApp-tabBar state-one-tabBar">
```

Do not use the @extend feature.

### Solar utilities

Still to be designed. The current thought is that utilities will live inside a solar extension (with namespace `U-`). Consumers can then create their own utilities as needed. This is still under design and may later be moved out as an extension.

### Sass

Sass is a css preprocessor that solar uses. It provides functionality such as mixins and includes. To use a mixin, do:

```
@include mixinName(arguments);
```

In the solar framework, only use the following features:

- mixins
- variables
- solar functions (shade/tint)

Don't use these features:

- @extend
- nesting
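To make the allowed-features list concrete, a component written against these rules might look like the sketch below. The mixin and variable names are hypothetical (they simply follow the `S-` and theming conventions above); only `shade()` is named by this document.

```
// Hypothetical component styling using only the allowed Sass features
.s-webApp-tabBar {
  @include S-clearfix();                        // mixin from a solar basic
  background: $s-color-primary;                 // variable
  border-color: shade($s-color-primary, 20%);   // solar shade() function
}
```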
{ "pile_set_name": "Github" }
# Global flow

The tool starts in evoke/src/main.cpp. It reads command-line arguments and then constructs a Project object. This object's constructor collects the whole project status & information immediately. Directly after this point it runs the actions that the user instructed it to do. This part of the program may be changed; right now it offers a project dump in verbose mode, a compilation database dump and running the build itself with the built-in executor.

Directly after creating this, the main loop uses a toolchain (from `toolsets/src/Toolset.cpp`) to determine what commands need to be run and what their rebuild-if-changed dependencies are. These are then fed to the executor to run in any order it likes.

# Reading the project

Reading the project happens in the Project constructor.

- It first reads all the files in the source tree that look like a source file and that are not blacklisted. This fills in the `rawImports`, `rawIncludes`, `moduleName` in the File structs. It also recognizes components by the existence of `include` and `src` folders and adds the files to the correct components.
- It creates mappings from symbol-wise import and filename-wise import/include to whichever file it points to. Any entry that can have two or more targets is marked as invalid by overwriting the target with the tag "INVALID".
- It then uses the mappings to find out which files refer to which other files. If the file is being referred to without requiring precompilation, it's added to the `dependencies`, otherwise it's added to the `modImports` list. The latter may (depending on compiler and toolchain) be used directly anyway, but in at least one compiler this causes a build-order dependency that does not exist for the `dependencies` files. At this point, there is no difference between importing by name and importing by filename any more; both need precompilation.
- It propagates file usage information to the component level and extracts what include paths need to be exposed on a component to make the includes/imports work out. This creates a component-level dependency graph too, with (given how files include others) public and private include paths, and public and private component dependencies.

# Expected / planned changes

- The project reading does not fully do modules well; this needs minor fixes to be added still. Depends on how compilers want to receive module information too.
- The whole setup of toolset/ needs to change; this needs to be a file-driven thing where a few default configs are included. Some parts will need to be in code I suspect (as they rely on modImports for example) but most of it should be fine without. This also enables people to add other tools (protobuf compiler, Qt Moc, similar things) to the build setup.
- Unit tests all over the place.
- Online integration tests with Travis and Appveyor.

# Design principles

## Componentization required by Evoke

Evoke requires you to restructure your code base, so that the code base structure explains how the code is laid out. After this, you can still use any descriptive build system to build the code, but you can also use Evoke to build it without adding configuration. It additionally means that anybody who knows the project layout method can read your code base to see what does what, and any tool using it will instantly know what is used where and how things are to be used, created and built. The idea is not to make a better configuration file, but to make it unnecessary.
The idea is that a project buildable with Evoke is subdivided into components, which themselves have no internal subdivision. A component is therefore always either fully included or not included, and at the same time, if you include a header from a component you should logically need the whole component. Such components are called coherent.

As far as Evoke is concerned, your folders are arbitrarily nested and contain components. You can have inner components, components nested any number of levels deep, cyclic dependencies, or simple dependencies. Evoke really does not care how you structure your components or dependencies themselves - that's up to you. I would personally argue against inner components and cyclic dependencies, but there's no technical reason in Evoke they wouldn't work, so they just work. In the case of cyclic dependencies, the code base explicitly creates linker lines that always work when you have them (rather than the repeat-N-times thing that CMake does).

Supporting large projects is the same as small projects; you're just more likely to have accidentally created problems in a large project. Dependency detection works the same, ambiguous headers are still ambiguous, and a project whose headers somebody else includes is still a library.

## Component detection

Evoke requires:

- Each component has a `src` or `include` folder at its root
- The unit test for a component is in a folder `test` next to its `src` and/or `include` folders
- All files found within a component, but not more closely in another component, are part of it

The names of these folders are not modifiable - adding such a capability is not all that hard, but would remove much of the benefit you get from the recognizability of the code base structure.
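A concrete (hypothetical) layout that these rules would recognize as two components, one with a unit test:

```
myproject/
  libfoo/
    include/foo/api.h     -> component "libfoo"; others including this header makes it a library
    src/api.cpp
    test/api_test.cpp     -> unit test for libfoo
  app/
    src/main.cpp          -> component "app"; including foo/api.h creates a dependency on libfoo
```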
{ "pile_set_name": "Github" }
package ansiterm

import (
	"strconv"
)

// sliceContains reports whether b occurs in bytes.
func sliceContains(bytes []byte, b byte) bool {
	for _, v := range bytes {
		if v == b {
			return true
		}
	}
	return false
}

// convertBytesToInteger parses the ASCII digits in bytes as a decimal
// integer. The strconv.Atoi error is discarded, so malformed input yields 0.
func convertBytesToInteger(bytes []byte) int {
	s := string(bytes)
	i, _ := strconv.Atoi(s)
	return i
}
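A quick illustration of both helpers, written as a hypothetical test in the same package (the helpers are unexported, so they can only be exercised from within `ansiterm`). Note in particular the silent-zero behavior for non-numeric input:

```
package ansiterm

import "testing"

// Hypothetical test exercising the helpers above.
func TestHelpers(t *testing.T) {
	if !sliceContains([]byte("38;5;42"), ';') {
		t.Error("expected ';' to be found")
	}
	if got := convertBytesToInteger([]byte("42")); got != 42 {
		t.Errorf("got %d, want 42", got)
	}
	// The Atoi error is discarded, so malformed input yields 0.
	if got := convertBytesToInteger([]byte("abc")); got != 0 {
		t.Errorf("got %d, want 0", got)
	}
}
```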
{ "pile_set_name": "Github" }
Subject: al stats
From: "michael mcguire" <[email protected]>
Reply-To: "michael mcguire" <[email protected]>
Distribution: rec
Organization: Canada Remote Systems
Lines: 11

I am looking for a source of American League baseball stats for individual
players in the same format as printed in newspapers, ie. I do not want to
provide a list of players and get back nice printed reports for $35 a week.
Does anyone know of such statistics availability and an idea of the cost?
--
Canada Remote Systems - Toronto, Ontario
416-629-7000/629-7044
{ "pile_set_name": "Github" }
import { OperatorDoc } from '../operator.model'; export const merge: OperatorDoc = { name: 'merge', operatorType: 'combination', signature: 'public merge(other: ObservableInput, concurrent: number, scheduler: Scheduler): Observable', parameters: [ { name: 'other', type: 'ObservableInput', attribute: '', description: `An input Observable to merge with the source Observable. More than one input Observables may be given as argument.` }, { name: 'concurrent', type: 'number', attribute: 'optional, default: Number.POSITIVE_INFINITY', description: `Maximum number of input Observables being subscribed to concurrently.` }, { name: 'scheduler', type: 'Scheduler', attribute: 'optional, default: null', description: `The IScheduler to use for managing concurrency of input Observables.` } ], marbleUrl: 'http://reactivex.io/rxjs/img/merge.png', shortDescription: { description: `Creates an output Observable which concurrently emits all values from every given input Observable. <span class="informal">Flattens multiple Observables together by blending their values into one Observable.</span>` }, walkthrough: { description: ` <p><span class="markdown-code">Merge</span> subscribes to each given input Observable (either the source or an Observable given as argument), and simply forwards (without doing any transformation) all the values from all the input Observables to the output Observable. The output Observable only completes once all input Observables have completed. Any error delivered by an input Observable will be immediately emitted on the output Observable.</p> ` }, examples: [ { name: 'Merge together two Observables: 1s interval and clicks', code: ` import { merge } from 'rxjs/operators'; import { fromEvent } from 'rxjs/observable/fromEvent'; import { interval } from 'rxjs/observable/interval'; const clicks = fromEvent(document, 'click'); const timer = interval(1000); const clicksOrTimer = clicks.pipe(merge(timer)); clicksOrTimer.subscribe(x => console.log(x)); `, externalLink: { platform: 'JSBin', url: 'http://jsbin.com/wihafapiva/1/embed?js,output' } }, { name: 'Merge together 3 Observables, but only 2 run concurrently', code: ` import { take } from 'rxjs/operators'; import { merge } from 'rxjs/observable/merge'; import { interval } from 'rxjs/observable/interval'; const timer1 = interval(1000).pipe(take(10)); const timer2 = interval(2000).pipe(take(6)); const timer3 = interval(500).pipe(take(10)); const concurrent = 2; // the argument const merged = timer1.pipe(merge(timer2, timer3, concurrent)); merged.subscribe(x => console.log(x)); `, externalLink: { platform: 'JSBin', url: 'http://jsbin.com/midosuqaga/1/embed?js,output' } } ], relatedOperators: ['mergeAll', 'mergeMap', 'mergeMapTo', 'mergeScan'] };
{ "pile_set_name": "Github" }
#!/usr/bin/python # -*- coding: utf-8 -*- # # Ansible module to manage PaloAltoNetworks Firewall # (c) 2016, techbizdev <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'version': '1.0'} DOCUMENTATION = ''' --- module: panos_admin short_description: Add or modify PAN-OS user accounts password. description: - PanOS module that allows changes to the user account passwords by doing API calls to the Firewall using pan-api as the protocol. author: "Luigi Mori (@jtschichold), Ivan Bojer (@ivanbojer)" version_added: "2.3" requirements: - pan-python options: ip_address: description: - IP address (or hostname) of PAN-OS device required: true password: description: - password for authentication required: true username: description: - username for authentication required: false default: "admin" admin_username: description: - username for admin user required: false default: "admin" admin_password: description: - password for admin user required: true role: description: - role for admin user required: false default: null commit: description: - commit if changed required: false default: true ''' EXAMPLES = ''' # Set the password of user admin to "badpassword" # Doesn't commit the candidate config - name: set admin password panos_admin: ip_address: "192.168.1.1" password: "admin" admin_username: admin admin_password: "badpassword" commit: False ''' RETURN = ''' status: description: success status returned: success type: string sample: "okey dokey" ''' from ansible.module_utils.basic import AnsibleModule try: import pan.xapi HAS_LIB = True except ImportError: HAS_LIB = False _ADMIN_XPATH = "/config/mgt-config/users/entry[@name='%s']" def admin_exists(xapi, admin_username): xapi.get(_ADMIN_XPATH % admin_username) e = xapi.element_root.find('.//entry') return e def admin_set(xapi, module, admin_username, admin_password, role): if admin_password is not None: xapi.op(cmd='request password-hash password "%s"' % admin_password, cmd_xml=True) r = xapi.element_root phash = r.find('.//phash').text if role is not None: rbval = "yes" if role != "superuser" and role != 'superreader': rbval = "" ea = admin_exists(xapi, admin_username) if ea is not None: # user exists changed = False if role is not None: rb = ea.find('.//role-based') if rb is not None: if rb[0].tag != role: changed = True xpath = _ADMIN_XPATH % admin_username xpath += '/permissions/role-based/%s' % rb[0].tag xapi.delete(xpath=xpath) xpath = _ADMIN_XPATH % admin_username xpath += '/permissions/role-based' xapi.set(xpath=xpath, element='<%s>%s</%s>' % (role, rbval, role)) if admin_password is not None: xapi.edit(xpath=_ADMIN_XPATH % admin_username+'/phash', element='<phash>%s</phash>' % phash) changed = True return changed # setup the non encrypted part of the monitor exml = [] exml.append('<phash>%s</phash>' % phash) exml.append('<permissions><role-based><%s>%s</%s>' 
'</role-based></permissions>' % (role, rbval, role)) exml = ''.join(exml) # module.fail_json(msg=exml) xapi.set(xpath=_ADMIN_XPATH % admin_username, element=exml) return True def main(): argument_spec = dict( ip_address=dict(), password=dict(no_log=True), username=dict(default='admin'), admin_username=dict(default='admin'), admin_password=dict(no_log=True), role=dict(), commit=dict(type='bool', default=True) ) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) if not HAS_LIB: module.fail_json(msg='pan-python required for this module') ip_address = module.params["ip_address"] if not ip_address: module.fail_json(msg="ip_address should be specified") password = module.params["password"] if not password: module.fail_json(msg="password is required") username = module.params['username'] xapi = pan.xapi.PanXapi( hostname=ip_address, api_username=username, api_password=password ) admin_username = module.params['admin_username'] if admin_username is None: module.fail_json(msg="admin_username is required") admin_password = module.params['admin_password'] role = module.params['role'] commit = module.params['commit'] changed = admin_set(xapi, module, admin_username, admin_password, role) if changed and commit: xapi.commit(cmd="<commit></commit>", sync=True, interval=1) module.exit_json(changed=changed, msg="okey dokey") if __name__ == '__main__': main()
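Beyond the password example in the module documentation above, a playbook task that also assigns a role might look like this sketch. The host, credentials, and new username are placeholders; `superreader` is one of the two role strings the module special-cases when building the role-based permissions element.

```
- name: create a read-only admin and commit the change
  panos_admin:
    ip_address: "192.168.1.1"
    password: "admin"
    admin_username: "auditor"
    admin_password: "changeme"
    role: "superreader"
    commit: true
```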
{ "pile_set_name": "Github" }
/* WBMP ** ---- ** WBMP Level 0: B/W, Uncompressed ** This implements the WBMP format as specified in WAPSpec 1.1 and 1.2. ** It does not support ExtHeaders as defined in the spec. The spec states ** that a WAP client does not need to implement ExtHeaders. ** ** (c) 2000 Johan Van den Brande <[email protected]> */ #include <stdio.h> #include <stddef.h> #include <stdlib.h> #include <string.h> #include "wbmp.h" #include "gd.h" #include "gdhelpers.h" #ifdef NOTDEF #define __TEST /* Compile with main function */ #define __DEBUG /* Extra verbose when with __TEST */ #define __WRITE /* readwbmp and writewbmp(stdout) */ #define __VIEW /* view the wbmp on stdout */ #endif /* getmbi ** ------ ** Get a multibyte integer from a generic getin function ** 'getin' can be getc, with in = NULL ** you can find getin as a function just above the main function ** This way you gain a lot of flexibilty about how this package ** reads a wbmp file. */ int getmbi (int (*getin) (void *in), void *in) { int i, mbi = 0; do { i = getin (in); if (i < 0) return (-1); mbi = (mbi << 7) | (i & 0x7f); } while (i & 0x80); return (mbi); } /* putmbi ** ------ ** Put a multibyte intgerer in some kind of output stream ** I work here with a function pointer, to make it as generic ** as possible. Look at this function as an iterator on the ** mbi integers it spits out. ** */ void putmbi (int i, void (*putout) (int c, void *out), void *out) { int cnt, l, accu; /* Get number of septets */ cnt = 0; accu = 0; while (accu != i) accu += i & 0x7f << 7 * cnt++; /* Produce the multibyte output */ for (l = cnt - 1; l > 0; l--) putout (0x80 | (i & 0x7f << 7 * l) >> 7 * l, out); putout (i & 0x7f, out); } /* skipheader ** ---------- ** Skips the ExtHeader. Not needed for the moment ** */ int skipheader (int (*getin) (void *in), void *in) { int i; do { i = getin (in); if (i < 0) return (-1); } while (i & 0x80); return (0); } /* create wbmp ** ----------- ** create an empty wbmp ** */ Wbmp * createwbmp (int width, int height, int color) { int i; Wbmp *wbmp; if ((wbmp = (Wbmp *) gdMalloc (sizeof (Wbmp))) == NULL) return (NULL); if (overflow2(sizeof (int), width)) { gdFree(wbmp); return NULL; } if (overflow2(sizeof (int) * width, height)) { gdFree(wbmp); return NULL; } if ((wbmp->bitmap = (int *) safe_emalloc(sizeof(int), width * height, 0)) == NULL) { gdFree (wbmp); return (NULL); } wbmp->width = width; wbmp->height = height; for (i = 0; i < width * height; wbmp->bitmap[i++] = color); return (wbmp); } /* readwbmp ** ------- ** Actually reads the WBMP format from an open file descriptor ** It goes along by returning a pointer to a WBMP struct. 
** */ int readwbmp (int (*getin) (void *in), void *in, Wbmp ** return_wbmp) { int row, col, byte, pel, pos; Wbmp *wbmp; if ((wbmp = (Wbmp *) gdMalloc (sizeof (Wbmp))) == NULL) return (-1); wbmp->type = getin (in); if (wbmp->type != 0) { gdFree (wbmp); return (-1); } if (skipheader (getin, in)) { gdFree (wbmp); return (-1); } wbmp->width = getmbi (getin, in); if (wbmp->width == -1) { gdFree (wbmp); return (-1); } wbmp->height = getmbi (getin, in); if (wbmp->height == -1) { gdFree (wbmp); return (-1); } #ifdef __DEBUG printf ("W: %d, H: %d\n", wbmp->width, wbmp->height); #endif if (overflow2(sizeof (int), wbmp->width) || overflow2(sizeof (int) * wbmp->width, wbmp->height)) { gdFree(wbmp); return (-1); } if ((wbmp->bitmap = (int *) safe_emalloc((size_t)wbmp->width * wbmp->height, sizeof(int), 0)) == NULL) { gdFree (wbmp); return (-1); } #ifdef __DEBUG printf ("DATA CONSTRUCTED\n"); #endif pos = 0; for (row = 0; row < wbmp->height; row++) { for (col = 0; col < wbmp->width;) { byte = getin (in); for (pel = 7; pel >= 0; pel--) { if (col++ < wbmp->width) { if (byte & 1 << pel) { wbmp->bitmap[pos] = WBMP_WHITE; } else { wbmp->bitmap[pos] = WBMP_BLACK; } pos++; } } } } *return_wbmp = wbmp; return (0); } /* writewbmp ** --------- ** Write a wbmp to a file descriptor ** ** Why not just giving a filedescriptor to this function? ** Well, the incentive to write this function was the complete ** integration in gd library from www.boutell.com. They use ** their own io functions, so the passing of a function seemed to be ** a logic(?) decision ... ** */ int writewbmp (Wbmp * wbmp, void (*putout) (int c, void *out), void *out) { int row, col; int bitpos, octet; /* Generate the header */ putout (0, out); /* WBMP Type 0: B/W, Uncompressed bitmap */ putout (0, out); /* FixHeaderField */ /* Size of the image */ putmbi (wbmp->width, putout, out); /* width */ putmbi (wbmp->height, putout, out); /* height */ /* Image data */ for (row = 0; row < wbmp->height; row++) { bitpos = 8; octet = 0; for (col = 0; col < wbmp->width; col++) { octet |= ((wbmp->bitmap[row * wbmp->width + col] == 1) ? WBMP_WHITE : WBMP_BLACK) << --bitpos; if (bitpos == 0) { bitpos = 8; putout (octet, out); octet = 0; } } if (bitpos != 8) putout (octet, out); } return (0); } /* freewbmp ** -------- ** gdFrees up memory occupied by a WBMP structure ** */ void freewbmp (Wbmp * wbmp) { gdFree (wbmp->bitmap); gdFree (wbmp); } /* printwbmp ** --------- ** print a WBMP to stdout for visualisation ** */ void printwbmp (Wbmp * wbmp) { int row, col; for (row = 0; row < wbmp->height; row++) { for (col = 0; col < wbmp->width; col++) { if (wbmp->bitmap[wbmp->width * row + col] == WBMP_BLACK) { putchar ('#'); } else { putchar (' '); } } putchar ('\n'); } } #ifdef __TEST /* putout to file descriptor ** ------------------------- */ int putout (int c, void *out) { return (putc (c, (FILE *) out)); } /* getin from file descriptor ** -------------------------- */ int getin (void *in) { return (getc ((FILE *) in)); } /* Main function ** ------------- ** */ int main (int argc, char *argv[]) { FILE *wbmp_file; Wbmp *wbmp; wbmp_file = fopen (argv[1], "rb"); if (wbmp_file) { readwbmp (&getin, wbmp_file, &wbmp); #ifdef __VIEW #ifdef __DEBUG printf ("\nVIEWING IMAGE\n"); #endif printwbmp (wbmp); #endif #ifdef __WRITE #ifdef __DEBUG printf ("\nDUMPING WBMP to STDOUT\n"); #endif writewbmp (wbmp, &putout, stdout); #endif freewbmp (wbmp); fclose (wbmp_file); } } #endif
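Pulling the pieces together, a minimal caller (separate from the `__TEST` harness above) could create a small bitmap, set a pixel, and serialize it with the same output-adapter pattern. This sketch assumes linking against gd, since `createwbmp` allocates via `gdMalloc`:

```
#include <stdio.h>
#include "wbmp.h"

/* Adapter matching the putout function-pointer signature used by writewbmp. */
static void file_putout (int c, void *out)
{
  putc (c, (FILE *) out);
}

int main (void)
{
  /* 8x8 all-white bitmap, then set one pixel to black. */
  Wbmp *wbmp = createwbmp (8, 8, WBMP_WHITE);
  if (wbmp == NULL)
    return 1;
  wbmp->bitmap[3 * wbmp->width + 4] = WBMP_BLACK;

  writewbmp (wbmp, &file_putout, stdout);
  freewbmp (wbmp);
  return 0;
}
```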
{ "pile_set_name": "Github" }
require('../../modules/es6.string.trim');
module.exports = require('../../modules/_core').String.trim;
{ "pile_set_name": "Github" }
{ "version": 2, "waiters": { "AppExists": { "delay": 1, "operation": "DescribeApps", "maxAttempts": 40, "acceptors": [ { "expected": 200, "matcher": "status", "state": "success" }, { "matcher": "status", "expected": 400, "state": "failure" } ] }, "DeploymentSuccessful": { "delay": 15, "operation": "DescribeDeployments", "maxAttempts": 40, "description": "Wait until a deployment has completed successfully", "acceptors": [ { "expected": "successful", "matcher": "pathAll", "state": "success", "argument": "Deployments[].Status" }, { "expected": "failed", "matcher": "pathAny", "state": "failure", "argument": "Deployments[].Status" } ] }, "InstanceOnline": { "delay": 15, "operation": "DescribeInstances", "maxAttempts": 40, "description": "Wait until OpsWorks instance is online.", "acceptors": [ { "expected": "online", "matcher": "pathAll", "state": "success", "argument": "Instances[].Status" }, { "expected": "setup_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "shutting_down", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "start_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "stopped", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "stopping", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "terminating", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "terminated", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "stop_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" } ] }, "InstanceRegistered": { "delay": 15, "operation": "DescribeInstances", "maxAttempts": 40, "description": "Wait until OpsWorks instance is registered.", "acceptors": [ { "expected": "registered", "matcher": "pathAll", "state": "success", "argument": "Instances[].Status" }, { "expected": "setup_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "shutting_down", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "stopped", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "stopping", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "terminating", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "terminated", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "stop_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" } ] }, "InstanceStopped": { "delay": 15, "operation": "DescribeInstances", "maxAttempts": 40, "description": "Wait until OpsWorks instance is stopped.", "acceptors": [ { "expected": "stopped", "matcher": "pathAll", "state": "success", "argument": "Instances[].Status" }, { "expected": "booting", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "online", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "pending", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "rebooting", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "requested", "matcher": "pathAny", "state": "failure", "argument": 
"Instances[].Status" }, { "expected": "running_setup", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "setup_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "start_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "stop_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" } ] }, "InstanceTerminated": { "delay": 15, "operation": "DescribeInstances", "maxAttempts": 40, "description": "Wait until OpsWorks instance is terminated.", "acceptors": [ { "expected": "terminated", "matcher": "pathAll", "state": "success", "argument": "Instances[].Status" }, { "expected": "ResourceNotFoundException", "matcher": "error", "state": "success" }, { "expected": "booting", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "online", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "pending", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "rebooting", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "requested", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "running_setup", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "setup_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" }, { "expected": "start_failed", "matcher": "pathAny", "state": "failure", "argument": "Instances[].Status" } ] } } }
{ "pile_set_name": "Github" }
// Protocol Buffers for Go with Gadgets // // Copyright (c) 2013, The GoGo Authors. All rights reserved. // http://github.com/gogo/protobuf // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. /* The gostring plugin generates a GoString method for each message. The GoString method is called whenever you use a fmt.Printf as such: fmt.Printf("%#v", mymessage) or whenever you actually call GoString() The output produced by the GoString method can be copied from the output into code and used to set a variable. It is totally valid Go Code and is populated exactly as the struct that was printed out. It is enabled by the following extensions: - gostring - gostring_all The gostring plugin also generates a test given it is enabled using one of the following extensions: - testgen - testgen_all Let us look at: github.com/gogo/protobuf/test/example/example.proto Btw all the output can be seen at: github.com/gogo/protobuf/test/example/* The following message: option (gogoproto.gostring_all) = true; message A { optional string Description = 1 [(gogoproto.nullable) = false]; optional int64 Number = 2 [(gogoproto.nullable) = false]; optional bytes Id = 3 [(gogoproto.customtype) = "github.com/gogo/protobuf/test/custom.Uuid", (gogoproto.nullable) = false]; } given to the gostring plugin, will generate the following code: func (this *A) GoString() string { if this == nil { return "nil" } s := strings1.Join([]string{`&test.A{` + `Description:` + fmt1.Sprintf("%#v", this.Description), `Number:` + fmt1.Sprintf("%#v", this.Number), `Id:` + fmt1.Sprintf("%#v", this.Id), `XXX_unrecognized:` + fmt1.Sprintf("%#v", this.XXX_unrecognized) + `}`}, ", ") return s } and the following test code: func TestAGoString(t *testing6.T) { popr := math_rand6.New(math_rand6.NewSource(time6.Now().UnixNano())) p := NewPopulatedA(popr, false) s1 := p.GoString() s2 := fmt2.Sprintf("%#v", p) if s1 != s2 { t.Fatalf("GoString want %v got %v", s1, s2) } _, err := go_parser.ParseExpr(s1) if err != nil { panic(err) } } Typically fmt.Printf("%#v") will stop to print when it reaches a pointer and not print their values, while the generated GoString method will always print all values, recursively. 
*/ package gostring import ( "fmt" "os" "strconv" "strings" "github.com/gogo/protobuf/gogoproto" "github.com/gogo/protobuf/protoc-gen-gogo/generator" ) type gostring struct { *generator.Generator generator.PluginImports atleastOne bool localName string overwrite bool } func NewGoString() *gostring { return &gostring{} } func (p *gostring) Name() string { return "gostring" } func (p *gostring) Overwrite() { p.overwrite = true } func (p *gostring) Init(g *generator.Generator) { p.Generator = g } func (p *gostring) Generate(file *generator.FileDescriptor) { proto3 := gogoproto.IsProto3(file.FileDescriptorProto) p.PluginImports = generator.NewPluginImports(p.Generator) p.atleastOne = false p.localName = generator.FileName(file) fmtPkg := p.NewImport("fmt") stringsPkg := p.NewImport("strings") protoPkg := p.NewImport("github.com/gogo/protobuf/proto") if !gogoproto.ImportsGoGoProto(file.FileDescriptorProto) { protoPkg = p.NewImport("github.com/golang/protobuf/proto") } sortPkg := p.NewImport("sort") strconvPkg := p.NewImport("strconv") reflectPkg := p.NewImport("reflect") sortKeysPkg := p.NewImport("github.com/gogo/protobuf/sortkeys") extensionToGoStringUsed := false for _, message := range file.Messages() { if !p.overwrite && !gogoproto.HasGoString(file.FileDescriptorProto, message.DescriptorProto) { continue } if message.DescriptorProto.GetOptions().GetMapEntry() { continue } p.atleastOne = true packageName := file.GoPackageName() ccTypeName := generator.CamelCaseSlice(message.TypeName()) p.P(`func (this *`, ccTypeName, `) GoString() string {`) p.In() p.P(`if this == nil {`) p.In() p.P(`return "nil"`) p.Out() p.P(`}`) p.P(`s := make([]string, 0, `, strconv.Itoa(len(message.Field)+4), `)`) p.P(`s = append(s, "&`, packageName, ".", ccTypeName, `{")`) oneofs := make(map[string]struct{}) for _, field := range message.Field { nullable := gogoproto.IsNullable(field) repeated := field.IsRepeated() fieldname := p.GetFieldName(message, field) oneof := field.OneofIndex != nil if oneof { if _, ok := oneofs[fieldname]; ok { continue } else { oneofs[fieldname] = struct{}{} } p.P(`if this.`, fieldname, ` != nil {`) p.In() p.P(`s = append(s, "`, fieldname, `: " + `, fmtPkg.Use(), `.Sprintf("%#v", this.`, fieldname, `) + ",\n")`) p.Out() p.P(`}`) } else if p.IsMap(field) { m := p.GoMapType(nil, field) mapgoTyp, keyField, keyAliasField := m.GoType, m.KeyField, m.KeyAliasField keysName := `keysFor` + fieldname keygoTyp, _ := p.GoType(nil, keyField) keygoTyp = strings.Replace(keygoTyp, "*", "", 1) keygoAliasTyp, _ := p.GoType(nil, keyAliasField) keygoAliasTyp = strings.Replace(keygoAliasTyp, "*", "", 1) keyCapTyp := generator.CamelCase(keygoTyp) p.P(keysName, ` := make([]`, keygoTyp, `, 0, len(this.`, fieldname, `))`) p.P(`for k, _ := range this.`, fieldname, ` {`) p.In() if keygoAliasTyp == keygoTyp { p.P(keysName, ` = append(`, keysName, `, k)`) } else { p.P(keysName, ` = append(`, keysName, `, `, keygoTyp, `(k))`) } p.Out() p.P(`}`) p.P(sortKeysPkg.Use(), `.`, keyCapTyp, `s(`, keysName, `)`) mapName := `mapStringFor` + fieldname p.P(mapName, ` := "`, mapgoTyp, `{"`) p.P(`for _, k := range `, keysName, ` {`) p.In() if keygoAliasTyp == keygoTyp { p.P(mapName, ` += fmt.Sprintf("%#v: %#v,", k, this.`, fieldname, `[k])`) } else { p.P(mapName, ` += fmt.Sprintf("%#v: %#v,", k, this.`, fieldname, `[`, keygoAliasTyp, `(k)])`) } p.Out() p.P(`}`) p.P(mapName, ` += "}"`) p.P(`if this.`, fieldname, ` != nil {`) p.In() p.P(`s = append(s, "`, fieldname, `: " + `, mapName, `+ ",\n")`) p.Out() p.P(`}`) } else if 
(field.IsMessage() && !gogoproto.IsCustomType(field) && !gogoproto.IsStdTime(field) && !gogoproto.IsStdDuration(field)) || p.IsGroup(field) { if nullable || repeated { p.P(`if this.`, fieldname, ` != nil {`) p.In() } if nullable { p.P(`s = append(s, "`, fieldname, `: " + `, fmtPkg.Use(), `.Sprintf("%#v", this.`, fieldname, `) + ",\n")`) } else if repeated { if nullable { p.P(`s = append(s, "`, fieldname, `: " + `, fmtPkg.Use(), `.Sprintf("%#v", this.`, fieldname, `) + ",\n")`) } else { goTyp, _ := p.GoType(message, field) goTyp = strings.Replace(goTyp, "[]", "", 1) p.P("vs := make([]*", goTyp, ", len(this.", fieldname, "))") p.P("for i := range vs {") p.In() p.P("vs[i] = &this.", fieldname, "[i]") p.Out() p.P("}") p.P(`s = append(s, "`, fieldname, `: " + `, fmtPkg.Use(), `.Sprintf("%#v", vs) + ",\n")`) } } else { p.P(`s = append(s, "`, fieldname, `: " + `, stringsPkg.Use(), `.Replace(this.`, fieldname, `.GoString()`, ",`&`,``,1)", ` + ",\n")`) } if nullable || repeated { p.Out() p.P(`}`) } } else { if !proto3 && (nullable || repeated) { p.P(`if this.`, fieldname, ` != nil {`) p.In() } if field.IsEnum() { if nullable && !repeated && !proto3 { goTyp, _ := p.GoType(message, field) p.P(`s = append(s, "`, fieldname, `: " + valueToGoString`, p.localName, `(this.`, fieldname, `,"`, generator.GoTypeToName(goTyp), `"`, `) + ",\n")`) } else { p.P(`s = append(s, "`, fieldname, `: " + `, fmtPkg.Use(), `.Sprintf("%#v", this.`, fieldname, `) + ",\n")`) } } else { if nullable && !repeated && !proto3 { goTyp, _ := p.GoType(message, field) p.P(`s = append(s, "`, fieldname, `: " + valueToGoString`, p.localName, `(this.`, fieldname, `,"`, generator.GoTypeToName(goTyp), `"`, `) + ",\n")`) } else { p.P(`s = append(s, "`, fieldname, `: " + `, fmtPkg.Use(), `.Sprintf("%#v", this.`, fieldname, `) + ",\n")`) } } if !proto3 && (nullable || repeated) { p.Out() p.P(`}`) } } } if message.DescriptorProto.HasExtension() { if gogoproto.HasExtensionsMap(file.FileDescriptorProto, message.DescriptorProto) { p.P(`s = append(s, "XXX_InternalExtensions: " + extensionToGoString`, p.localName, `(this) + ",\n")`) extensionToGoStringUsed = true } else { p.P(`if this.XXX_extensions != nil {`) p.In() p.P(`s = append(s, "XXX_extensions: " + `, fmtPkg.Use(), `.Sprintf("%#v", this.XXX_extensions) + ",\n")`) p.Out() p.P(`}`) } } if gogoproto.HasUnrecognized(file.FileDescriptorProto, message.DescriptorProto) { p.P(`if this.XXX_unrecognized != nil {`) p.In() p.P(`s = append(s, "XXX_unrecognized:" + `, fmtPkg.Use(), `.Sprintf("%#v", this.XXX_unrecognized) + ",\n")`) p.Out() p.P(`}`) } p.P(`s = append(s, "}")`) p.P(`return `, stringsPkg.Use(), `.Join(s, "")`) p.Out() p.P(`}`) //Generate GoString methods for oneof fields for _, field := range message.Field { oneof := field.OneofIndex != nil if !oneof { continue } ccTypeName := p.OneOfTypeName(message, field) p.P(`func (this *`, ccTypeName, `) GoString() string {`) p.In() p.P(`if this == nil {`) p.In() p.P(`return "nil"`) p.Out() p.P(`}`) fieldname := p.GetOneOfFieldName(message, field) outStr := strings.Join([]string{ "s := ", stringsPkg.Use(), ".Join([]string{`&", packageName, ".", ccTypeName, "{` + \n", "`", fieldname, ":` + ", fmtPkg.Use(), `.Sprintf("%#v", this.`, fieldname, `)`, " + `}`", `}`, `,", "`, `)`}, "") p.P(outStr) p.P(`return s`) p.Out() p.P(`}`) } } if !p.atleastOne { return } p.P(`func valueToGoString`, p.localName, `(v interface{}, typ string) string {`) p.In() p.P(`rv := `, reflectPkg.Use(), `.ValueOf(v)`) p.P(`if rv.IsNil() {`) p.In() p.P(`return "nil"`) p.Out() p.P(`}`) 
p.P(`pv := `, reflectPkg.Use(), `.Indirect(rv).Interface()`) p.P(`return `, fmtPkg.Use(), `.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)`) p.Out() p.P(`}`) if extensionToGoStringUsed { if !gogoproto.ImportsGoGoProto(file.FileDescriptorProto) { fmt.Fprintf(os.Stderr, "The GoString plugin for messages with extensions requires importing gogoprotobuf. Please see file %s", file.GetName()) os.Exit(1) } p.P(`func extensionToGoString`, p.localName, `(m `, protoPkg.Use(), `.Message) string {`) p.In() p.P(`e := `, protoPkg.Use(), `.GetUnsafeExtensionsMap(m)`) p.P(`if e == nil { return "nil" }`) p.P(`s := "proto.NewUnsafeXXX_InternalExtensions(map[int32]proto.Extension{"`) p.P(`keys := make([]int, 0, len(e))`) p.P(`for k := range e {`) p.In() p.P(`keys = append(keys, int(k))`) p.Out() p.P(`}`) p.P(sortPkg.Use(), `.Ints(keys)`) p.P(`ss := []string{}`) p.P(`for _, k := range keys {`) p.In() p.P(`ss = append(ss, `, strconvPkg.Use(), `.Itoa(k) + ": " + e[int32(k)].GoString())`) p.Out() p.P(`}`) p.P(`s+=`, stringsPkg.Use(), `.Join(ss, ",") + "})"`) p.P(`return s`) p.Out() p.P(`}`) } } func init() { generator.RegisterPlugin(NewGoString()) }
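// For context, here is a hand-written Go method shaped like the plugin's
// generated output. fmt's %#v verb routes through the fmt.GoStringer
// interface, and the resulting string parses as a Go expression — exactly
// what the generated testgen test asserts with go/parser. The struct and
// field names below are illustrative only, not generated code.
package main

import (
	"fmt"
	"go/parser"
	"strings"
)

type A struct {
	Description string
	Number      int64
}

// GoString mimics the generated method: build a valid Go composite literal.
func (this *A) GoString() string {
	if this == nil {
		return "nil"
	}
	s := make([]string, 0, 4)
	s = append(s, "&main.A{")
	s = append(s, "Description: "+fmt.Sprintf("%#v", this.Description)+",\n")
	s = append(s, "Number: "+fmt.Sprintf("%#v", this.Number)+",\n")
	s = append(s, "}")
	return strings.Join(s, "")
}

func main() {
	out := fmt.Sprintf("%#v", &A{Description: "hello", Number: 42}) // calls GoString
	fmt.Println(out)
	if _, err := parser.ParseExpr(out); err != nil { // must be valid Go syntax
		panic(err)
	}
}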
{ "pile_set_name": "Github" }
--- title: "Type-Safe Access to Controls in a Dialog Box" ms.date: "11/04/2016" helpviewer_keywords: ["common controls [MFC], in dialog boxes", "Windows common controls [MFC], in dialog boxes", "safe access to dialog box controls", "dialog boxes [MFC], type-safe access to controls", "controls [MFC], accessing in dialog boxes", "type-safe access to dialog box controls", "MFC dialog boxes [MFC], type-safe access to controls"] ms.assetid: 67021025-dd93-4d6a-8bed-a1348fe50685 --- # Type-Safe Access to Controls in a Dialog Box The controls in a dialog box can use the interfaces of MFC control classes such as `CListBox` and `CEdit`. You can create a control object and attach it to a dialog control. Then you can access the control through its class interface, calling member functions to operate on the control. The methods described here are designed to give you type-safe access to a control. This is especially useful for controls such as edit boxes and list boxes. There are two approaches to making a connection between a control in a dialog box and a C++ control member variable in a `CDialog`-derived class: - [Without Code Wizards](../mfc/type-safe-access-to-controls-without-code-wizards.md) - [With Code Wizards](../mfc/type-safe-access-to-controls-with-code-wizards.md) ## See also [Dialog Boxes](../mfc/dialog-boxes.md)
{ "pile_set_name": "Github" }
/* Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or http://ckeditor.com/license */ CKEDITOR.plugins.add("uicolor",{requires:"dialog",lang:"af,ar,az,bg,ca,cs,cy,da,de,de-ch,el,en,en-gb,eo,es,et,eu,fa,fi,fr,fr-ca,gl,he,hr,hu,id,it,ja,km,ko,ku,lv,mk,nb,nl,no,oc,pl,pt,pt-br,ru,si,sk,sl,sq,sv,tr,tt,ug,uk,vi,zh,zh-cn",icons:"uicolor",hidpi:!0,init:function(a){var b=new CKEDITOR.dialogCommand("uicolor");b.editorFocus=!1;CKEDITOR.dialog.add("uicolor",this.path+"dialogs/uicolor.js");a.addCommand("uicolor",b);a.ui.addButton&&a.ui.addButton("UIColor",{label:a.lang.uicolor.title,command:"uicolor", toolbar:"tools,1"})}});
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" /> <PropertyGroup> <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform> <ProjectGuid>{E45DA7CE-3A8F-4414-AA60-BDF636D15D1B}</ProjectGuid> <OutputType>Library</OutputType> <AppDesignerFolder>Properties</AppDesignerFolder> <RootNamespace>Cassette.RequireJS</RootNamespace> <AssemblyName>Cassette.RequireJS.UnitTests</AssemblyName> <FileAlignment>512</FileAlignment> <SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\</SolutionDir> <TargetFrameworkVersion Condition="'$(Configuration)|$(Platform)' == 'DebugNET35|AnyCPU'">v3.5</TargetFrameworkVersion> <TargetFrameworkVersion Condition="'$(Configuration)|$(Platform)' == 'ReleaseNET35|AnyCPU'">v3.5</TargetFrameworkVersion> <TargetFrameworkVersion Condition=" '$(TargetFrameworkVersion)' == '' ">v4.0</TargetFrameworkVersion> <RestorePackages>true</RestorePackages> </PropertyGroup> <PropertyGroup Condition=" '$(TargetFrameworkVersion)' == 'v4.0' "> <TargetFrameworkConstant>NET40</TargetFrameworkConstant> </PropertyGroup> <PropertyGroup Condition=" '$(TargetFrameworkVersion)' == 'v3.5' "> <TargetFrameworkConstant>NET35</TargetFrameworkConstant> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' "> <DebugSymbols>true</DebugSymbols> <DebugType>full</DebugType> <Optimize>false</Optimize> <OutputPath>bin\$(Configuration)\$(TargetFrameworkConstant)</OutputPath> <BaseIntermediateOutputPath>obj\$(Configuration)\$(TargetFrameworkConstant)</BaseIntermediateOutputPath> <DefineConstants>DEBUG;TRACE;$(TargetFrameworkConstant)</DefineConstants> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' "> <DebugType>pdbonly</DebugType> <Optimize>true</Optimize> <OutputPath>bin\$(Configuration)\$(TargetFrameworkConstant)</OutputPath> <BaseIntermediateOutputPath>obj\$(Configuration)\$(TargetFrameworkConstant)</BaseIntermediateOutputPath> <DefineConstants>TRACE;$(TargetFrameworkConstant)</DefineConstants> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'DebugNET35|AnyCPU'"> <DebugSymbols>true</DebugSymbols> <OutputPath>bin\$(Configuration)\$(TargetFrameworkConstant)</OutputPath> <BaseIntermediateOutputPath>obj\$(Configuration)\$(TargetFrameworkConstant)</BaseIntermediateOutputPath> <DefineConstants>DEBUG;TRACE;$(TargetFrameworkConstant)</DefineConstants> <DebugType>full</DebugType> <PlatformTarget>AnyCPU</PlatformTarget> <ErrorReport>prompt</ErrorReport> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'ReleaseNET35|AnyCPU'"> <OutputPath>bin\$(Configuration)\$(TargetFrameworkConstant)</OutputPath> <BaseIntermediateOutputPath>obj\$(Configuration)\$(TargetFrameworkConstant)</BaseIntermediateOutputPath> <DefineConstants>TRACE;$(TargetFrameworkConstant)</DefineConstants> <Optimize>true</Optimize> <DebugType>pdbonly</DebugType> <PlatformTarget>AnyCPU</PlatformTarget> <ErrorReport>prompt</ErrorReport> </PropertyGroup> <ItemGroup> <Reference Include="Moq, 
Version=4.0.10827.0, Culture=neutral, PublicKeyToken=69f491c39445e920, processorArchitecture=MSIL"> <SpecificVersion>False</SpecificVersion> <HintPath Condition="'$(TargetFrameworkVersion)' == 'v3.5'">$(SolutionDir)packages\Moq.4.0.10827\lib\NET35\Moq.dll</HintPath> <HintPath Condition="'$(TargetFrameworkVersion)' == 'v4.0'">$(SolutionDir)packages\Moq.4.0.10827\lib\NET40\Moq.dll</HintPath> </Reference> <Reference Include="Newtonsoft.Json"> <HintPath Condition="'$(TargetFrameworkVersion)' == 'v3.5'">$(SolutionDir)packages\Newtonsoft.Json.4.5.10\lib\net35\Newtonsoft.Json.dll</HintPath> <HintPath Condition="'$(TargetFrameworkVersion)' == 'v4.0'">$(SolutionDir)packages\Newtonsoft.Json.4.5.10\lib\net40\Newtonsoft.Json.dll</HintPath> </Reference> <Reference Include="Should"> <HintPath>$(SolutionDir)packages\Should.1.1.12.0\lib\Should.dll</HintPath> </Reference> <Reference Include="System" /> <Reference Include="System.Core" /> <Reference Include="System.Xml.Linq" /> <Reference Include="System.Data.DataSetExtensions" /> <Reference Include="Microsoft.CSharp" Condition="'$(TargetFrameworkVersion)' == 'v4.0'" /> <Reference Include="System.Data" /> <Reference Include="System.Xml" /> <Reference Include="xunit"> <HintPath>$(SolutionDir)packages\xunit.1.9.1\lib\net20\xunit.dll</HintPath> </Reference> </ItemGroup> <ItemGroup> <Compile Include="ModuleInitializer.cs" /> <Compile Include="AnonymousModule.cs" /> <Compile Include="NamedModule.cs" /> <Compile Include="PlainScript.cs" /> <Compile Include="Properties\AssemblyInfo.cs" /> <Compile Include="ConfigurationScriptBuilder.cs" /> </ItemGroup> <ItemGroup> <None Include="packages.config" /> </ItemGroup> <ItemGroup> <ProjectReference Include="..\Cassette.RequireJS\Cassette.RequireJS.csproj"> <Project>{5672A520-0A60-424F-B9DB-29A80A96F24F}</Project> <Name>Cassette.RequireJS</Name> </ProjectReference> <ProjectReference Include="..\Cassette.UnitTests\Cassette.UnitTests.csproj"> <Project>{F200A67A-D7B5-441B-AB36-69AA5C87577E}</Project> <Name>Cassette.UnitTests</Name> </ProjectReference> <ProjectReference Include="..\Cassette\Cassette.csproj"> <Project>{A5CCF9D3-5D49-4BFC-B9A6-9EC9E0E29C50}</Project> <Name>Cassette</Name> </ProjectReference> </ItemGroup> <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" /> <Import Project="$(SolutionDir)\.nuget\nuget.targets" /> <!-- To modify your build process, add your task inside one of the targets below and uncomment it. Other similar extension points exist, see Microsoft.Common.targets. <Target Name="BeforeBuild"> </Target> <Target Name="AfterBuild"> </Target> --> </Project>
{ "pile_set_name": "Github" }
package dt

import (
	"net/http"
	"path"

	"github.com/julienschmidt/httprouter"
)

// HTTPRoute defines a route to be used within a HandlerMap.
type HTTPRoute struct {
	Method string
	Path   string
}

// HandlerMap maps HTTPRoutes (the method and URL path) to an httprouter
// handler.
type HandlerMap map[HTTPRoute]http.HandlerFunc

// RouteHandler is a complete struct containing both an HTTPRoute and a handler.
type RouteHandler struct {
	Method  string
	Path    string
	Handler http.HandlerFunc
}

// AddRoutes adds routes to the router dynamically, enabling drivers to add
// routes to an application at runtime, usually as part of their initialization.
func (hm HandlerMap) AddRoutes(prefix string, r *httprouter.Router) {
	for httpRoute, h := range hm {
		p := path.Join("/", prefix, httpRoute.Path)
		r.HandlerFunc(httpRoute.Method, p, h)
	}
}

// NewHandlerMap builds a HandlerMap from a slice of RouteHandlers. This is a
// convenience function, since using RouteHandlers directly is very verbose for
// plugins.
func NewHandlerMap(rhs []RouteHandler) HandlerMap {
	hm := HandlerMap{}
	for _, rh := range rhs {
		route := HTTPRoute{
			Path:   rh.Path,
			Method: rh.Method,
		}
		hm[route] = rh.Handler
	}
	return hm
}
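// A small usage sketch for the dt package above: build a HandlerMap from
// RouteHandlers and mount everything under a prefix on an httprouter router.
// The dt import path is a hypothetical placeholder; httprouter is the real
// github.com/julienschmidt/httprouter package.
package main

import (
	"fmt"
	"net/http"

	"github.com/julienschmidt/httprouter"

	dt "example.com/yourapp/dt" // hypothetical module path for the package above
)

func main() {
	hm := dt.NewHandlerMap([]dt.RouteHandler{
		{Method: "GET", Path: "/status", Handler: func(w http.ResponseWriter, r *http.Request) {
			fmt.Fprintln(w, "ok")
		}},
	})
	r := httprouter.New()
	hm.AddRoutes("myplugin", r) // serves GET /myplugin/status
	http.ListenAndServe(":8080", r)
}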
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <meta http-equiv="X-UA-Compatible" content="IE=9"/> <meta name="generator" content="Doxygen 1.8.5"/> <title>GLM: integer.hpp Source File</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="dynsections.js"></script> <link href="doxygen.css" rel="stylesheet" type="text/css" /> </head> <body> <div id="top"><!-- do not remove this div, it is closed by doxygen! --> <div id="titlearea"> <table cellspacing="0" cellpadding="0"> <tbody> <tr style="height: 56px;"> <td style="padding-left: 0.5em;"> <div id="projectname">GLM &#160;<span id="projectnumber">0.9.5</span> </div> </td> </tr> </tbody> </table> </div> <!-- end header part --> <!-- Generated by Doxygen 1.8.5 --> <div id="navrow1" class="tabs"> <ul class="tablist"> <li><a href="index.html"><span>Main&#160;Page</span></a></li> <li><a href="pages.html"><span>Related&#160;Pages</span></a></li> <li><a href="modules.html"><span>Modules</span></a></li> <li><a href="namespaces.html"><span>Namespaces</span></a></li> <li class="current"><a href="files.html"><span>Files</span></a></li> </ul> </div> <div id="navrow2" class="tabs2"> <ul class="tablist"> <li><a href="files.html"><span>File&#160;List</span></a></li> </ul> </div> <div id="nav-path" class="navpath"> <ul> <li class="navelem"><a class="el" href="dir_8ceffd4ee35c3518d4e8bdc7e638efe8.html">Users</a></li><li class="navelem"><a class="el" href="dir_968fb7988749a6351e7b3d0c1783dec4.html">Groove</a></li><li class="navelem"><a class="el" href="dir_6e418c18ca640a0404613de005739e2e.html">Documents</a></li><li class="navelem"><a class="el" href="dir_e3ecd7863bd215c92a17f47e2ae3be43.html">GitHub</a></li><li class="navelem"><a class="el" href="dir_edf753475b928be648c1cf1c6443cf63.html">glm</a></li><li class="navelem"><a class="el" href="dir_e50778361fd4ab4de52181ed9eb2b726.html">glm</a></li> </ul> </div> </div><!-- top --> <div class="header"> <div class="headertitle"> <div class="title">integer.hpp</div> </div> </div><!--header--> <div class="contents"> <a href="a00050.html">Go to the documentation of this file.</a><div class="fragment"><div class="line"><a name="l00001"></a><span class="lineno"> 1</span>&#160;</div> <div class="line"><a name="l00029"></a><span class="lineno"> 29</span>&#160;<span class="preprocessor">#ifndef GLM_INTEGER_INCLUDED</span></div> <div class="line"><a name="l00030"></a><span class="lineno"> 30</span>&#160;<span class="preprocessor"></span><span class="preprocessor">#define GLM_INTEGER_INCLUDED</span></div> <div class="line"><a name="l00031"></a><span class="lineno"> 31</span>&#160;<span class="preprocessor"></span></div> <div class="line"><a name="l00032"></a><span class="lineno"> 32</span>&#160;<span class="preprocessor">#include &quot;detail/func_integer.hpp&quot;</span></div> <div class="line"><a name="l00033"></a><span class="lineno"> 33</span>&#160;</div> <div class="line"><a name="l00034"></a><span class="lineno"> 34</span>&#160;<span class="preprocessor">#endif//GLM_INTEGER_INCLUDED</span></div> </div><!-- fragment --></div><!-- contents --> <!-- start footer part --> <hr class="footer"/><address class="footer"><small> Generated by &#160;<a href="http://www.doxygen.org/index.html"> <img class="footer" src="doxygen.png" 
alt="doxygen"/> </a> 1.8.5 </small></address> </body> </html>
{ "pile_set_name": "Github" }
{ "type": "engine:nuiSkinEditorScreen", "skin": "engine:nuiEditorScreen", "contents": { "type": "migLayout", "layoutConstraints": "", "colConstraints": "[grow, 25%][grow, 75%]", "rowConstraints": "[min][grow, fill][min]", "debug": true, "contents": [ { "type": "UIBox", "id": "selectedScreen", "updateContent": false, "layoutInfo": { "cc": "cell 1 1" } }, { "type": "migLayout", "layoutInfo": { "cc": "cell 1 0" }, "contents": [ { "type": "UIDropdownScrollable", "id": "availableScreens" } ] }, { "type": "migLayout", "layoutInfo": { "cc": "cell 0 2, spanx 2" }, "contents": [ { "type": "UIButton", "id": "override", "text": "${engine:menu#nui-editor-override}" }, { "type": "UIButton", "id": "save", "text": "${engine:menu#nui-editor-save}" }, { "type": "UIButton", "id": "copy", "text": "${engine:menu#nui-editor-copy}" }, { "type": "UIButton", "id": "paste", "text": "${engine:menu#nui-editor-paste}" }, { "type": "UIButton", "id": "undo", "text": "${engine:menu#nui-editor-undo}" }, { "type": "UIButton", "id": "redo", "text": "${engine:menu#nui-editor-redo}" }, { "type": "UIButton", "id": "settings", "text": "${engine:menu#settings-title}" }, { "type": "UIButton", "id": "close", "text": "${engine:menu#nui-editor-close}" } ] }, { "type": "ScrollableArea", "layoutInfo": { "cc": "cell 0 1" }, "verticalScrollbar": true, "horizontalScrollbar": true, "content": { "type": "JsonEditorTreeView", "id": "editor" } }, { "type": "migLayout", "layoutInfo": { "cc": "cell 0 0" }, "contents": [ { "type": "UIDropdownScrollable", "id": "availableAssets" } ] } ] } }
{ "pile_set_name": "Github" }
/* * Copyright 2020 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config; import com.rits.cloning.Cloner; import com.thoughtworks.go.config.preprocessor.ClassAttributeCache; import com.thoughtworks.go.config.preprocessor.ParamReferenceCollectorFactory; import com.thoughtworks.go.config.preprocessor.ParamResolver; import com.thoughtworks.go.config.preprocessor.SkipParameterResolution; import com.thoughtworks.go.config.validation.NameTypeValidator; import com.thoughtworks.go.domain.BaseCollection; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.Task; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.thoughtworks.go.config.Authorization.ALLOW_GROUP_ADMINS; /** * @understands abstracting a pipeline definition */ @ConfigTag("pipeline") @ConfigCollection(value = StageConfig.class) public class PipelineTemplateConfig extends BaseCollection<StageConfig> implements Validatable, ParamsAttributeAware { private static final ClassAttributeCache.FieldCache FIELD_CACHE = new ClassAttributeCache.FieldCache(); private static final Cloner CLONER = new Cloner(); public static final String NAME = "name"; @ConfigAttribute(optional = false, value = "name") private CaseInsensitiveString name; public static final String AUTHORIZATION = "authorization"; @ConfigSubtag @SkipParameterResolution private Authorization authorization = new Authorization(); private CachedPluggableArtifactConfigs externalArtifactConfigs = null; private CachedFetchPluggableArtifactTasks fetchExternalArtifactTasks = null; private final ConfigErrors configErrors = new ConfigErrors(); public PipelineTemplateConfig() { } public PipelineTemplateConfig(CaseInsensitiveString name, StageConfig... items) { super(items); this.name = name; } public PipelineTemplateConfig(CaseInsensitiveString name, Authorization authorization, StageConfig... 
items) { this(name, items); this.authorization = authorization; } public CaseInsensitiveString name() { return name; } public void validateTree(ValidationContext validationContext, CruiseConfig preprocessedConfig, boolean isTemplateBeingCreated) { validate(validationContext); if (!isTemplateBeingCreated) { validateDependencies(preprocessedConfig); } validateStageConfig(validationContext); } public void encryptSecureProperties(CruiseConfig preprocessedConfig, PipelineTemplateConfig pipelineTemplateConfig) { if (doesNotHavePublishAndFetchExternalConfig()) { return; } for (StageConfig stageConfig : getStages()) { stageConfig.encryptSecureProperties(preprocessedConfig, pipelineTemplateConfig); } } private boolean doesNotHavePublishAndFetchExternalConfig() { if (externalArtifactConfigs == null || fetchExternalArtifactTasks == null) { cachePublishAndFetchExternalConfig(); } return externalArtifactConfigs.isEmpty() && fetchExternalArtifactTasks.isEmpty(); } private void cachePublishAndFetchExternalConfig() { externalArtifactConfigs = new CachedPluggableArtifactConfigs(); fetchExternalArtifactTasks = new CachedFetchPluggableArtifactTasks(); for (StageConfig stageConfig : getStages()) { for (JobConfig jobConfig : stageConfig.getJobs()) { externalArtifactConfigs.addAll(jobConfig.artifactTypeConfigs().getPluggableArtifactConfigs()); for (Task task : jobConfig.getTasks()) { if (task instanceof FetchPluggableArtifactTask) { fetchExternalArtifactTasks.add((FetchPluggableArtifactTask) task); } } } } } private void validateDependencies(CruiseConfig preprocessedConfig) { List<CaseInsensitiveString> pipelineNames = preprocessedConfig.pipelinesAssociatedWithTemplate(this.name()); ParamsConfig paramsConfig = this.referredParams(); for (CaseInsensitiveString pipelineName : pipelineNames) { PipelineConfig pipelineConfig = preprocessedConfig.getPipelineConfigByName(pipelineName); PipelineConfigs pipelineGroup = preprocessedConfig.findGroupOfPipeline(pipelineConfig); PipelineConfigSaveValidationContext contextForStages = PipelineConfigSaveValidationContext.forChain(false, pipelineGroup.getGroup(), preprocessedConfig, pipelineConfig); validateParams(pipelineConfig, paramsConfig); validatePartsOfPipelineConfig(pipelineConfig, contextForStages); validateDependenciesOfDownstreams(pipelineConfig, contextForStages); } } private void validateDependenciesOfDownstreams(PipelineConfig pipelineConfig, PipelineConfigSaveValidationContext contextForStages) { PipelineConfigTreeValidator pipelineConfigTreeValidator = new PipelineConfigTreeValidator(pipelineConfig); pipelineConfigTreeValidator.validateDependencies(contextForStages); this.errors().addAll(pipelineConfig.errors()); } private void validatePartsOfPipelineConfig(PipelineConfig pipelineConfig, PipelineConfigSaveValidationContext contextForStages) { for (StageConfig stageConfig : pipelineConfig.getStages()) { PipelineConfigSaveValidationContext contextForChildren = contextForStages.withParent(stageConfig); validateStageApprovalAuthorization(stageConfig, contextForChildren); for (JobConfig jobConfig : stageConfig.getJobs()) { PipelineConfigSaveValidationContext contextForJobChildren = contextForChildren.withParent(jobConfig); validateFetchTasks(jobConfig, contextForJobChildren); validateElasticProfileId(jobConfig, contextForJobChildren); validatePluggableArtifactConfig(jobConfig, contextForJobChildren); } } } private void validatePluggableArtifactConfig(JobConfig jobConfig, PipelineConfigSaveValidationContext contextForJobChildren) { for (PluggableArtifactConfig 
pluggableArtifactConfig : jobConfig.artifactTypeConfigs().getPluggableArtifactConfigs()) { if (!pluggableArtifactConfig.validateTree(contextForJobChildren)) { for (ConfigErrors errors : pluggableArtifactConfig.getAllErrors()) { this.errors().addAll(errors); } } } } private void validateStageApprovalAuthorization(StageConfig stageConfig, PipelineConfigSaveValidationContext contextForChildren) { Approval approval = stageConfig.getApproval(); if (!approval.validateTree(contextForChildren)) { for (ConfigErrors errors : approval.getAllErrors()) { this.errors().addAll(errors); } } } private void validateElasticProfileId(JobConfig jobConfig, PipelineConfigSaveValidationContext preprocessedConfig) { String elasticProfileId = jobConfig.getElasticProfileId(); if (elasticProfileId != null && !preprocessedConfig.isValidProfileId(elasticProfileId)) { String message = String.format("No profile defined corresponding to profile_id '%s'", elasticProfileId); jobConfig.addError("elasticProfileId", message); this.errors().addAll(jobConfig.errors()); } } private void validateFetchTasks(JobConfig jobConfig, PipelineConfigSaveValidationContext contextForTasks) { for (Task task : jobConfig.getTasks()) { if (task instanceof AbstractFetchTask) { task.validate(contextForTasks); this.errors().addAll(task.errors()); } } } private void validateParams(PipelineConfig pipelineConfig, ParamsConfig paramsConfig) { for (ParamConfig paramConfig : paramsConfig) { if (!pipelineConfig.getParams().hasParamNamed(paramConfig.getName())) { this.addError("params", String.format("The param '%s' is not defined in pipeline '%s'", paramConfig.getName(), pipelineConfig.getName())); } } } @Override public void validate(ValidationContext validationContext) { validateTemplateName(); validateStageNameUniqueness(); this.getAuthorization().validateTree(new DelegatingValidationContext(validationContext) { @Override public boolean shouldNotCheckRole() { return false; } }); } public void validateStageConfig(ValidationContext validationContext) { ValidationContext contextForChildren = validationContext.withParent(this); for (StageConfig stageConfig : this) { stageConfig.validateTree(contextForChildren); } } private void validateStageNameUniqueness() { Map<String, StageConfig> stageNameMap = new HashMap<>(); for (StageConfig stageConfig : this) { stageConfig.validateNameUniqueness(stageNameMap); } } private void validateTemplateName() { if (!new NameTypeValidator().isNameValid(name)) { errors().add(NAME, NameTypeValidator.errorMessage("template", name)); } } @Override public ConfigErrors errors() { return configErrors; } @Override public void addError(String fieldName, String message) { configErrors.add(fieldName, message); } public StageConfig getStage(final CaseInsensitiveString stageName) { return findBy(stageName); } public List<StageConfig> getStages() { return this; } public void setName(String name) { setName(new CaseInsensitiveString(name)); } public void setName(CaseInsensitiveString name) { this.name = name; } public StageConfig findBy(final CaseInsensitiveString stageName) { for (StageConfig stageConfig : this) { if (stageConfig.name().equals(stageName)) { return stageConfig; } } return null; } public boolean addStageWithoutValidityAssertion(StageConfig stageConfig) { return super.add(stageConfig); } public void incrementIndex(StageConfig stageToBeMoved) { moveStage(stageToBeMoved, 1); } public void decrementIndex(StageConfig stageToBeMoved) { moveStage(stageToBeMoved, -1); } private void moveStage(StageConfig moveMeStage, int moveBy) { 
int current = this.indexOf(moveMeStage); if (current == -1) { throw new RuntimeException(String.format("Cannot find the stage '%s' in pipeline '%s'", moveMeStage.name(), name())); } this.remove(moveMeStage); this.add(current + moveBy, moveMeStage); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } if (!super.equals(o)) { return false; } PipelineTemplateConfig config = (PipelineTemplateConfig) o; if (name != null ? !name.equals(config.name) : config.name != null) { return false; } return true; } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + (name != null ? name.hashCode() : 0); return result; } public boolean matches(CaseInsensitiveString templateName) { return this.name.equals(templateName); } @Override public void setConfigAttributes(Object attributes) { Map attributeMap = (Map) attributes; if (attributeMap.containsKey(NAME)) { String strName = (String) attributeMap.get(NAME); name = new CaseInsensitiveString(strName); } if (attributeMap.containsKey(AUTHORIZATION)) { this.authorization = new Authorization(); this.authorization.setConfigAttributes(attributeMap.get(AUTHORIZATION)); } else { this.authorization = new Authorization(); } if (attributeMap.containsKey(ALLOW_GROUP_ADMINS)) { this.authorization.setAllowGroupAdmins("true".equals(attributeMap.get(ALLOW_GROUP_ADMINS))); } } public boolean isAllowGroupAdmins() { return this.getAuthorization().isAllowGroupAdmins(); } public void validateNameUniquness(Map<String, PipelineTemplateConfig> templateMap) { String currentName = name.toLower(); PipelineTemplateConfig templateWithSameName = templateMap.get(currentName); if (templateWithSameName == null) { templateMap.put(currentName, this); } else { templateWithSameName.addError(NAME, String.format("Template name '%s' is not unique", templateWithSameName.name())); this.addError(NAME, String.format("Template name '%s' is not unique", name)); } } public ParamsConfig referredParams() { ParamReferenceCollectorFactory paramHandlerFactory = new ParamReferenceCollectorFactory(); new ParamResolver(paramHandlerFactory, FIELD_CACHE).resolve(CLONER.deepClone(this)); ParamsConfig paramsConfig = new ParamsConfig(); for (String param : paramHandlerFactory.referredParams()) { paramsConfig.add(new ParamConfig(param, null)); } return paramsConfig; } public void copyStages(PipelineConfig pipeline) { if (pipeline != null) { addAll(pipeline); } } public Authorization getAuthorization() { return authorization; } public void setAuthorization(Authorization authorization) { this.authorization = authorization; } public List<ConfigErrors> getAllErrors() { return ErrorCollector.getAllErrors(this); } public void cleanupAllUsagesOfRole(Role roleToDelete) { for (StageConfig stage : getStages()) { stage.cleanupAllUsagesOfRole(roleToDelete); } } public boolean canBeEditedBy(CaseInsensitiveString username, List<Role> roles) { return getAuthorization().isUserAnAdmin(username, roles); } }
{ "pile_set_name": "Github" }
@keyframes flash {
  from, 50%, to { opacity: 1; }
  25%, 75% { opacity: 0; }
}

.flash {
  animation-name: flash;
}
{ "pile_set_name": "Github" }
package net.sgoliver.android.xml; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.xml.sax.Attributes; import android.sax.Element; import android.sax.EndElementListener; import android.sax.EndTextElementListener; import android.sax.RootElement; import android.sax.StartElementListener; import android.util.Xml; public class RssParserSax2 { private URL rssUrl; private Noticia noticiaActual; public RssParserSax2(String url) { try { this.rssUrl = new URL(url); } catch (MalformedURLException e) { throw new RuntimeException(e); } } public List<Noticia> parse() { final List<Noticia> noticias = new ArrayList<Noticia>(); RootElement root = new RootElement("rss"); Element channel = root.getChild("channel"); Element item = channel.getChild("item"); item.setStartElementListener(new StartElementListener(){ public void start(Attributes attrs) { noticiaActual = new Noticia(); } }); item.setEndElementListener(new EndElementListener(){ public void end() { noticias.add(noticiaActual); } }); item.getChild("title").setEndTextElementListener( new EndTextElementListener(){ public void end(String body) { noticiaActual.setTitulo(body); } }); item.getChild("link").setEndTextElementListener( new EndTextElementListener(){ public void end(String body) { noticiaActual.setLink(body); } }); item.getChild("description").setEndTextElementListener( new EndTextElementListener(){ public void end(String body) { noticiaActual.setDescripcion(body); } }); item.getChild("guid").setEndTextElementListener( new EndTextElementListener(){ public void end(String body) { noticiaActual.setGuid(body); } }); item.getChild("pubDate").setEndTextElementListener( new EndTextElementListener(){ public void end(String body) { noticiaActual.setFecha(body); } }); try { Xml.parse(this.getInputStream(), Xml.Encoding.UTF_8, root.getContentHandler()); } catch (Exception e) { throw new RuntimeException(e); } return noticias; } private InputStream getInputStream() { try { return rssUrl.openConnection().getInputStream(); } catch (IOException e) { throw new RuntimeException(e); } } }
{ "pile_set_name": "Github" }
using FluentAssertions;
using NUnit.Framework;
using NzbDrone.Common.Extensions;

namespace NzbDrone.Common.Test.ExtensionTests
{
    [TestFixture]
    public class UrlExtensionsFixture
    {
        [TestCase("http://my.local/url")]
        [TestCase("https://my.local/url")]
        public void should_report_as_valid_url(string url)
        {
            url.IsValidUrl().Should().BeTrue();
        }

        [TestCase("")]
        [TestCase(" http://my.local/url")]
        [TestCase("http://my.local/url ")]
        public void should_report_as_invalid_url(string url)
        {
            url.IsValidUrl().Should().BeFalse();
        }
    }
}
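// The fixture above pins down IsValidUrl's contract: absolute http/https URLs
// pass; empty strings and strings with leading or trailing whitespace fail.
// A Go sketch of an equivalent check — isValidURL is an illustrative name,
// not the tested library's implementation.
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// isValidURL rejects empty input and untrimmed whitespace, then requires an
// absolute URL with a host, matching the cases the fixture exercises.
func isValidURL(s string) bool {
	if s == "" || strings.TrimSpace(s) != s {
		return false
	}
	u, err := url.Parse(s)
	return err == nil && u.IsAbs() && u.Host != ""
}

func main() {
	for _, s := range []string{"http://my.local/url", "", " http://my.local/url", "http://my.local/url "} {
		fmt.Printf("%-25q -> %v\n", s, isValidURL(s))
	}
}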
{ "pile_set_name": "Github" }
.clearfix { *zoom: 1; } .clearfix:before, .clearfix:after { display: table; content: ""; line-height: 0; } .clearfix:after { clear: both; } /*! * FullCalendar v1.6.3 Stylesheet * Docs & License: http://arshaw.com/fullcalendar/ * (c) 2013 Adam Shaw */ .fc { direction: ltr; text-align: left; } .fc table { border-collapse: collapse; border-spacing: 0; } html .fc, .fc table { font-size: 1em; } .fc td, .fc th { padding: 0; vertical-align: top; } /* Header ------------------------------------------------------------------------*/ .fc-header td { white-space: nowrap; } .fc-header-left { width: 25%; text-align: left; } .fc-header-center { text-align: center; } .fc-header-right { width: 25%; text-align: right; } .fc-header-title { display: inline-block; vertical-align: top; } .fc-header-title h2 { margin-top: 0; white-space: nowrap; font-size: 16px; font-weight: 600; line-height: 25px; } .fc .fc-header-space { padding-left: 10px; } .fc-header .fc-button { margin-bottom: 1em; vertical-align: top; } /* buttons edges butting together */ .fc-header .fc-button { margin-right: -1px; } .fc-header .fc-corner-right, .fc-header .ui-corner-right { /* theme */ margin-right: 0; /* back to normal */ } /* button layering (for border precedence) */ .fc-header .fc-state-hover, .fc-header .ui-state-hover { z-index: 2; } .fc-header .fc-state-down { z-index: 3; } .fc-header .fc-state-active, .fc-header .ui-state-active { z-index: 4; } /* Content ------------------------------------------------------------------------*/ .fc-content { clear: both; zoom: 1; /* for IE7, gives accurate coordinates for [un]freezeContentHeight */ } .fc-view { width: 100%; overflow: hidden; } /* Cell Styles ------------------------------------------------------------------------*/ .fc-widget-header, .fc-widget-content { /* <td>, usually */ border: 1px solid #ddd; } .fc-state-highlight { /* <td> today cell */ /* TODO: add .fc-today to <th> */ background: #fcf8e3; } .fc-cell-overlay { /* semi-transparent rectangle while dragging */ background: #bce8f1; opacity: .3; filter: alpha(opacity=30); /* for IE */ } /* Buttons ------------------------------------------------------------------------*/ .fc-button { position: relative; display: inline-block; padding: 0 .6em; overflow: hidden; height: 1.9em; line-height: 1.9em; white-space: nowrap; cursor: pointer; } .fc-state-default { /* non-theme */ border: 1px solid; } .fc-state-default.fc-corner-left { /* non-theme */ } .fc-state-default.fc-corner-right { /* non-theme */ } /* Our default prev/next buttons use HTML entities like &lsaquo; &rsaquo; &laquo; &raquo; and we'll try to make them look good cross-browser. 
*/ .fc-text-arrow { margin: 0 .1em; font-size: 2em; font-family: "Courier New", Courier, monospace; vertical-align: baseline; /* for IE7 */ } .fc-button-prev .fc-text-arrow, .fc-button-next .fc-text-arrow { /* for &lsaquo; &rsaquo; */ font-weight: bold; } /* icon (for jquery ui) */ .fc-button .fc-icon-wrap { position: relative; float: left; top: 50%; } .fc-button .ui-icon { position: relative; float: left; margin-top: -50%; *margin-top: 0; *top: -50%; } /* button states borrowed from twitter bootstrap (http://twitter.github.com/bootstrap/) */ .fc-state-default { background-color: #f3f3f3; font-weight: normal; color: #333333; font-size: 13px; cursor: pointer; border: 1px solid #d9d9d9; border: 1px solid rgba(0, 0, 0, 0.13); position: relative; padding: 0 8px; z-index: 1; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; -o-user-select: none; user-select: none; height: 27px; } .fc-state-hover, .fc-state-down, .fc-state-active, .fc-state-disabled { color: #333333; background-color: #f8f8f8; border-color: #d9d9d9; border-color: rgba(0, 0, 0, 0.13); } .fc-state-hover { color: #333333; background-color: #f8f8f8; border-color: #d9d9d9; border-color: rgba(0, 0, 0, 0.13); } .fc-state-down, .fc-state-active { background-color: #e6e6e6; border-color: #d9d9d9; border-color: rgba(0, 0, 0, 0.13); border-top: 1px solid #a6a6a6; } .fc-state-disabled { cursor: default; background-image: none; opacity: 0.65; filter: alpha(opacity=65); box-shadow: none; } /* Global Event Styles ------------------------------------------------------------------------*/ .fc-event-container > * { z-index: 8; } .fc-event-container > .ui-draggable-dragging, .fc-event-container > .ui-resizable-resizing { z-index: 9; } .fc-event { border: 0; padding: 2px; background-color: #3a87ad; /* default BACKGROUND color */ color: #fff; /* default TEXT color */ font-size: .85em; cursor: default; } a.fc-event { text-decoration: none; } a.fc-event:hover { color: #fff; } a.fc-event, .fc-event-draggable { cursor: pointer; } .fc-rtl .fc-event { text-align: right; } .fc-event-inner { width: 100%; height: 100%; overflow: hidden; } .fc-event-time, .fc-event-title { padding: 0 1px; } .fc .ui-resizable-handle { display: block; position: absolute; z-index: 99999; overflow: hidden; /* hacky spaces (IE6/7) */ font-size: 300%; /* */ line-height: 50%; /* */ } /* Horizontal Events ------------------------------------------------------------------------*/ .fc-event-hori { border-width: 1px 0; margin-bottom: 1px; } .fc-ltr .fc-event-hori.fc-event-start, .fc-rtl .fc-event-hori.fc-event-end { border-left-width: 1px; } .fc-ltr .fc-event-hori.fc-event-end, .fc-rtl .fc-event-hori.fc-event-start { border-right-width: 1px; } /* resizable */ .fc-event-hori .ui-resizable-e { top: 0 !important; /* importants override pre jquery ui 1.7 styles */ right: -3px !important; width: 7px !important; height: 100% !important; cursor: e-resize; } .fc-event-hori .ui-resizable-w { top: 0 !important; left: -3px !important; width: 7px !important; height: 100% !important; cursor: w-resize; } .fc-event-hori .ui-resizable-handle { _padding-bottom: 14px; /* IE6 had 0 height */ } /* Reusable Separate-border Table ------------------------------------------------------------*/ table.fc-border-separate { border-collapse: separate; } .fc-border-separate th, .fc-border-separate td { border-width: 1px 0 0 1px; } .fc-border-separate th.fc-last, .fc-border-separate td.fc-last { border-right-width: 1px; } .fc-border-separate tr.fc-last th, .fc-border-separate tr.fc-last td { 
border-bottom-width: 1px; } .fc-border-separate tbody tr.fc-first td, .fc-border-separate tbody tr.fc-first th { border-top-width: 0; } /* Month View, Basic Week View, Basic Day View ------------------------------------------------------------------------*/ .fc-grid th { text-align: center; } .fc .fc-week-number { width: 22px; text-align: center; } .fc .fc-week-number div { padding: 0 2px; } .fc-grid .fc-day-number { float: right; padding: 0 2px; } .fc-grid .fc-other-month .fc-day-number { opacity: 0.3; filter: alpha(opacity=30); /* for IE */ /* opacity with small font can sometimes look too faded might want to set the 'color' property instead making day-numbers bold also fixes the problem */ } .fc-grid .fc-day-content { clear: both; padding: 2px 2px 1px; /* distance between events and day edges */ } /* event styles */ .fc-grid .fc-event-time { font-weight: bold; } /* right-to-left */ .fc-rtl .fc-grid .fc-day-number { float: left; } .fc-rtl .fc-grid .fc-event-time { float: right; } /* Agenda Week View, Agenda Day View ------------------------------------------------------------------------*/ .fc-agenda table { border-collapse: separate; } .fc-agenda-days th { text-align: center; } .fc-agenda .fc-agenda-axis { width: 50px; padding: 0 4px; vertical-align: middle; text-align: right; white-space: nowrap; font-weight: normal; } .fc-agenda .fc-week-number { font-weight: bold; } .fc-agenda .fc-day-content { padding: 2px 2px 1px; } /* make axis border take precedence */ .fc-agenda-days .fc-agenda-axis { border-right-width: 1px; } .fc-agenda-days .fc-col0 { border-left-width: 0; } /* all-day area */ .fc-agenda-allday th { border-width: 0 1px; } .fc-agenda-allday .fc-day-content { min-height: 34px; /* TODO: doesnt work well in quirksmode */ _height: 34px; } /* divider (between all-day and slots) */ .fc-agenda-divider-inner { height: 2px; overflow: hidden; } .fc-widget-header .fc-agenda-divider-inner { background: #eee; } /* slot rows */ .fc-agenda-slots th { border-width: 1px 1px 0; } .fc-agenda-slots td { border-width: 1px 0 0; background: none; } .fc-agenda-slots td div { height: 20px; } .fc-agenda-slots tr.fc-slot0 th, .fc-agenda-slots tr.fc-slot0 td { border-top-width: 0; } .fc-agenda-slots tr.fc-minor th, .fc-agenda-slots tr.fc-minor td { border-top-style: dotted; } .fc-agenda-slots tr.fc-minor th.ui-widget-header { *border-top-style: solid; /* doesn't work with background in IE6/7 */ } /* Vertical Events ------------------------------------------------------------------------*/ .fc-event-vert { border-width: 0 1px; } .fc-event-vert.fc-event-start { border-top-width: 1px; } .fc-event-vert.fc-event-end { border-bottom-width: 1px; } .fc-event-vert .fc-event-time { white-space: nowrap; font-size: 10px; } .fc-event-vert .fc-event-inner { position: relative; z-index: 2; } .fc-event-vert .fc-event-bg { /* makes the event lighter w/ a semi-transparent overlay */ position: absolute; z-index: 1; top: 0; left: 0; width: 100%; height: 100%; background: #fff; opacity: .25; filter: alpha(opacity=25); } .fc .ui-draggable-dragging .fc-event-bg, .fc-select-helper .fc-event-bg { display: none\9; /* for IE6/7/8. 
nested opacity filters while dragging don't work */ } /* resizable */ .fc-event-vert .ui-resizable-s { bottom: 0 !important; /* importants override pre jquery ui 1.7 styles */ width: 100% !important; height: 8px !important; overflow: hidden !important; line-height: 8px !important; font-size: 11px !important; font-family: monospace; text-align: center; cursor: s-resize; } .fc-agenda .ui-resizable-resizing { /* TODO: better selector */ _overflow: hidden; }
{ "pile_set_name": "Github" }
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: descriptor.proto

/*
Package descriptor is a generated protocol buffer package.

It is generated from these files:
	descriptor.proto

It has these top-level messages:
	FileDescriptorSet
	FileDescriptorProto
	DescriptorProto
	ExtensionRangeOptions
	FieldDescriptorProto
	OneofDescriptorProto
	EnumDescriptorProto
	EnumValueDescriptorProto
	ServiceDescriptorProto
	MethodDescriptorProto
	FileOptions
	MessageOptions
	FieldOptions
	OneofOptions
	EnumOptions
	EnumValueOptions
	ServiceOptions
	MethodOptions
	UninterpretedOption
	SourceCodeInfo
	GeneratedCodeInfo
*/
package descriptor

import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package

type FieldDescriptorProto_Type int32

const (
	// 0 is reserved for errors.
	// Order is weird for historical reasons.
	FieldDescriptorProto_TYPE_DOUBLE FieldDescriptorProto_Type = 1
	FieldDescriptorProto_TYPE_FLOAT  FieldDescriptorProto_Type = 2
	// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
	// negative values are likely.
	FieldDescriptorProto_TYPE_INT64  FieldDescriptorProto_Type = 3
	FieldDescriptorProto_TYPE_UINT64 FieldDescriptorProto_Type = 4
	// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
	// negative values are likely.
	FieldDescriptorProto_TYPE_INT32   FieldDescriptorProto_Type = 5
	FieldDescriptorProto_TYPE_FIXED64 FieldDescriptorProto_Type = 6
	FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7
	FieldDescriptorProto_TYPE_BOOL    FieldDescriptorProto_Type = 8
	FieldDescriptorProto_TYPE_STRING  FieldDescriptorProto_Type = 9
	// Tag-delimited aggregate.
	// Group type is deprecated and not supported in proto3. However, Proto3
	// implementations should still be able to parse the group wire format and
	// treat group fields as unknown fields.
	FieldDescriptorProto_TYPE_GROUP   FieldDescriptorProto_Type = 10
	FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11
	// New in version 2.
FieldDescriptorProto_TYPE_BYTES FieldDescriptorProto_Type = 12 FieldDescriptorProto_TYPE_UINT32 FieldDescriptorProto_Type = 13 FieldDescriptorProto_TYPE_ENUM FieldDescriptorProto_Type = 14 FieldDescriptorProto_TYPE_SFIXED32 FieldDescriptorProto_Type = 15 FieldDescriptorProto_TYPE_SFIXED64 FieldDescriptorProto_Type = 16 FieldDescriptorProto_TYPE_SINT32 FieldDescriptorProto_Type = 17 FieldDescriptorProto_TYPE_SINT64 FieldDescriptorProto_Type = 18 ) var FieldDescriptorProto_Type_name = map[int32]string{ 1: "TYPE_DOUBLE", 2: "TYPE_FLOAT", 3: "TYPE_INT64", 4: "TYPE_UINT64", 5: "TYPE_INT32", 6: "TYPE_FIXED64", 7: "TYPE_FIXED32", 8: "TYPE_BOOL", 9: "TYPE_STRING", 10: "TYPE_GROUP", 11: "TYPE_MESSAGE", 12: "TYPE_BYTES", 13: "TYPE_UINT32", 14: "TYPE_ENUM", 15: "TYPE_SFIXED32", 16: "TYPE_SFIXED64", 17: "TYPE_SINT32", 18: "TYPE_SINT64", } var FieldDescriptorProto_Type_value = map[string]int32{ "TYPE_DOUBLE": 1, "TYPE_FLOAT": 2, "TYPE_INT64": 3, "TYPE_UINT64": 4, "TYPE_INT32": 5, "TYPE_FIXED64": 6, "TYPE_FIXED32": 7, "TYPE_BOOL": 8, "TYPE_STRING": 9, "TYPE_GROUP": 10, "TYPE_MESSAGE": 11, "TYPE_BYTES": 12, "TYPE_UINT32": 13, "TYPE_ENUM": 14, "TYPE_SFIXED32": 15, "TYPE_SFIXED64": 16, "TYPE_SINT32": 17, "TYPE_SINT64": 18, } func (x FieldDescriptorProto_Type) Enum() *FieldDescriptorProto_Type { p := new(FieldDescriptorProto_Type) *p = x return p } func (x FieldDescriptorProto_Type) String() string { return proto.EnumName(FieldDescriptorProto_Type_name, int32(x)) } func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Type_value, data, "FieldDescriptorProto_Type") if err != nil { return err } *x = FieldDescriptorProto_Type(value) return nil } func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{4, 0} } type FieldDescriptorProto_Label int32 const ( // 0 is reserved for errors FieldDescriptorProto_LABEL_OPTIONAL FieldDescriptorProto_Label = 1 FieldDescriptorProto_LABEL_REQUIRED FieldDescriptorProto_Label = 2 FieldDescriptorProto_LABEL_REPEATED FieldDescriptorProto_Label = 3 ) var FieldDescriptorProto_Label_name = map[int32]string{ 1: "LABEL_OPTIONAL", 2: "LABEL_REQUIRED", 3: "LABEL_REPEATED", } var FieldDescriptorProto_Label_value = map[string]int32{ "LABEL_OPTIONAL": 1, "LABEL_REQUIRED": 2, "LABEL_REPEATED": 3, } func (x FieldDescriptorProto_Label) Enum() *FieldDescriptorProto_Label { p := new(FieldDescriptorProto_Label) *p = x return p } func (x FieldDescriptorProto_Label) String() string { return proto.EnumName(FieldDescriptorProto_Label_name, int32(x)) } func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Label_value, data, "FieldDescriptorProto_Label") if err != nil { return err } *x = FieldDescriptorProto_Label(value) return nil } func (FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{4, 1} } // Generated classes can be optimized for speed or code size. type FileOptions_OptimizeMode int32 const ( FileOptions_SPEED FileOptions_OptimizeMode = 1 // etc. 
FileOptions_CODE_SIZE FileOptions_OptimizeMode = 2 FileOptions_LITE_RUNTIME FileOptions_OptimizeMode = 3 ) var FileOptions_OptimizeMode_name = map[int32]string{ 1: "SPEED", 2: "CODE_SIZE", 3: "LITE_RUNTIME", } var FileOptions_OptimizeMode_value = map[string]int32{ "SPEED": 1, "CODE_SIZE": 2, "LITE_RUNTIME": 3, } func (x FileOptions_OptimizeMode) Enum() *FileOptions_OptimizeMode { p := new(FileOptions_OptimizeMode) *p = x return p } func (x FileOptions_OptimizeMode) String() string { return proto.EnumName(FileOptions_OptimizeMode_name, int32(x)) } func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FileOptions_OptimizeMode_value, data, "FileOptions_OptimizeMode") if err != nil { return err } *x = FileOptions_OptimizeMode(value) return nil } func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{10, 0} } type FieldOptions_CType int32 const ( // Default mode. FieldOptions_STRING FieldOptions_CType = 0 FieldOptions_CORD FieldOptions_CType = 1 FieldOptions_STRING_PIECE FieldOptions_CType = 2 ) var FieldOptions_CType_name = map[int32]string{ 0: "STRING", 1: "CORD", 2: "STRING_PIECE", } var FieldOptions_CType_value = map[string]int32{ "STRING": 0, "CORD": 1, "STRING_PIECE": 2, } func (x FieldOptions_CType) Enum() *FieldOptions_CType { p := new(FieldOptions_CType) *p = x return p } func (x FieldOptions_CType) String() string { return proto.EnumName(FieldOptions_CType_name, int32(x)) } func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldOptions_CType_value, data, "FieldOptions_CType") if err != nil { return err } *x = FieldOptions_CType(value) return nil } func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{12, 0} } type FieldOptions_JSType int32 const ( // Use the default type. FieldOptions_JS_NORMAL FieldOptions_JSType = 0 // Use JavaScript strings. FieldOptions_JS_STRING FieldOptions_JSType = 1 // Use JavaScript numbers. FieldOptions_JS_NUMBER FieldOptions_JSType = 2 ) var FieldOptions_JSType_name = map[int32]string{ 0: "JS_NORMAL", 1: "JS_STRING", 2: "JS_NUMBER", } var FieldOptions_JSType_value = map[string]int32{ "JS_NORMAL": 0, "JS_STRING": 1, "JS_NUMBER": 2, } func (x FieldOptions_JSType) Enum() *FieldOptions_JSType { p := new(FieldOptions_JSType) *p = x return p } func (x FieldOptions_JSType) String() string { return proto.EnumName(FieldOptions_JSType_name, int32(x)) } func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(FieldOptions_JSType_value, data, "FieldOptions_JSType") if err != nil { return err } *x = FieldOptions_JSType(value) return nil } func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{12, 1} } // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, // or neither? HTTP based RPC implementation may choose GET verb for safe // methods, and PUT verb for idempotent methods instead of the default POST. 
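// Illustrative sketch, not part of the generated file: how the generated enum
// plumbing above fits together. The function name exampleEnumRoundTrip is
// hypothetical; every other identifier is defined earlier in this file, and
// proto.UnmarshalJSONEnum (used by the generated UnmarshalJSON) accepts either
// the quoted enum name or its numeric value.
func exampleEnumRoundTrip() {
	t := FieldDescriptorProto_TYPE_SINT64
	fmt.Println(t.String())                                     // "TYPE_SINT64", via proto.EnumName
	fmt.Println(FieldDescriptorProto_Type_value["TYPE_SINT64"]) // 18

	var back FieldDescriptorProto_Type
	if err := back.UnmarshalJSON([]byte(`"TYPE_SINT64"`)); err == nil {
		fmt.Println(back == t) // true
	}
}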
type MethodOptions_IdempotencyLevel int32 const ( MethodOptions_IDEMPOTENCY_UNKNOWN MethodOptions_IdempotencyLevel = 0 MethodOptions_NO_SIDE_EFFECTS MethodOptions_IdempotencyLevel = 1 MethodOptions_IDEMPOTENT MethodOptions_IdempotencyLevel = 2 ) var MethodOptions_IdempotencyLevel_name = map[int32]string{ 0: "IDEMPOTENCY_UNKNOWN", 1: "NO_SIDE_EFFECTS", 2: "IDEMPOTENT", } var MethodOptions_IdempotencyLevel_value = map[string]int32{ "IDEMPOTENCY_UNKNOWN": 0, "NO_SIDE_EFFECTS": 1, "IDEMPOTENT": 2, } func (x MethodOptions_IdempotencyLevel) Enum() *MethodOptions_IdempotencyLevel { p := new(MethodOptions_IdempotencyLevel) *p = x return p } func (x MethodOptions_IdempotencyLevel) String() string { return proto.EnumName(MethodOptions_IdempotencyLevel_name, int32(x)) } func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error { value, err := proto.UnmarshalJSONEnum(MethodOptions_IdempotencyLevel_value, data, "MethodOptions_IdempotencyLevel") if err != nil { return err } *x = MethodOptions_IdempotencyLevel(value) return nil } func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{17, 0} } // The protocol compiler can output a FileDescriptorSet containing the .proto // files it parses. type FileDescriptorSet struct { File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *FileDescriptorSet) Reset() { *m = FileDescriptorSet{} } func (m *FileDescriptorSet) String() string { return proto.CompactTextString(m) } func (*FileDescriptorSet) ProtoMessage() {} func (*FileDescriptorSet) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{0} } func (m *FileDescriptorSet) GetFile() []*FileDescriptorProto { if m != nil { return m.File } return nil } // Describes a complete .proto file. type FileDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` Package *string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"` // Names of files imported by this file. Dependency []string `protobuf:"bytes,3,rep,name=dependency" json:"dependency,omitempty"` // Indexes of the public imported files in the dependency list above. PublicDependency []int32 `protobuf:"varint,10,rep,name=public_dependency,json=publicDependency" json:"public_dependency,omitempty"` // Indexes of the weak imported files in the dependency list. // For Google-internal migration only. Do not use. WeakDependency []int32 `protobuf:"varint,11,rep,name=weak_dependency,json=weakDependency" json:"weak_dependency,omitempty"` // All top-level definitions in this file. MessageType []*DescriptorProto `protobuf:"bytes,4,rep,name=message_type,json=messageType" json:"message_type,omitempty"` EnumType []*EnumDescriptorProto `protobuf:"bytes,5,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` Service []*ServiceDescriptorProto `protobuf:"bytes,6,rep,name=service" json:"service,omitempty"` Extension []*FieldDescriptorProto `protobuf:"bytes,7,rep,name=extension" json:"extension,omitempty"` Options *FileOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` // This field contains optional information about the original source code. // You may safely remove this entire field without harming runtime // functionality of the descriptors -- the information is needed only by // development tools. 
SourceCodeInfo *SourceCodeInfo `protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"` // The syntax of the proto file. // The supported values are "proto2" and "proto3". Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } func (*FileDescriptorProto) ProtoMessage() {} func (*FileDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{1} } func (m *FileDescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *FileDescriptorProto) GetPackage() string { if m != nil && m.Package != nil { return *m.Package } return "" } func (m *FileDescriptorProto) GetDependency() []string { if m != nil { return m.Dependency } return nil } func (m *FileDescriptorProto) GetPublicDependency() []int32 { if m != nil { return m.PublicDependency } return nil } func (m *FileDescriptorProto) GetWeakDependency() []int32 { if m != nil { return m.WeakDependency } return nil } func (m *FileDescriptorProto) GetMessageType() []*DescriptorProto { if m != nil { return m.MessageType } return nil } func (m *FileDescriptorProto) GetEnumType() []*EnumDescriptorProto { if m != nil { return m.EnumType } return nil } func (m *FileDescriptorProto) GetService() []*ServiceDescriptorProto { if m != nil { return m.Service } return nil } func (m *FileDescriptorProto) GetExtension() []*FieldDescriptorProto { if m != nil { return m.Extension } return nil } func (m *FileDescriptorProto) GetOptions() *FileOptions { if m != nil { return m.Options } return nil } func (m *FileDescriptorProto) GetSourceCodeInfo() *SourceCodeInfo { if m != nil { return m.SourceCodeInfo } return nil } func (m *FileDescriptorProto) GetSyntax() string { if m != nil && m.Syntax != nil { return *m.Syntax } return "" } // Describes a message type. type DescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` Field []*FieldDescriptorProto `protobuf:"bytes,2,rep,name=field" json:"field,omitempty"` Extension []*FieldDescriptorProto `protobuf:"bytes,6,rep,name=extension" json:"extension,omitempty"` NestedType []*DescriptorProto `protobuf:"bytes,3,rep,name=nested_type,json=nestedType" json:"nested_type,omitempty"` EnumType []*EnumDescriptorProto `protobuf:"bytes,4,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` ExtensionRange []*DescriptorProto_ExtensionRange `protobuf:"bytes,5,rep,name=extension_range,json=extensionRange" json:"extension_range,omitempty"` OneofDecl []*OneofDescriptorProto `protobuf:"bytes,8,rep,name=oneof_decl,json=oneofDecl" json:"oneof_decl,omitempty"` Options *MessageOptions `protobuf:"bytes,7,opt,name=options" json:"options,omitempty"` ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` // Reserved field names, which may not be used by fields in the same message. // A given name may only be reserved once. 
ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } func (*DescriptorProto) ProtoMessage() {} func (*DescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{2} } func (m *DescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *DescriptorProto) GetField() []*FieldDescriptorProto { if m != nil { return m.Field } return nil } func (m *DescriptorProto) GetExtension() []*FieldDescriptorProto { if m != nil { return m.Extension } return nil } func (m *DescriptorProto) GetNestedType() []*DescriptorProto { if m != nil { return m.NestedType } return nil } func (m *DescriptorProto) GetEnumType() []*EnumDescriptorProto { if m != nil { return m.EnumType } return nil } func (m *DescriptorProto) GetExtensionRange() []*DescriptorProto_ExtensionRange { if m != nil { return m.ExtensionRange } return nil } func (m *DescriptorProto) GetOneofDecl() []*OneofDescriptorProto { if m != nil { return m.OneofDecl } return nil } func (m *DescriptorProto) GetOptions() *MessageOptions { if m != nil { return m.Options } return nil } func (m *DescriptorProto) GetReservedRange() []*DescriptorProto_ReservedRange { if m != nil { return m.ReservedRange } return nil } func (m *DescriptorProto) GetReservedName() []string { if m != nil { return m.ReservedName } return nil } type DescriptorProto_ExtensionRange struct { Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` Options *ExtensionRangeOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *DescriptorProto_ExtensionRange) Reset() { *m = DescriptorProto_ExtensionRange{} } func (m *DescriptorProto_ExtensionRange) String() string { return proto.CompactTextString(m) } func (*DescriptorProto_ExtensionRange) ProtoMessage() {} func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{2, 0} } func (m *DescriptorProto_ExtensionRange) GetStart() int32 { if m != nil && m.Start != nil { return *m.Start } return 0 } func (m *DescriptorProto_ExtensionRange) GetEnd() int32 { if m != nil && m.End != nil { return *m.End } return 0 } func (m *DescriptorProto_ExtensionRange) GetOptions() *ExtensionRangeOptions { if m != nil { return m.Options } return nil } // Range of reserved tag numbers. Reserved tag numbers may not be used by // fields or extension ranges in the same message. Reserved ranges may // not overlap. 
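// Illustrative sketch, not part of the generated file: the generated getters
// above are nil-safe, so optional fields can be read without nil checks. The
// function name and the file name used here are hypothetical; proto.String is
// the standard pointer helper from the proto package imported above.
func exampleNilSafeGetters() {
	var fd *FileDescriptorProto
	fmt.Println(fd.GetName() == "") // true — a nil receiver yields the zero value, no panic

	fd = &FileDescriptorProto{
		Name:   proto.String("example.proto"),
		Syntax: proto.String("proto3"),
	}
	fmt.Println(fd.GetName(), fd.GetSyntax()) // example.proto proto3
}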
type DescriptorProto_ReservedRange struct { Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *DescriptorProto_ReservedRange) Reset() { *m = DescriptorProto_ReservedRange{} } func (m *DescriptorProto_ReservedRange) String() string { return proto.CompactTextString(m) } func (*DescriptorProto_ReservedRange) ProtoMessage() {} func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{2, 1} } func (m *DescriptorProto_ReservedRange) GetStart() int32 { if m != nil && m.Start != nil { return *m.Start } return 0 } func (m *DescriptorProto_ReservedRange) GetEnd() int32 { if m != nil && m.End != nil { return *m.End } return 0 } type ExtensionRangeOptions struct { // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *ExtensionRangeOptions) Reset() { *m = ExtensionRangeOptions{} } func (m *ExtensionRangeOptions) String() string { return proto.CompactTextString(m) } func (*ExtensionRangeOptions) ProtoMessage() {} func (*ExtensionRangeOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{3} } var extRange_ExtensionRangeOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*ExtensionRangeOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_ExtensionRangeOptions } func (m *ExtensionRangeOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } // Describes a field within a message. type FieldDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` Number *int32 `protobuf:"varint,3,opt,name=number" json:"number,omitempty"` Label *FieldDescriptorProto_Label `protobuf:"varint,4,opt,name=label,enum=google.protobuf.FieldDescriptorProto_Label" json:"label,omitempty"` // If type_name is set, this need not be set. If both this and type_name // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. Type *FieldDescriptorProto_Type `protobuf:"varint,5,opt,name=type,enum=google.protobuf.FieldDescriptorProto_Type" json:"type,omitempty"` // For message and enum types, this is the name of the type. If the name // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping // rules are used to find the type (i.e. first the nested types within this // message are searched, then within the parent, on up to the root // namespace). TypeName *string `protobuf:"bytes,6,opt,name=type_name,json=typeName" json:"type_name,omitempty"` // For extensions, this is the name of the type being extended. It is // resolved in the same manner as type_name. Extendee *string `protobuf:"bytes,2,opt,name=extendee" json:"extendee,omitempty"` // For numeric types, contains the original text representation of the value. // For booleans, "true" or "false". // For strings, contains the default text contents (not escaped in any way). // For bytes, contains the C escaped value. All bytes >= 128 are escaped. // TODO(kenton): Base-64 encode? DefaultValue *string `protobuf:"bytes,7,opt,name=default_value,json=defaultValue" json:"default_value,omitempty"` // If set, gives the index of a oneof in the containing type's oneof_decl // list. 
This field is a member of that oneof. OneofIndex *int32 `protobuf:"varint,9,opt,name=oneof_index,json=oneofIndex" json:"oneof_index,omitempty"` // JSON name of this field. The value is set by protocol compiler. If the // user has set a "json_name" option on this field, that option's value // will be used. Otherwise, it's deduced from the field's name by converting // it to camelCase. JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } func (*FieldDescriptorProto) ProtoMessage() {} func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{4} } func (m *FieldDescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *FieldDescriptorProto) GetNumber() int32 { if m != nil && m.Number != nil { return *m.Number } return 0 } func (m *FieldDescriptorProto) GetLabel() FieldDescriptorProto_Label { if m != nil && m.Label != nil { return *m.Label } return FieldDescriptorProto_LABEL_OPTIONAL } func (m *FieldDescriptorProto) GetType() FieldDescriptorProto_Type { if m != nil && m.Type != nil { return *m.Type } return FieldDescriptorProto_TYPE_DOUBLE } func (m *FieldDescriptorProto) GetTypeName() string { if m != nil && m.TypeName != nil { return *m.TypeName } return "" } func (m *FieldDescriptorProto) GetExtendee() string { if m != nil && m.Extendee != nil { return *m.Extendee } return "" } func (m *FieldDescriptorProto) GetDefaultValue() string { if m != nil && m.DefaultValue != nil { return *m.DefaultValue } return "" } func (m *FieldDescriptorProto) GetOneofIndex() int32 { if m != nil && m.OneofIndex != nil { return *m.OneofIndex } return 0 } func (m *FieldDescriptorProto) GetJsonName() string { if m != nil && m.JsonName != nil { return *m.JsonName } return "" } func (m *FieldDescriptorProto) GetOptions() *FieldOptions { if m != nil { return m.Options } return nil } // Describes a oneof. type OneofDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } func (*OneofDescriptorProto) ProtoMessage() {} func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{5} } func (m *OneofDescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *OneofDescriptorProto) GetOptions() *OneofOptions { if m != nil { return m.Options } return nil } // Describes an enum type. type EnumDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` // Range of reserved numeric values. Reserved numeric values may not be used // by enum values in the same enum declaration. Reserved ranges may not // overlap. 
ReservedRange []*EnumDescriptorProto_EnumReservedRange `protobuf:"bytes,4,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` // Reserved enum value names, which may not be reused. A given name may only // be reserved once. ReservedName []string `protobuf:"bytes,5,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } func (*EnumDescriptorProto) ProtoMessage() {} func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{6} } func (m *EnumDescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *EnumDescriptorProto) GetValue() []*EnumValueDescriptorProto { if m != nil { return m.Value } return nil } func (m *EnumDescriptorProto) GetOptions() *EnumOptions { if m != nil { return m.Options } return nil } func (m *EnumDescriptorProto) GetReservedRange() []*EnumDescriptorProto_EnumReservedRange { if m != nil { return m.ReservedRange } return nil } func (m *EnumDescriptorProto) GetReservedName() []string { if m != nil { return m.ReservedName } return nil } // Range of reserved numeric values. Reserved values may not be used by // entries in the same enum. Reserved ranges may not overlap. // // Note that this is distinct from DescriptorProto.ReservedRange in that it // is inclusive such that it can appropriately represent the entire int32 // domain. type EnumDescriptorProto_EnumReservedRange struct { Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *EnumDescriptorProto_EnumReservedRange) Reset() { *m = EnumDescriptorProto_EnumReservedRange{} } func (m *EnumDescriptorProto_EnumReservedRange) String() string { return proto.CompactTextString(m) } func (*EnumDescriptorProto_EnumReservedRange) ProtoMessage() {} func (*EnumDescriptorProto_EnumReservedRange) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{6, 0} } func (m *EnumDescriptorProto_EnumReservedRange) GetStart() int32 { if m != nil && m.Start != nil { return *m.Start } return 0 } func (m *EnumDescriptorProto_EnumReservedRange) GetEnd() int32 { if m != nil && m.End != nil { return *m.End } return 0 } // Describes a value within an enum. type EnumValueDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } func (*EnumValueDescriptorProto) ProtoMessage() {} func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{7} } func (m *EnumValueDescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *EnumValueDescriptorProto) GetNumber() int32 { if m != nil && m.Number != nil { return *m.Number } return 0 } func (m *EnumValueDescriptorProto) GetOptions() *EnumValueOptions { if m != nil { return m.Options } return nil } // Describes a service. 
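// Illustrative sketch, not part of the generated file: declaring an optional
// int64 field the way descriptor producers populate FieldDescriptorProto
// (defined above). exampleIDField is a hypothetical name; Enum() and the
// proto.Int32/proto.String helpers return the pointers optional fields require.
func exampleIDField() *FieldDescriptorProto {
	return &FieldDescriptorProto{
		Name:     proto.String("id"),
		Number:   proto.Int32(1),
		Label:    FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
		Type:     FieldDescriptorProto_TYPE_INT64.Enum(),
		JsonName: proto.String("id"), // camelCase form, as the compiler would set it
	}
}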
type ServiceDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } func (*ServiceDescriptorProto) ProtoMessage() {} func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{8} } func (m *ServiceDescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *ServiceDescriptorProto) GetMethod() []*MethodDescriptorProto { if m != nil { return m.Method } return nil } func (m *ServiceDescriptorProto) GetOptions() *ServiceOptions { if m != nil { return m.Options } return nil } // Describes a method of a service. type MethodDescriptorProto struct { Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // Input and output type names. These are resolved in the same way as // FieldDescriptorProto.type_name, but must refer to a message type. InputType *string `protobuf:"bytes,2,opt,name=input_type,json=inputType" json:"input_type,omitempty"` OutputType *string `protobuf:"bytes,3,opt,name=output_type,json=outputType" json:"output_type,omitempty"` Options *MethodOptions `protobuf:"bytes,4,opt,name=options" json:"options,omitempty"` // Identifies if client streams multiple client messages ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"` // Identifies if server streams multiple server messages ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } func (*MethodDescriptorProto) ProtoMessage() {} func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{9} } const Default_MethodDescriptorProto_ClientStreaming bool = false const Default_MethodDescriptorProto_ServerStreaming bool = false func (m *MethodDescriptorProto) GetName() string { if m != nil && m.Name != nil { return *m.Name } return "" } func (m *MethodDescriptorProto) GetInputType() string { if m != nil && m.InputType != nil { return *m.InputType } return "" } func (m *MethodDescriptorProto) GetOutputType() string { if m != nil && m.OutputType != nil { return *m.OutputType } return "" } func (m *MethodDescriptorProto) GetOptions() *MethodOptions { if m != nil { return m.Options } return nil } func (m *MethodDescriptorProto) GetClientStreaming() bool { if m != nil && m.ClientStreaming != nil { return *m.ClientStreaming } return Default_MethodDescriptorProto_ClientStreaming } func (m *MethodDescriptorProto) GetServerStreaming() bool { if m != nil && m.ServerStreaming != nil { return *m.ServerStreaming } return Default_MethodDescriptorProto_ServerStreaming } type FileOptions struct { // Sets the Java package where classes generated from this .proto will be // placed. By default, the proto package is used, but this is often // inappropriate because proto packages do not normally start with backwards // domain names. 
JavaPackage *string `protobuf:"bytes,1,opt,name=java_package,json=javaPackage" json:"java_package,omitempty"` // If set, all the classes from the .proto file are wrapped in a single // outer class with the given name. This applies to both Proto1 // (equivalent to the old "--one_java_file" option) and Proto2 (where // a .proto always translates to a single class, but you may want to // explicitly choose the class name). JavaOuterClassname *string `protobuf:"bytes,8,opt,name=java_outer_classname,json=javaOuterClassname" json:"java_outer_classname,omitempty"` // If set true, then the Java code generator will generate a separate .java // file for each top-level message, enum, and service defined in the .proto // file. Thus, these types will *not* be nested inside the outer class // named by java_outer_classname. However, the outer class will still be // generated to contain the file's getDescriptor() method as well as any // top-level extensions defined in the file. JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"` // This option does nothing. JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` // If set true, then the Java2 code generator will generate code that // throws an exception whenever an attempt is made to assign a non-UTF-8 // byte sequence to a string field. // Message reflection will do the same. // However, an extension field still accepts non-UTF-8 byte sequences. // This option has no effect on when used with the lite runtime. JavaStringCheckUtf8 *bool `protobuf:"varint,27,opt,name=java_string_check_utf8,json=javaStringCheckUtf8,def=0" json:"java_string_check_utf8,omitempty"` OptimizeFor *FileOptions_OptimizeMode `protobuf:"varint,9,opt,name=optimize_for,json=optimizeFor,enum=google.protobuf.FileOptions_OptimizeMode,def=1" json:"optimize_for,omitempty"` // Sets the Go package where structs generated from this .proto will be // placed. If omitted, the Go package will be derived from the following: // - The basename of the package import path, if provided. // - Otherwise, the package statement in the .proto file, if present. // - Otherwise, the basename of the .proto file, without extension. GoPackage *string `protobuf:"bytes,11,opt,name=go_package,json=goPackage" json:"go_package,omitempty"` // Should generic services be generated in each language? "Generic" services // are not specific to any particular RPC system. They are generated by the // main code generators in each language (without additional plugins). // Generic services were the only kind of service generation supported by // early versions of google.protobuf. // // Generic services are now considered deprecated in favor of using plugins // that generate code specific to your particular RPC system. Therefore, // these default to false. Old code which depends on generic services should // explicitly set them to true. 
CcGenericServices *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"` JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"` PyGenericServices *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"` PhpGenericServices *bool `protobuf:"varint,42,opt,name=php_generic_services,json=phpGenericServices,def=0" json:"php_generic_services,omitempty"` // Is this file deprecated? // Depending on the target platform, this can emit Deprecated annotations // for everything in the file, or it will be completely ignored; in the very // least, this is a formalization for deprecating files. Deprecated *bool `protobuf:"varint,23,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // Enables the use of arenas for the proto messages in this file. This applies // only to generated classes for C++. CcEnableArenas *bool `protobuf:"varint,31,opt,name=cc_enable_arenas,json=ccEnableArenas,def=0" json:"cc_enable_arenas,omitempty"` // Sets the objective c class prefix which is prepended to all objective c // generated classes from this .proto. There is no default. ObjcClassPrefix *string `protobuf:"bytes,36,opt,name=objc_class_prefix,json=objcClassPrefix" json:"objc_class_prefix,omitempty"` // Namespace for generated classes; defaults to the package. CsharpNamespace *string `protobuf:"bytes,37,opt,name=csharp_namespace,json=csharpNamespace" json:"csharp_namespace,omitempty"` // By default Swift generators will take the proto package and CamelCase it // replacing '.' with underscore and use that to prefix the types/symbols // defined. When this options is provided, they will use this value instead // to prefix the types/symbols defined. SwiftPrefix *string `protobuf:"bytes,39,opt,name=swift_prefix,json=swiftPrefix" json:"swift_prefix,omitempty"` // Sets the php class prefix which is prepended to all php generated classes // from this .proto. Default is empty. PhpClassPrefix *string `protobuf:"bytes,40,opt,name=php_class_prefix,json=phpClassPrefix" json:"php_class_prefix,omitempty"` // Use this option to change the namespace of php generated classes. Default // is empty. When this option is empty, the package name will be used for // determining the namespace. PhpNamespace *string `protobuf:"bytes,41,opt,name=php_namespace,json=phpNamespace" json:"php_namespace,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *FileOptions) Reset() { *m = FileOptions{} } func (m *FileOptions) String() string { return proto.CompactTextString(m) } func (*FileOptions) ProtoMessage() {} func (*FileOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{10} } var extRange_FileOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_FileOptions } const Default_FileOptions_JavaMultipleFiles bool = false const Default_FileOptions_JavaStringCheckUtf8 bool = false const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPEED const Default_FileOptions_CcGenericServices bool = false const Default_FileOptions_JavaGenericServices bool = false const Default_FileOptions_PyGenericServices bool = false const Default_FileOptions_PhpGenericServices bool = false const Default_FileOptions_Deprecated bool = false const Default_FileOptions_CcEnableArenas bool = false func (m *FileOptions) GetJavaPackage() string { if m != nil && m.JavaPackage != nil { return *m.JavaPackage } return "" } func (m *FileOptions) GetJavaOuterClassname() string { if m != nil && m.JavaOuterClassname != nil { return *m.JavaOuterClassname } return "" } func (m *FileOptions) GetJavaMultipleFiles() bool { if m != nil && m.JavaMultipleFiles != nil { return *m.JavaMultipleFiles } return Default_FileOptions_JavaMultipleFiles } func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool { if m != nil && m.JavaGenerateEqualsAndHash != nil { return *m.JavaGenerateEqualsAndHash } return false } func (m *FileOptions) GetJavaStringCheckUtf8() bool { if m != nil && m.JavaStringCheckUtf8 != nil { return *m.JavaStringCheckUtf8 } return Default_FileOptions_JavaStringCheckUtf8 } func (m *FileOptions) GetOptimizeFor() FileOptions_OptimizeMode { if m != nil && m.OptimizeFor != nil { return *m.OptimizeFor } return Default_FileOptions_OptimizeFor } func (m *FileOptions) GetGoPackage() string { if m != nil && m.GoPackage != nil { return *m.GoPackage } return "" } func (m *FileOptions) GetCcGenericServices() bool { if m != nil && m.CcGenericServices != nil { return *m.CcGenericServices } return Default_FileOptions_CcGenericServices } func (m *FileOptions) GetJavaGenericServices() bool { if m != nil && m.JavaGenericServices != nil { return *m.JavaGenericServices } return Default_FileOptions_JavaGenericServices } func (m *FileOptions) GetPyGenericServices() bool { if m != nil && m.PyGenericServices != nil { return *m.PyGenericServices } return Default_FileOptions_PyGenericServices } func (m *FileOptions) GetPhpGenericServices() bool { if m != nil && m.PhpGenericServices != nil { return *m.PhpGenericServices } return Default_FileOptions_PhpGenericServices } func (m *FileOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated } return Default_FileOptions_Deprecated } func (m *FileOptions) GetCcEnableArenas() bool { if m != nil && m.CcEnableArenas != nil { return *m.CcEnableArenas } return Default_FileOptions_CcEnableArenas } func (m *FileOptions) GetObjcClassPrefix() string { if m != nil && m.ObjcClassPrefix != nil { return *m.ObjcClassPrefix } return "" } func (m *FileOptions) GetCsharpNamespace() string { if m != nil && m.CsharpNamespace != nil { return *m.CsharpNamespace } 
return "" } func (m *FileOptions) GetSwiftPrefix() string { if m != nil && m.SwiftPrefix != nil { return *m.SwiftPrefix } return "" } func (m *FileOptions) GetPhpClassPrefix() string { if m != nil && m.PhpClassPrefix != nil { return *m.PhpClassPrefix } return "" } func (m *FileOptions) GetPhpNamespace() string { if m != nil && m.PhpNamespace != nil { return *m.PhpNamespace } return "" } func (m *FileOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } type MessageOptions struct { // Set true to use the old proto1 MessageSet wire format for extensions. // This is provided for backwards-compatibility with the MessageSet wire // format. You should not use this for any other reason: It's less // efficient, has fewer features, and is more complicated. // // The message must be defined exactly as follows: // message Foo { // option message_set_wire_format = true; // extensions 4 to max; // } // Note that the message cannot have any defined fields; MessageSets only // have extensions. // // All extensions of your type must be singular messages; e.g. they cannot // be int32s, enums, or repeated messages. // // Because this is an option, the above two restrictions are not enforced by // the protocol compiler. MessageSetWireFormat *bool `protobuf:"varint,1,opt,name=message_set_wire_format,json=messageSetWireFormat,def=0" json:"message_set_wire_format,omitempty"` // Disables the generation of the standard "descriptor()" accessor, which can // conflict with a field of the same name. This is meant to make migration // from proto1 easier; new code should avoid fields named "descriptor". NoStandardDescriptorAccessor *bool `protobuf:"varint,2,opt,name=no_standard_descriptor_accessor,json=noStandardDescriptorAccessor,def=0" json:"no_standard_descriptor_accessor,omitempty"` // Is this message deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the message, or it will be completely ignored; in the very least, // this is a formalization for deprecating messages. Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // Whether the message is an automatically generated map entry type for the // maps field. // // For maps fields: // map<KeyType, ValueType> map_field = 1; // The parsed descriptor looks like: // message MapFieldEntry { // option map_entry = true; // optional KeyType key = 1; // optional ValueType value = 2; // } // repeated MapFieldEntry map_field = 1; // // Implementations may choose not to generate the map_entry=true message, but // use a native map in the target language to hold the keys and values. // The reflection APIs in such implementions still need to work as // if the field is a repeated message field. // // NOTE: Do not set the option in .proto files. Always use the maps syntax // instead. The option should only be implicitly set by the proto compiler // parser. MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *MessageOptions) Reset() { *m = MessageOptions{} } func (m *MessageOptions) String() string { return proto.CompactTextString(m) } func (*MessageOptions) ProtoMessage() {} func (*MessageOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{11} } var extRange_MessageOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*MessageOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_MessageOptions } const Default_MessageOptions_MessageSetWireFormat bool = false const Default_MessageOptions_NoStandardDescriptorAccessor bool = false const Default_MessageOptions_Deprecated bool = false func (m *MessageOptions) GetMessageSetWireFormat() bool { if m != nil && m.MessageSetWireFormat != nil { return *m.MessageSetWireFormat } return Default_MessageOptions_MessageSetWireFormat } func (m *MessageOptions) GetNoStandardDescriptorAccessor() bool { if m != nil && m.NoStandardDescriptorAccessor != nil { return *m.NoStandardDescriptorAccessor } return Default_MessageOptions_NoStandardDescriptorAccessor } func (m *MessageOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated } return Default_MessageOptions_Deprecated } func (m *MessageOptions) GetMapEntry() bool { if m != nil && m.MapEntry != nil { return *m.MapEntry } return false } func (m *MessageOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } type FieldOptions struct { // The ctype option instructs the C++ code generator to use a different // representation of the field than it normally would. See the specific // options below. This option is not yet implemented in the open source // release -- sorry, we'll try to include it in a future version! Ctype *FieldOptions_CType `protobuf:"varint,1,opt,name=ctype,enum=google.protobuf.FieldOptions_CType,def=0" json:"ctype,omitempty"` // The packed option can be enabled for repeated primitive fields to enable // a more efficient representation on the wire. Rather than repeatedly // writing the tag and type for each element, the entire array is encoded as // a single length-delimited blob. In proto3, only explicit setting it to // false will avoid using packed encoding. Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"` // The jstype option determines the JavaScript type used for values of the // field. The option is permitted only for 64 bit integral and fixed types // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING // is represented as JavaScript string, which avoids loss of precision that // can happen when a large value is converted to a floating point JavaScript. // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to // use the JavaScript "number" type. The behavior of the default option // JS_NORMAL is implementation dependent. // // This option is an enum to permit additional types to be added, e.g. // goog.math.Integer. Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"` // Should this field be parsed lazily? Lazy applies only to message-type // fields. 
It means that when the outer message is initially parsed, the // inner message's contents will not be parsed but instead stored in encoded // form. The inner message will actually be parsed when it is first accessed. // // This is only a hint. Implementations are free to choose whether to use // eager or lazy parsing regardless of the value of this option. However, // setting this option true suggests that the protocol author believes that // using lazy parsing on this field is worth the additional bookkeeping // overhead typically needed to implement it. // // This option does not affect the public interface of any generated code; // all method signatures remain the same. Furthermore, thread-safety of the // interface is not affected by this option; const methods remain safe to // call from multiple threads concurrently, while non-const methods continue // to require exclusive access. // // // Note that implementations may choose not to check required fields within // a lazy sub-message. That is, calling IsInitialized() on the outer message // may return true even if the inner message has missing required fields. // This is necessary because otherwise the inner message would have to be // parsed in order to perform the check, defeating the purpose of lazy // parsing. An implementation which chooses not to check required fields // must be consistent about it. That is, for any particular sub-message, the // implementation must either *always* check its required fields, or *never* // check its required fields, regardless of whether or not the message has // been parsed. Lazy *bool `protobuf:"varint,5,opt,name=lazy,def=0" json:"lazy,omitempty"` // Is this field deprecated? // Depending on the target platform, this can emit Deprecated annotations // for accessors, or it will be completely ignored; in the very least, this // is a formalization for deprecating fields. Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // For Google-internal migration only. Do not use. Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *FieldOptions) Reset() { *m = FieldOptions{} } func (m *FieldOptions) String() string { return proto.CompactTextString(m) } func (*FieldOptions) ProtoMessage() {} func (*FieldOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{12} } var extRange_FieldOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*FieldOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_FieldOptions } const Default_FieldOptions_Ctype FieldOptions_CType = FieldOptions_STRING const Default_FieldOptions_Jstype FieldOptions_JSType = FieldOptions_JS_NORMAL const Default_FieldOptions_Lazy bool = false const Default_FieldOptions_Deprecated bool = false const Default_FieldOptions_Weak bool = false func (m *FieldOptions) GetCtype() FieldOptions_CType { if m != nil && m.Ctype != nil { return *m.Ctype } return Default_FieldOptions_Ctype } func (m *FieldOptions) GetPacked() bool { if m != nil && m.Packed != nil { return *m.Packed } return false } func (m *FieldOptions) GetJstype() FieldOptions_JSType { if m != nil && m.Jstype != nil { return *m.Jstype } return Default_FieldOptions_Jstype } func (m *FieldOptions) GetLazy() bool { if m != nil && m.Lazy != nil { return *m.Lazy } return Default_FieldOptions_Lazy } func (m *FieldOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated } return Default_FieldOptions_Deprecated } func (m *FieldOptions) GetWeak() bool { if m != nil && m.Weak != nil { return *m.Weak } return Default_FieldOptions_Weak } func (m *FieldOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } type OneofOptions struct { // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *OneofOptions) Reset() { *m = OneofOptions{} } func (m *OneofOptions) String() string { return proto.CompactTextString(m) } func (*OneofOptions) ProtoMessage() {} func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{13} } var extRange_OneofOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_OneofOptions } func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } type EnumOptions struct { // Set this option to true to allow mapping different tag names to the same // value. AllowAlias *bool `protobuf:"varint,2,opt,name=allow_alias,json=allowAlias" json:"allow_alias,omitempty"` // Is this enum deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the enum, or it will be completely ignored; in the very least, this // is a formalization for deprecating enums. Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *EnumOptions) Reset() { *m = EnumOptions{} } func (m *EnumOptions) String() string { return proto.CompactTextString(m) } func (*EnumOptions) ProtoMessage() {} func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{14} } var extRange_EnumOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*EnumOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_EnumOptions } const Default_EnumOptions_Deprecated bool = false func (m *EnumOptions) GetAllowAlias() bool { if m != nil && m.AllowAlias != nil { return *m.AllowAlias } return false } func (m *EnumOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated } return Default_EnumOptions_Deprecated } func (m *EnumOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } type EnumValueOptions struct { // Is this enum value deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the enum value, or it will be completely ignored; in the very least, // this is a formalization for deprecating enum values. Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } func (*EnumValueOptions) ProtoMessage() {} func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{15} } var extRange_EnumValueOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*EnumValueOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_EnumValueOptions } const Default_EnumValueOptions_Deprecated bool = false func (m *EnumValueOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated } return Default_EnumValueOptions_Deprecated } func (m *EnumValueOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } type ServiceOptions struct { // Is this service deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the service, or it will be completely ignored; in the very least, // this is a formalization for deprecating services. Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` // The parser stores options it doesn't recognize here. See above. 
UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } func (*ServiceOptions) ProtoMessage() {} func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{16} } var extRange_ServiceOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*ServiceOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_ServiceOptions } const Default_ServiceOptions_Deprecated bool = false func (m *ServiceOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated } return Default_ServiceOptions_Deprecated } func (m *ServiceOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } type MethodOptions struct { // Is this method deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the method, or it will be completely ignored; in the very least, // this is a formalization for deprecating methods. Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"` // The parser stores options it doesn't recognize here. See above. UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` proto.XXX_InternalExtensions `json:"-"` XXX_unrecognized []byte `json:"-"` } func (m *MethodOptions) Reset() { *m = MethodOptions{} } func (m *MethodOptions) String() string { return proto.CompactTextString(m) } func (*MethodOptions) ProtoMessage() {} func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{17} } var extRange_MethodOptions = []proto.ExtensionRange{ {Start: 1000, End: 536870911}, } func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange { return extRange_MethodOptions } const Default_MethodOptions_Deprecated bool = false const Default_MethodOptions_IdempotencyLevel MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN func (m *MethodOptions) GetDeprecated() bool { if m != nil && m.Deprecated != nil { return *m.Deprecated } return Default_MethodOptions_Deprecated } func (m *MethodOptions) GetIdempotencyLevel() MethodOptions_IdempotencyLevel { if m != nil && m.IdempotencyLevel != nil { return *m.IdempotencyLevel } return Default_MethodOptions_IdempotencyLevel } func (m *MethodOptions) GetUninterpretedOption() []*UninterpretedOption { if m != nil { return m.UninterpretedOption } return nil } // A message representing a option the parser does not recognize. This only // appears in options protos created by the compiler::Parser class. // DescriptorPool resolves these when building Descriptor objects. Therefore, // options protos in descriptor objects (e.g. returned by Descriptor::options(), // or produced by Descriptor::CopyTo()) will never have UninterpretedOptions // in them. 
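// Illustrative sketch, not part of the generated file: unset optional option
// fields fall back to the generated Default_ constants, while set fields are
// read through the stored pointer. exampleOptionDefaults is hypothetical.
func exampleOptionDefaults() {
	fo := &FileOptions{}
	fmt.Println(fo.GetOptimizeFor()) // SPEED — Default_FileOptions_OptimizeFor

	mo := &MethodOptions{
		IdempotencyLevel: MethodOptions_NO_SIDE_EFFECTS.Enum(),
	}
	fmt.Println(mo.GetIdempotencyLevel()) // NO_SIDE_EFFECTS
	fmt.Println(mo.GetDeprecated())       // false — Default_MethodOptions_Deprecated
}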
type UninterpretedOption struct { Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"` // The value of the uninterpreted option, in whatever type the tokenizer // identified it as during parsing. Exactly one of these should be set. IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } func (*UninterpretedOption) ProtoMessage() {} func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{18} } func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart { if m != nil { return m.Name } return nil } func (m *UninterpretedOption) GetIdentifierValue() string { if m != nil && m.IdentifierValue != nil { return *m.IdentifierValue } return "" } func (m *UninterpretedOption) GetPositiveIntValue() uint64 { if m != nil && m.PositiveIntValue != nil { return *m.PositiveIntValue } return 0 } func (m *UninterpretedOption) GetNegativeIntValue() int64 { if m != nil && m.NegativeIntValue != nil { return *m.NegativeIntValue } return 0 } func (m *UninterpretedOption) GetDoubleValue() float64 { if m != nil && m.DoubleValue != nil { return *m.DoubleValue } return 0 } func (m *UninterpretedOption) GetStringValue() []byte { if m != nil { return m.StringValue } return nil } func (m *UninterpretedOption) GetAggregateValue() string { if m != nil && m.AggregateValue != nil { return *m.AggregateValue } return "" } // The name of the uninterpreted option. Each string represents a segment in // a dot-separated name. is_extension is true iff a segment represents an // extension (denoted with parentheses in options specs in .proto files). // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents // "foo.(bar.baz).qux". 
type UninterpretedOption_NamePart struct { NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *UninterpretedOption_NamePart) Reset() { *m = UninterpretedOption_NamePart{} } func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) } func (*UninterpretedOption_NamePart) ProtoMessage() {} func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{18, 0} } func (m *UninterpretedOption_NamePart) GetNamePart() string { if m != nil && m.NamePart != nil { return *m.NamePart } return "" } func (m *UninterpretedOption_NamePart) GetIsExtension() bool { if m != nil && m.IsExtension != nil { return *m.IsExtension } return false } // Encapsulates information about the original source file from which a // FileDescriptorProto was generated. type SourceCodeInfo struct { // A Location identifies a piece of source code in a .proto file which // corresponds to a particular definition. This information is intended // to be useful to IDEs, code indexers, documentation generators, and similar // tools. // // For example, say we have a file like: // message Foo { // optional string foo = 1; // } // Let's look at just the field definition: // optional string foo = 1; // ^ ^^ ^^ ^ ^^^ // a bc de f ghi // We have the following locations: // span path represents // [a,i) [ 4, 0, 2, 0 ] The whole field definition. // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). // // Notes: // - A location may refer to a repeated field itself (i.e. not to any // particular index within it). This is used whenever a set of elements are // logically enclosed in a single code segment. For example, an entire // extend block (possibly containing multiple extension definitions) will // have an outer location whose path refers to the "extensions" repeated // field without an index. // - Multiple locations may have the same path. This happens when a single // logical declaration is spread out across multiple places. The most // obvious example is the "extend" block again -- there may be multiple // extend blocks in the same scope, each of which will have the same path. // - A location's span is not always a subset of its parent's span. For // example, the "extendee" of an extension declaration appears at the // beginning of the "extend" block and is shared by all extensions within // the block. // - Just because a location's span is a subset of some other location's span // does not mean that it is a descendent. For example, a "group" defines // both a type and a field in a single declaration. Thus, the locations // corresponding to the type and field and their components will overlap. // - Code which tries to interpret locations should probably be designed to // ignore those that it doesn't understand, as more types of locations could // be recorded in the future. 
Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } func (*SourceCodeInfo) ProtoMessage() {} func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{19} } func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location { if m != nil { return m.Location } return nil } type SourceCodeInfo_Location struct { // Identifies which part of the FileDescriptorProto was defined at this // location. // // Each element is a field number or an index. They form a path from // the root FileDescriptorProto to the place where the definition. For // example, this path: // [ 4, 3, 2, 7, 1 ] // refers to: // file.message_type(3) // 4, 3 // .field(7) // 2, 7 // .name() // 1 // This is because FileDescriptorProto.message_type has field number 4: // repeated DescriptorProto message_type = 4; // and DescriptorProto.field has field number 2: // repeated FieldDescriptorProto field = 2; // and FieldDescriptorProto.name has field number 1: // optional string name = 1; // // Thus, the above path gives the location of a field name. If we removed // the last element: // [ 4, 3, 2, 7 ] // this path refers to the whole field declaration (from the beginning // of the label to the terminating semicolon). Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` // Always has exactly three or four elements: start line, start column, // end line (optional, otherwise assumed same as start line), end column. // These are packed into a single field for efficiency. Note that line // and column numbers are zero-based -- typically you will want to add // 1 to each before displaying to a user. Span []int32 `protobuf:"varint,2,rep,packed,name=span" json:"span,omitempty"` // If this SourceCodeInfo represents a complete declaration, these are any // comments appearing before and after the declaration which appear to be // attached to the declaration. // // A series of line comments appearing on consecutive lines, with no other // tokens appearing on those lines, will be treated as a single comment. // // leading_detached_comments will keep paragraphs of comments that appear // before (but not connected to) the current element. Each paragraph, // separated by empty lines, will be one comment element in the repeated // field. // // Only the comment content is provided; comment markers (e.g. //) are // stripped out. For block comments, leading whitespace and an asterisk // will be stripped from the beginning of each line other than the first. // Newlines are included in the output. // // Examples: // // optional int32 foo = 1; // Comment attached to foo. // // Comment attached to bar. // optional int32 bar = 2; // // optional string baz = 3; // // Comment attached to baz. // // Another line attached to baz. // // // Comment attached to qux. // // // // Another line attached to qux. // optional double qux = 4; // // // Detached comment for corge. This is not leading or trailing comments // // to qux or corge because there are blank lines separating it from // // both. // // // Detached comment for corge paragraph 2. // // optional string corge = 5; // /* Block comment attached // * to corge. Leading asterisks // * will be removed. */ // /* Block comment attached to // * grault. */ // optional int32 grault = 6; // // // ignored detached comments. 
LeadingComments *string `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"` TrailingComments *string `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"` LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } func (*SourceCodeInfo_Location) ProtoMessage() {} func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{19, 0} } func (m *SourceCodeInfo_Location) GetPath() []int32 { if m != nil { return m.Path } return nil } func (m *SourceCodeInfo_Location) GetSpan() []int32 { if m != nil { return m.Span } return nil } func (m *SourceCodeInfo_Location) GetLeadingComments() string { if m != nil && m.LeadingComments != nil { return *m.LeadingComments } return "" } func (m *SourceCodeInfo_Location) GetTrailingComments() string { if m != nil && m.TrailingComments != nil { return *m.TrailingComments } return "" } func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string { if m != nil { return m.LeadingDetachedComments } return nil } // Describes the relationship between generated code and its original source // file. A GeneratedCodeInfo message is associated with only one generated // source file, but may contain references to different source .proto files. type GeneratedCodeInfo struct { // An Annotation connects some span of text in generated code to an element // of its generating .proto file. Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } func (*GeneratedCodeInfo) ProtoMessage() {} func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{20} } func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation { if m != nil { return m.Annotation } return nil } type GeneratedCodeInfo_Annotation struct { // Identifies the element in the original source .proto file. This field // is formatted the same as SourceCodeInfo.Location.path. Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` // Identifies the filesystem path to the original source .proto. SourceFile *string `protobuf:"bytes,2,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"` // Identifies the starting offset in bytes in the generated code // that relates to the identified object. Begin *int32 `protobuf:"varint,3,opt,name=begin" json:"begin,omitempty"` // Identifies the ending offset in bytes in the generated code that // relates to the identified offset. The end offset should be one past // the last relevant byte (so the length of the text = end - begin). 
End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *GeneratedCodeInfo_Annotation) Reset() { *m = GeneratedCodeInfo_Annotation{} } func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) } func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{20, 0} } func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 { if m != nil { return m.Path } return nil } func (m *GeneratedCodeInfo_Annotation) GetSourceFile() string { if m != nil && m.SourceFile != nil { return *m.SourceFile } return "" } func (m *GeneratedCodeInfo_Annotation) GetBegin() int32 { if m != nil && m.Begin != nil { return *m.Begin } return 0 } func (m *GeneratedCodeInfo_Annotation) GetEnd() int32 { if m != nil && m.End != nil { return *m.End } return 0 } func init() { proto.RegisterType((*FileDescriptorSet)(nil), "google.protobuf.FileDescriptorSet") proto.RegisterType((*FileDescriptorProto)(nil), "google.protobuf.FileDescriptorProto") proto.RegisterType((*DescriptorProto)(nil), "google.protobuf.DescriptorProto") proto.RegisterType((*DescriptorProto_ExtensionRange)(nil), "google.protobuf.DescriptorProto.ExtensionRange") proto.RegisterType((*DescriptorProto_ReservedRange)(nil), "google.protobuf.DescriptorProto.ReservedRange") proto.RegisterType((*ExtensionRangeOptions)(nil), "google.protobuf.ExtensionRangeOptions") proto.RegisterType((*FieldDescriptorProto)(nil), "google.protobuf.FieldDescriptorProto") proto.RegisterType((*OneofDescriptorProto)(nil), "google.protobuf.OneofDescriptorProto") proto.RegisterType((*EnumDescriptorProto)(nil), "google.protobuf.EnumDescriptorProto") proto.RegisterType((*EnumDescriptorProto_EnumReservedRange)(nil), "google.protobuf.EnumDescriptorProto.EnumReservedRange") proto.RegisterType((*EnumValueDescriptorProto)(nil), "google.protobuf.EnumValueDescriptorProto") proto.RegisterType((*ServiceDescriptorProto)(nil), "google.protobuf.ServiceDescriptorProto") proto.RegisterType((*MethodDescriptorProto)(nil), "google.protobuf.MethodDescriptorProto") proto.RegisterType((*FileOptions)(nil), "google.protobuf.FileOptions") proto.RegisterType((*MessageOptions)(nil), "google.protobuf.MessageOptions") proto.RegisterType((*FieldOptions)(nil), "google.protobuf.FieldOptions") proto.RegisterType((*OneofOptions)(nil), "google.protobuf.OneofOptions") proto.RegisterType((*EnumOptions)(nil), "google.protobuf.EnumOptions") proto.RegisterType((*EnumValueOptions)(nil), "google.protobuf.EnumValueOptions") proto.RegisterType((*ServiceOptions)(nil), "google.protobuf.ServiceOptions") proto.RegisterType((*MethodOptions)(nil), "google.protobuf.MethodOptions") proto.RegisterType((*UninterpretedOption)(nil), "google.protobuf.UninterpretedOption") proto.RegisterType((*UninterpretedOption_NamePart)(nil), "google.protobuf.UninterpretedOption.NamePart") proto.RegisterType((*SourceCodeInfo)(nil), "google.protobuf.SourceCodeInfo") proto.RegisterType((*SourceCodeInfo_Location)(nil), "google.protobuf.SourceCodeInfo.Location") proto.RegisterType((*GeneratedCodeInfo)(nil), "google.protobuf.GeneratedCodeInfo") proto.RegisterType((*GeneratedCodeInfo_Annotation)(nil), "google.protobuf.GeneratedCodeInfo.Annotation") proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value) proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, 
FieldDescriptorProto_Label_value) proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value) proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value) proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value) proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value) } func init() { proto.RegisterFile("descriptor.proto", fileDescriptorDescriptor) } var fileDescriptorDescriptor = []byte{ // 2487 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xcd, 0x6f, 0xdb, 0xc8, 0x15, 0x5f, 0x7d, 0x5a, 0x7a, 0x92, 0xe5, 0xf1, 0xd8, 0x9b, 0x30, 0xde, 0x8f, 0x38, 0xda, 0x8f, 0x38, 0x49, 0xab, 0x2c, 0x9c, 0xc4, 0xc9, 0x3a, 0xc5, 0xb6, 0xb2, 0xc4, 0x78, 0x95, 0xca, 0x92, 0x4a, 0xc9, 0xdd, 0x64, 0x8b, 0x82, 0x18, 0x93, 0x23, 0x89, 0x09, 0x45, 0x72, 0x49, 0x2a, 0x89, 0x83, 0x1e, 0x02, 0xf4, 0xd4, 0xff, 0xa0, 0x28, 0x8a, 0x1e, 0x7a, 0x59, 0xa0, 0xd7, 0x02, 0x05, 0xda, 0x7b, 0xaf, 0x05, 0x7a, 0xef, 0xa1, 0x40, 0x0b, 0xb4, 0x7f, 0x42, 0x8f, 0xc5, 0xcc, 0x90, 0x14, 0xf5, 0x95, 0x78, 0x17, 0x48, 0xf6, 0x64, 0xcf, 0xef, 0xfd, 0xde, 0xe3, 0x9b, 0x37, 0x6f, 0xde, 0xbc, 0x19, 0x01, 0xd2, 0xa9, 0xa7, 0xb9, 0x86, 0xe3, 0xdb, 0x6e, 0xc5, 0x71, 0x6d, 0xdf, 0xc6, 0x6b, 0x03, 0xdb, 0x1e, 0x98, 0x54, 0x8c, 0x4e, 0xc6, 0xfd, 0xf2, 0x11, 0xac, 0xdf, 0x33, 0x4c, 0x5a, 0x8f, 0x88, 0x5d, 0xea, 0xe3, 0x3b, 0x90, 0xee, 0x1b, 0x26, 0x95, 0x12, 0xdb, 0xa9, 0x9d, 0xc2, 0xee, 0x87, 0x95, 0x19, 0xa5, 0xca, 0xb4, 0x46, 0x87, 0xc1, 0x0a, 0xd7, 0x28, 0xff, 0x3b, 0x0d, 0x1b, 0x0b, 0xa4, 0x18, 0x43, 0xda, 0x22, 0x23, 0x66, 0x31, 0xb1, 0x93, 0x57, 0xf8, 0xff, 0x58, 0x82, 0x15, 0x87, 0x68, 0x8f, 0xc9, 0x80, 0x4a, 0x49, 0x0e, 0x87, 0x43, 0xfc, 0x3e, 0x80, 0x4e, 0x1d, 0x6a, 0xe9, 0xd4, 0xd2, 0x4e, 0xa5, 0xd4, 0x76, 0x6a, 0x27, 0xaf, 0xc4, 0x10, 0x7c, 0x0d, 0xd6, 0x9d, 0xf1, 0x89, 0x69, 0x68, 0x6a, 0x8c, 0x06, 0xdb, 0xa9, 0x9d, 0x8c, 0x82, 0x84, 0xa0, 0x3e, 0x21, 0x5f, 0x86, 0xb5, 0xa7, 0x94, 0x3c, 0x8e, 0x53, 0x0b, 0x9c, 0x5a, 0x62, 0x70, 0x8c, 0x58, 0x83, 0xe2, 0x88, 0x7a, 0x1e, 0x19, 0x50, 0xd5, 0x3f, 0x75, 0xa8, 0x94, 0xe6, 0xb3, 0xdf, 0x9e, 0x9b, 0xfd, 0xec, 0xcc, 0x0b, 0x81, 0x56, 0xef, 0xd4, 0xa1, 0xb8, 0x0a, 0x79, 0x6a, 0x8d, 0x47, 0xc2, 0x42, 0x66, 0x49, 0xfc, 0x64, 0x6b, 0x3c, 0x9a, 0xb5, 0x92, 0x63, 0x6a, 0x81, 0x89, 0x15, 0x8f, 0xba, 0x4f, 0x0c, 0x8d, 0x4a, 0x59, 0x6e, 0xe0, 0xf2, 0x9c, 0x81, 0xae, 0x90, 0xcf, 0xda, 0x08, 0xf5, 0x70, 0x0d, 0xf2, 0xf4, 0x99, 0x4f, 0x2d, 0xcf, 0xb0, 0x2d, 0x69, 0x85, 0x1b, 0xf9, 0x68, 0xc1, 0x2a, 0x52, 0x53, 0x9f, 0x35, 0x31, 0xd1, 0xc3, 0x7b, 0xb0, 0x62, 0x3b, 0xbe, 0x61, 0x5b, 0x9e, 0x94, 0xdb, 0x4e, 0xec, 0x14, 0x76, 0xdf, 0x5d, 0x98, 0x08, 0x6d, 0xc1, 0x51, 0x42, 0x32, 0x6e, 0x00, 0xf2, 0xec, 0xb1, 0xab, 0x51, 0x55, 0xb3, 0x75, 0xaa, 0x1a, 0x56, 0xdf, 0x96, 0xf2, 0xdc, 0xc0, 0xc5, 0xf9, 0x89, 0x70, 0x62, 0xcd, 0xd6, 0x69, 0xc3, 0xea, 0xdb, 0x4a, 0xc9, 0x9b, 0x1a, 0xe3, 0x73, 0x90, 0xf5, 0x4e, 0x2d, 0x9f, 0x3c, 0x93, 0x8a, 0x3c, 0x43, 0x82, 0x51, 0xf9, 0xcf, 0x59, 0x58, 0x3b, 0x4b, 0x8a, 0xdd, 0x85, 0x4c, 0x9f, 0xcd, 0x52, 0x4a, 0x7e, 0x93, 0x18, 0x08, 0x9d, 0xe9, 0x20, 0x66, 0xbf, 0x65, 0x10, 0xab, 0x50, 0xb0, 0xa8, 0xe7, 0x53, 0x5d, 0x64, 0x44, 0xea, 0x8c, 0x39, 0x05, 0x42, 0x69, 0x3e, 0xa5, 0xd2, 0xdf, 0x2a, 0xa5, 0x1e, 0xc0, 0x5a, 0xe4, 0x92, 0xea, 0x12, 0x6b, 0x10, 0xe6, 0xe6, 0xf5, 
0x57, 0x79, 0x52, 0x91, 0x43, 0x3d, 0x85, 0xa9, 0x29, 0x25, 0x3a, 0x35, 0xc6, 0x75, 0x00, 0xdb, 0xa2, 0x76, 0x5f, 0xd5, 0xa9, 0x66, 0x4a, 0xb9, 0x25, 0x51, 0x6a, 0x33, 0xca, 0x5c, 0x94, 0x6c, 0x81, 0x6a, 0x26, 0xfe, 0x74, 0x92, 0x6a, 0x2b, 0x4b, 0x32, 0xe5, 0x48, 0x6c, 0xb2, 0xb9, 0x6c, 0x3b, 0x86, 0x92, 0x4b, 0x59, 0xde, 0x53, 0x3d, 0x98, 0x59, 0x9e, 0x3b, 0x51, 0x79, 0xe5, 0xcc, 0x94, 0x40, 0x4d, 0x4c, 0x6c, 0xd5, 0x8d, 0x0f, 0xf1, 0x07, 0x10, 0x01, 0x2a, 0x4f, 0x2b, 0xe0, 0x55, 0xa8, 0x18, 0x82, 0x2d, 0x32, 0xa2, 0x5b, 0xcf, 0xa1, 0x34, 0x1d, 0x1e, 0xbc, 0x09, 0x19, 0xcf, 0x27, 0xae, 0xcf, 0xb3, 0x30, 0xa3, 0x88, 0x01, 0x46, 0x90, 0xa2, 0x96, 0xce, 0xab, 0x5c, 0x46, 0x61, 0xff, 0xe2, 0x1f, 0x4d, 0x26, 0x9c, 0xe2, 0x13, 0xfe, 0x78, 0x7e, 0x45, 0xa7, 0x2c, 0xcf, 0xce, 0x7b, 0xeb, 0x36, 0xac, 0x4e, 0x4d, 0xe0, 0xac, 0x9f, 0x2e, 0xff, 0x02, 0xde, 0x5e, 0x68, 0x1a, 0x3f, 0x80, 0xcd, 0xb1, 0x65, 0x58, 0x3e, 0x75, 0x1d, 0x97, 0xb2, 0x8c, 0x15, 0x9f, 0x92, 0xfe, 0xb3, 0xb2, 0x24, 0xe7, 0x8e, 0xe3, 0x6c, 0x61, 0x45, 0xd9, 0x18, 0xcf, 0x83, 0x57, 0xf3, 0xb9, 0xff, 0xae, 0xa0, 0x17, 0x2f, 0x5e, 0xbc, 0x48, 0x96, 0x7f, 0x9d, 0x85, 0xcd, 0x45, 0x7b, 0x66, 0xe1, 0xf6, 0x3d, 0x07, 0x59, 0x6b, 0x3c, 0x3a, 0xa1, 0x2e, 0x0f, 0x52, 0x46, 0x09, 0x46, 0xb8, 0x0a, 0x19, 0x93, 0x9c, 0x50, 0x53, 0x4a, 0x6f, 0x27, 0x76, 0x4a, 0xbb, 0xd7, 0xce, 0xb4, 0x2b, 0x2b, 0x4d, 0xa6, 0xa2, 0x08, 0x4d, 0xfc, 0x19, 0xa4, 0x83, 0x12, 0xcd, 0x2c, 0x5c, 0x3d, 0x9b, 0x05, 0xb6, 0x97, 0x14, 0xae, 0x87, 0xdf, 0x81, 0x3c, 0xfb, 0x2b, 0x72, 0x23, 0xcb, 0x7d, 0xce, 0x31, 0x80, 0xe5, 0x05, 0xde, 0x82, 0x1c, 0xdf, 0x26, 0x3a, 0x0d, 0x8f, 0xb6, 0x68, 0xcc, 0x12, 0x4b, 0xa7, 0x7d, 0x32, 0x36, 0x7d, 0xf5, 0x09, 0x31, 0xc7, 0x94, 0x27, 0x7c, 0x5e, 0x29, 0x06, 0xe0, 0x4f, 0x19, 0x86, 0x2f, 0x42, 0x41, 0xec, 0x2a, 0xc3, 0xd2, 0xe9, 0x33, 0x5e, 0x3d, 0x33, 0x8a, 0xd8, 0x68, 0x0d, 0x86, 0xb0, 0xcf, 0x3f, 0xf2, 0x6c, 0x2b, 0x4c, 0x4d, 0xfe, 0x09, 0x06, 0xf0, 0xcf, 0xdf, 0x9e, 0x2d, 0xdc, 0xef, 0x2d, 0x9e, 0xde, 0x6c, 0x4e, 0x95, 0xff, 0x94, 0x84, 0x34, 0xaf, 0x17, 0x6b, 0x50, 0xe8, 0x3d, 0xec, 0xc8, 0x6a, 0xbd, 0x7d, 0x7c, 0xd0, 0x94, 0x51, 0x02, 0x97, 0x00, 0x38, 0x70, 0xaf, 0xd9, 0xae, 0xf6, 0x50, 0x32, 0x1a, 0x37, 0x5a, 0xbd, 0xbd, 0x9b, 0x28, 0x15, 0x29, 0x1c, 0x0b, 0x20, 0x1d, 0x27, 0xdc, 0xd8, 0x45, 0x19, 0x8c, 0xa0, 0x28, 0x0c, 0x34, 0x1e, 0xc8, 0xf5, 0xbd, 0x9b, 0x28, 0x3b, 0x8d, 0xdc, 0xd8, 0x45, 0x2b, 0x78, 0x15, 0xf2, 0x1c, 0x39, 0x68, 0xb7, 0x9b, 0x28, 0x17, 0xd9, 0xec, 0xf6, 0x94, 0x46, 0xeb, 0x10, 0xe5, 0x23, 0x9b, 0x87, 0x4a, 0xfb, 0xb8, 0x83, 0x20, 0xb2, 0x70, 0x24, 0x77, 0xbb, 0xd5, 0x43, 0x19, 0x15, 0x22, 0xc6, 0xc1, 0xc3, 0x9e, 0xdc, 0x45, 0xc5, 0x29, 0xb7, 0x6e, 0xec, 0xa2, 0xd5, 0xe8, 0x13, 0x72, 0xeb, 0xf8, 0x08, 0x95, 0xf0, 0x3a, 0xac, 0x8a, 0x4f, 0x84, 0x4e, 0xac, 0xcd, 0x40, 0x7b, 0x37, 0x11, 0x9a, 0x38, 0x22, 0xac, 0xac, 0x4f, 0x01, 0x7b, 0x37, 0x11, 0x2e, 0xd7, 0x20, 0xc3, 0xb3, 0x0b, 0x63, 0x28, 0x35, 0xab, 0x07, 0x72, 0x53, 0x6d, 0x77, 0x7a, 0x8d, 0x76, 0xab, 0xda, 0x44, 0x89, 0x09, 0xa6, 0xc8, 0x3f, 0x39, 0x6e, 0x28, 0x72, 0x1d, 0x25, 0xe3, 0x58, 0x47, 0xae, 0xf6, 0xe4, 0x3a, 0x4a, 0x95, 0x35, 0xd8, 0x5c, 0x54, 0x27, 0x17, 0xee, 0x8c, 0xd8, 0x12, 0x27, 0x97, 0x2c, 0x31, 0xb7, 0x35, 0xb7, 0xc4, 0xff, 0x4a, 0xc2, 0xc6, 0x82, 0xb3, 0x62, 0xe1, 0x47, 0x7e, 0x08, 0x19, 0x91, 0xa2, 0xe2, 0xf4, 0xbc, 0xb2, 0xf0, 0xd0, 0xe1, 0x09, 0x3b, 0x77, 0x82, 0x72, 0xbd, 0x78, 0x07, 0x91, 0x5a, 0xd2, 0x41, 0x30, 0x13, 0x73, 0x35, 0xfd, 0xe7, 0x73, 0x35, 0x5d, 0x1c, 0x7b, 0x7b, 0x67, 0x39, 0xf6, 0x38, 0xf6, 0xcd, 0x6a, 0x7b, 0x66, 
0x41, 0x6d, 0xbf, 0x0b, 0xeb, 0x73, 0x86, 0xce, 0x5c, 0x63, 0x7f, 0x99, 0x00, 0x69, 0x59, 0x70, 0x5e, 0x51, 0xe9, 0x92, 0x53, 0x95, 0xee, 0xee, 0x6c, 0x04, 0x2f, 0x2d, 0x5f, 0x84, 0xb9, 0xb5, 0xfe, 0x3a, 0x01, 0xe7, 0x16, 0x77, 0x8a, 0x0b, 0x7d, 0xf8, 0x0c, 0xb2, 0x23, 0xea, 0x0f, 0xed, 0xb0, 0x5b, 0xfa, 0x78, 0xc1, 0x19, 0xcc, 0xc4, 0xb3, 0x8b, 0x1d, 0x68, 0xc5, 0x0f, 0xf1, 0xd4, 0xb2, 0x76, 0x4f, 0x78, 0x33, 0xe7, 0xe9, 0xaf, 0x92, 0xf0, 0xf6, 0x42, 0xe3, 0x0b, 0x1d, 0x7d, 0x0f, 0xc0, 0xb0, 0x9c, 0xb1, 0x2f, 0x3a, 0x22, 0x51, 0x60, 0xf3, 0x1c, 0xe1, 0xc5, 0x8b, 0x15, 0xcf, 0xb1, 0x1f, 0xc9, 0x53, 0x5c, 0x0e, 0x02, 0xe2, 0x84, 0x3b, 0x13, 0x47, 0xd3, 0xdc, 0xd1, 0xf7, 0x97, 0xcc, 0x74, 0x2e, 0x31, 0x3f, 0x01, 0xa4, 0x99, 0x06, 0xb5, 0x7c, 0xd5, 0xf3, 0x5d, 0x4a, 0x46, 0x86, 0x35, 0xe0, 0x27, 0x48, 0x6e, 0x3f, 0xd3, 0x27, 0xa6, 0x47, 0x95, 0x35, 0x21, 0xee, 0x86, 0x52, 0xa6, 0xc1, 0x13, 0xc8, 0x8d, 0x69, 0x64, 0xa7, 0x34, 0x84, 0x38, 0xd2, 0x28, 0xff, 0x31, 0x07, 0x85, 0x58, 0x5f, 0x8d, 0x2f, 0x41, 0xf1, 0x11, 0x79, 0x42, 0xd4, 0xf0, 0xae, 0x24, 0x22, 0x51, 0x60, 0x58, 0x27, 0xb8, 0x2f, 0x7d, 0x02, 0x9b, 0x9c, 0x62, 0x8f, 0x7d, 0xea, 0xaa, 0x9a, 0x49, 0x3c, 0x8f, 0x07, 0x2d, 0xc7, 0xa9, 0x98, 0xc9, 0xda, 0x4c, 0x54, 0x0b, 0x25, 0xf8, 0x16, 0x6c, 0x70, 0x8d, 0xd1, 0xd8, 0xf4, 0x0d, 0xc7, 0xa4, 0x2a, 0xbb, 0xbd, 0x79, 0xfc, 0x24, 0x89, 0x3c, 0x5b, 0x67, 0x8c, 0xa3, 0x80, 0xc0, 0x3c, 0xf2, 0x70, 0x1d, 0xde, 0xe3, 0x6a, 0x03, 0x6a, 0x51, 0x97, 0xf8, 0x54, 0xa5, 0x5f, 0x8d, 0x89, 0xe9, 0xa9, 0xc4, 0xd2, 0xd5, 0x21, 0xf1, 0x86, 0xd2, 0x26, 0x33, 0x70, 0x90, 0x94, 0x12, 0xca, 0x05, 0x46, 0x3c, 0x0c, 0x78, 0x32, 0xa7, 0x55, 0x2d, 0xfd, 0x73, 0xe2, 0x0d, 0xf1, 0x3e, 0x9c, 0xe3, 0x56, 0x3c, 0xdf, 0x35, 0xac, 0x81, 0xaa, 0x0d, 0xa9, 0xf6, 0x58, 0x1d, 0xfb, 0xfd, 0x3b, 0xd2, 0x3b, 0xf1, 0xef, 0x73, 0x0f, 0xbb, 0x9c, 0x53, 0x63, 0x94, 0x63, 0xbf, 0x7f, 0x07, 0x77, 0xa1, 0xc8, 0x16, 0x63, 0x64, 0x3c, 0xa7, 0x6a, 0xdf, 0x76, 0xf9, 0xd1, 0x58, 0x5a, 0x50, 0x9a, 0x62, 0x11, 0xac, 0xb4, 0x03, 0x85, 0x23, 0x5b, 0xa7, 0xfb, 0x99, 0x6e, 0x47, 0x96, 0xeb, 0x4a, 0x21, 0xb4, 0x72, 0xcf, 0x76, 0x59, 0x42, 0x0d, 0xec, 0x28, 0xc0, 0x05, 0x91, 0x50, 0x03, 0x3b, 0x0c, 0xef, 0x2d, 0xd8, 0xd0, 0x34, 0x31, 0x67, 0x43, 0x53, 0x83, 0x3b, 0x96, 0x27, 0xa1, 0xa9, 0x60, 0x69, 0xda, 0xa1, 0x20, 0x04, 0x39, 0xee, 0xe1, 0x4f, 0xe1, 0xed, 0x49, 0xb0, 0xe2, 0x8a, 0xeb, 0x73, 0xb3, 0x9c, 0x55, 0xbd, 0x05, 0x1b, 0xce, 0xe9, 0xbc, 0x22, 0x9e, 0xfa, 0xa2, 0x73, 0x3a, 0xab, 0x76, 0x1b, 0x36, 0x9d, 0xa1, 0x33, 0xaf, 0x77, 0x35, 0xae, 0x87, 0x9d, 0xa1, 0x33, 0xab, 0xf8, 0x11, 0xbf, 0x70, 0xbb, 0x54, 0x23, 0x3e, 0xd5, 0xa5, 0xf3, 0x71, 0x7a, 0x4c, 0x80, 0xaf, 0x03, 0xd2, 0x34, 0x95, 0x5a, 0xe4, 0xc4, 0xa4, 0x2a, 0x71, 0xa9, 0x45, 0x3c, 0xe9, 0x62, 0x9c, 0x5c, 0xd2, 0x34, 0x99, 0x4b, 0xab, 0x5c, 0x88, 0xaf, 0xc2, 0xba, 0x7d, 0xf2, 0x48, 0x13, 0x29, 0xa9, 0x3a, 0x2e, 0xed, 0x1b, 0xcf, 0xa4, 0x0f, 0x79, 0x7c, 0xd7, 0x98, 0x80, 0x27, 0x64, 0x87, 0xc3, 0xf8, 0x0a, 0x20, 0xcd, 0x1b, 0x12, 0xd7, 0xe1, 0x35, 0xd9, 0x73, 0x88, 0x46, 0xa5, 0x8f, 0x04, 0x55, 0xe0, 0xad, 0x10, 0x66, 0x5b, 0xc2, 0x7b, 0x6a, 0xf4, 0xfd, 0xd0, 0xe2, 0x65, 0xb1, 0x25, 0x38, 0x16, 0x58, 0xdb, 0x01, 0xc4, 0x42, 0x31, 0xf5, 0xe1, 0x1d, 0x4e, 0x2b, 0x39, 0x43, 0x27, 0xfe, 0xdd, 0x0f, 0x60, 0x95, 0x31, 0x27, 0x1f, 0xbd, 0x22, 0x1a, 0x32, 0x67, 0x18, 0xfb, 0xe2, 0x6b, 0xeb, 0x8d, 0xcb, 0xfb, 0x50, 0x8c, 0xe7, 0x27, 0xce, 0x83, 0xc8, 0x50, 0x94, 0x60, 0xcd, 0x4a, 0xad, 0x5d, 0x67, 0x6d, 0xc6, 0x97, 0x32, 0x4a, 0xb2, 0x76, 0xa7, 0xd9, 0xe8, 0xc9, 0xaa, 0x72, 0xdc, 0xea, 0x35, 
0x8e, 0x64, 0x94, 0x8a, 0xf7, 0xd5, 0x7f, 0x4d, 0x42, 0x69, 0xfa, 0x8a, 0x84, 0x7f, 0x00, 0xe7, 0xc3, 0xf7, 0x0c, 0x8f, 0xfa, 0xea, 0x53, 0xc3, 0xe5, 0x5b, 0x66, 0x44, 0xc4, 0xf1, 0x15, 0x2d, 0xda, 0x66, 0xc0, 0xea, 0x52, 0xff, 0x0b, 0xc3, 0x65, 0x1b, 0x62, 0x44, 0x7c, 0xdc, 0x84, 0x8b, 0x96, 0xad, 0x7a, 0x3e, 0xb1, 0x74, 0xe2, 0xea, 0xea, 0xe4, 0x25, 0x49, 0x25, 0x9a, 0x46, 0x3d, 0xcf, 0x16, 0x47, 0x55, 0x64, 0xe5, 0x5d, 0xcb, 0xee, 0x06, 0xe4, 0x49, 0x0d, 0xaf, 0x06, 0xd4, 0x99, 0x04, 0x4b, 0x2d, 0x4b, 0xb0, 0x77, 0x20, 0x3f, 0x22, 0x8e, 0x4a, 0x2d, 0xdf, 0x3d, 0xe5, 0x8d, 0x71, 0x4e, 0xc9, 0x8d, 0x88, 0x23, 0xb3, 0xf1, 0x9b, 0xb9, 0x9f, 0xfc, 0x23, 0x05, 0xc5, 0x78, 0x73, 0xcc, 0xee, 0x1a, 0x1a, 0x3f, 0x47, 0x12, 0xbc, 0xd2, 0x7c, 0xf0, 0xd2, 0x56, 0xba, 0x52, 0x63, 0x07, 0xcc, 0x7e, 0x56, 0xb4, 0xac, 0x8a, 0xd0, 0x64, 0x87, 0x3b, 0xab, 0x2d, 0x54, 0xb4, 0x08, 0x39, 0x25, 0x18, 0xe1, 0x43, 0xc8, 0x3e, 0xf2, 0xb8, 0xed, 0x2c, 0xb7, 0xfd, 0xe1, 0xcb, 0x6d, 0xdf, 0xef, 0x72, 0xe3, 0xf9, 0xfb, 0x5d, 0xb5, 0xd5, 0x56, 0x8e, 0xaa, 0x4d, 0x25, 0x50, 0xc7, 0x17, 0x20, 0x6d, 0x92, 0xe7, 0xa7, 0xd3, 0x47, 0x11, 0x87, 0xce, 0x1a, 0xf8, 0x0b, 0x90, 0x7e, 0x4a, 0xc9, 0xe3, 0xe9, 0x03, 0x80, 0x43, 0xaf, 0x31, 0xf5, 0xaf, 0x43, 0x86, 0xc7, 0x0b, 0x03, 0x04, 0x11, 0x43, 0x6f, 0xe1, 0x1c, 0xa4, 0x6b, 0x6d, 0x85, 0xa5, 0x3f, 0x82, 0xa2, 0x40, 0xd5, 0x4e, 0x43, 0xae, 0xc9, 0x28, 0x59, 0xbe, 0x05, 0x59, 0x11, 0x04, 0xb6, 0x35, 0xa2, 0x30, 0xa0, 0xb7, 0x82, 0x61, 0x60, 0x23, 0x11, 0x4a, 0x8f, 0x8f, 0x0e, 0x64, 0x05, 0x25, 0xe3, 0xcb, 0xeb, 0x41, 0x31, 0xde, 0x17, 0xbf, 0x99, 0x9c, 0xfa, 0x4b, 0x02, 0x0a, 0xb1, 0x3e, 0x97, 0x35, 0x28, 0xc4, 0x34, 0xed, 0xa7, 0x2a, 0x31, 0x0d, 0xe2, 0x05, 0x49, 0x01, 0x1c, 0xaa, 0x32, 0xe4, 0xac, 0x8b, 0xf6, 0x46, 0x9c, 0xff, 0x5d, 0x02, 0xd0, 0x6c, 0x8b, 0x39, 0xe3, 0x60, 0xe2, 0x3b, 0x75, 0xf0, 0xb7, 0x09, 0x28, 0x4d, 0xf7, 0x95, 0x33, 0xee, 0x5d, 0xfa, 0x4e, 0xdd, 0xfb, 0x67, 0x12, 0x56, 0xa7, 0xba, 0xc9, 0xb3, 0x7a, 0xf7, 0x15, 0xac, 0x1b, 0x3a, 0x1d, 0x39, 0xb6, 0x4f, 0x2d, 0xed, 0x54, 0x35, 0xe9, 0x13, 0x6a, 0x4a, 0x65, 0x5e, 0x28, 0xae, 0xbf, 0xbc, 0x5f, 0xad, 0x34, 0x26, 0x7a, 0x4d, 0xa6, 0xb6, 0xbf, 0xd1, 0xa8, 0xcb, 0x47, 0x9d, 0x76, 0x4f, 0x6e, 0xd5, 0x1e, 0xaa, 0xc7, 0xad, 0x1f, 0xb7, 0xda, 0x5f, 0xb4, 0x14, 0x64, 0xcc, 0xd0, 0x5e, 0xe3, 0x56, 0xef, 0x00, 0x9a, 0x75, 0x0a, 0x9f, 0x87, 0x45, 0x6e, 0xa1, 0xb7, 0xf0, 0x06, 0xac, 0xb5, 0xda, 0x6a, 0xb7, 0x51, 0x97, 0x55, 0xf9, 0xde, 0x3d, 0xb9, 0xd6, 0xeb, 0x8a, 0x17, 0x88, 0x88, 0xdd, 0x9b, 0xde, 0xd4, 0xbf, 0x49, 0xc1, 0xc6, 0x02, 0x4f, 0x70, 0x35, 0xb8, 0x3b, 0x88, 0xeb, 0xcc, 0xf7, 0xcf, 0xe2, 0x7d, 0x85, 0x1d, 0xf9, 0x1d, 0xe2, 0xfa, 0xc1, 0x55, 0xe3, 0x0a, 0xb0, 0x28, 0x59, 0xbe, 0xd1, 0x37, 0xa8, 0x1b, 0x3c, 0xd8, 0x88, 0x0b, 0xc5, 0xda, 0x04, 0x17, 0x6f, 0x36, 0xdf, 0x03, 0xec, 0xd8, 0x9e, 0xe1, 0x1b, 0x4f, 0xa8, 0x6a, 0x58, 0xe1, 0xeb, 0x0e, 0xbb, 0x60, 0xa4, 0x15, 0x14, 0x4a, 0x1a, 0x96, 0x1f, 0xb1, 0x2d, 0x3a, 0x20, 0x33, 0x6c, 0x56, 0xc0, 0x53, 0x0a, 0x0a, 0x25, 0x11, 0xfb, 0x12, 0x14, 0x75, 0x7b, 0xcc, 0xba, 0x2e, 0xc1, 0x63, 0xe7, 0x45, 0x42, 0x29, 0x08, 0x2c, 0xa2, 0x04, 0xfd, 0xf4, 0xe4, 0x59, 0xa9, 0xa8, 0x14, 0x04, 0x26, 0x28, 0x97, 0x61, 0x8d, 0x0c, 0x06, 0x2e, 0x33, 0x1e, 0x1a, 0x12, 0x37, 0x84, 0x52, 0x04, 0x73, 0xe2, 0xd6, 0x7d, 0xc8, 0x85, 0x71, 0x60, 0x47, 0x32, 0x8b, 0x84, 0xea, 0x88, 0x6b, 0x6f, 0x72, 0x27, 0xaf, 0xe4, 0xac, 0x50, 0x78, 0x09, 0x8a, 0x86, 0xa7, 0x4e, 0x5e, 0xc9, 0x93, 0xdb, 0xc9, 0x9d, 0x9c, 0x52, 0x30, 0xbc, 0xe8, 0x85, 0xb1, 0xfc, 0x75, 0x12, 0x4a, 0xd3, 0xaf, 0xfc, 0xb8, 
0x0e, 0x39, 0xd3, 0xd6, 0x08, 0x4f, 0x2d, 0xf1, 0x13, 0xd3, 0xce, 0x2b, 0x7e, 0x18, 0xa8, 0x34, 0x03, 0xbe, 0x12, 0x69, 0x6e, 0xfd, 0x2d, 0x01, 0xb9, 0x10, 0xc6, 0xe7, 0x20, 0xed, 0x10, 0x7f, 0xc8, 0xcd, 0x65, 0x0e, 0x92, 0x28, 0xa1, 0xf0, 0x31, 0xc3, 0x3d, 0x87, 0x58, 0x3c, 0x05, 0x02, 0x9c, 0x8d, 0xd9, 0xba, 0x9a, 0x94, 0xe8, 0xfc, 0xfa, 0x61, 0x8f, 0x46, 0xd4, 0xf2, 0xbd, 0x70, 0x5d, 0x03, 0xbc, 0x16, 0xc0, 0xf8, 0x1a, 0xac, 0xfb, 0x2e, 0x31, 0xcc, 0x29, 0x6e, 0x9a, 0x73, 0x51, 0x28, 0x88, 0xc8, 0xfb, 0x70, 0x21, 0xb4, 0xab, 0x53, 0x9f, 0x68, 0x43, 0xaa, 0x4f, 0x94, 0xb2, 0xfc, 0x99, 0xe1, 0x7c, 0x40, 0xa8, 0x07, 0xf2, 0x50, 0xb7, 0xfc, 0xf7, 0x04, 0xac, 0x87, 0x17, 0x26, 0x3d, 0x0a, 0xd6, 0x11, 0x00, 0xb1, 0x2c, 0xdb, 0x8f, 0x87, 0x6b, 0x3e, 0x95, 0xe7, 0xf4, 0x2a, 0xd5, 0x48, 0x49, 0x89, 0x19, 0xd8, 0x1a, 0x01, 0x4c, 0x24, 0x4b, 0xc3, 0x76, 0x11, 0x0a, 0xc1, 0x4f, 0x38, 0xfc, 0x77, 0x40, 0x71, 0xc5, 0x06, 0x01, 0xb1, 0x9b, 0x15, 0xde, 0x84, 0xcc, 0x09, 0x1d, 0x18, 0x56, 0xf0, 0x30, 0x2b, 0x06, 0xe1, 0x43, 0x48, 0x3a, 0x7a, 0x08, 0x39, 0xf8, 0x19, 0x6c, 0x68, 0xf6, 0x68, 0xd6, 0xdd, 0x03, 0x34, 0x73, 0xcd, 0xf7, 0x3e, 0x4f, 0x7c, 0x09, 0x93, 0x16, 0xf3, 0x7f, 0x89, 0xc4, 0xef, 0x93, 0xa9, 0xc3, 0xce, 0xc1, 0x1f, 0x92, 0x5b, 0x87, 0x42, 0xb5, 0x13, 0xce, 0x54, 0xa1, 0x7d, 0x93, 0x6a, 0xcc, 0xfb, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0xa3, 0x58, 0x22, 0x30, 0xdf, 0x1c, 0x00, 0x00, }
{ "pile_set_name": "Github" }
<?php /* * This file is part of Psy Shell. * * (c) 2012-2018 Justin Hileman * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Psy\Command\ListCommand; use Psy\VarDumper\Presenter; use Symfony\Component\Console\Input\InputInterface; /** * Interface Enumerator class. * * @deprecated Nothing should use this anymore */ class InterfaceEnumerator extends Enumerator { public function __construct(Presenter $presenter) { @\trigger_error('InterfaceEnumerator is no longer used', E_USER_DEPRECATED); parent::__construct($presenter); } /** * {@inheritdoc} */ protected function listItems(InputInterface $input, \Reflector $reflector = null, $target = null) { // only list interfaces when no Reflector is present. // // @todo make a NamespaceReflector and pass that in for commands like: // // ls --interfaces Foo // // ... for listing interfaces in the Foo namespace if ($reflector !== null || $target !== null) { return; } // only list interfaces if we are specifically asked if (!$input->getOption('interfaces')) { return; } $interfaces = $this->prepareInterfaces(\get_declared_interfaces()); if (empty($interfaces)) { return; } return [ 'Interfaces' => $interfaces, ]; } /** * Prepare formatted interface array. * * @param array $interfaces * * @return array */ protected function prepareInterfaces(array $interfaces) { \natcasesort($interfaces); // My kingdom for a generator. $ret = []; foreach ($interfaces as $name) { if ($this->showItem($name)) { $ret[$name] = [ 'name' => $name, 'style' => self::IS_CLASS, 'value' => $this->presentSignature($name), ]; } } return $ret; } }
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- """ *************************************************************************** ImportIntoSpatialite.py --------------------- Date : October 2016 Copyright : (C) 2016 by Mathieu Pellerin Email : nirvn dot asia at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Mathieu Pellerin' __date__ = 'October 2016' __copyright__ = '(C) 2012, Mathieu Pellerin' from qgis.core import (QgsDataSourceUri, QgsFeatureSink, QgsProcessingAlgorithm, QgsVectorLayerExporter, QgsProcessing, QgsProcessingException, QgsProcessingParameterFeatureSource, QgsProcessingParameterVectorLayer, QgsProcessingParameterField, QgsProcessingParameterString, QgsProcessingParameterBoolean, QgsWkbTypes, QgsProviderRegistry, QgsProviderConnectionException, QgsAbstractDatabaseProviderConnection) from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm class ImportIntoSpatialite(QgisAlgorithm): DATABASE = 'DATABASE' TABLENAME = 'TABLENAME' INPUT = 'INPUT' OVERWRITE = 'OVERWRITE' CREATEINDEX = 'CREATEINDEX' GEOMETRY_COLUMN = 'GEOMETRY_COLUMN' LOWERCASE_NAMES = 'LOWERCASE_NAMES' DROP_STRING_LENGTH = 'DROP_STRING_LENGTH' FORCE_SINGLEPART = 'FORCE_SINGLEPART' PRIMARY_KEY = 'PRIMARY_KEY' ENCODING = 'ENCODING' def group(self): return self.tr('Database') def groupId(self): return 'database' def __init__(self): super().__init__() def initAlgorithm(self, config=None): self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT, self.tr('Layer to import'), types=[QgsProcessing.TypeVector])) self.addParameter(QgsProcessingParameterVectorLayer(self.DATABASE, self.tr('File database'), optional=False)) self.addParameter( QgsProcessingParameterString(self.TABLENAME, self.tr('Table to import to (leave blank to use layer name)'), optional=True)) self.addParameter(QgsProcessingParameterField(self.PRIMARY_KEY, self.tr('Primary key field'), None, self.INPUT, QgsProcessingParameterField.Any, False, True)) self.addParameter(QgsProcessingParameterString(self.GEOMETRY_COLUMN, self.tr('Geometry column'), 'geom')) self.addParameter(QgsProcessingParameterString(self.ENCODING, self.tr('Encoding'), 'UTF-8', optional=True)) self.addParameter(QgsProcessingParameterBoolean(self.OVERWRITE, self.tr('Overwrite'), True)) self.addParameter(QgsProcessingParameterBoolean(self.CREATEINDEX, self.tr('Create spatial index'), True)) self.addParameter( QgsProcessingParameterBoolean(self.LOWERCASE_NAMES, self.tr('Convert field names to lowercase'), True)) self.addParameter(QgsProcessingParameterBoolean(self.DROP_STRING_LENGTH, self.tr('Drop length constraints on character fields'), False)) self.addParameter(QgsProcessingParameterBoolean(self.FORCE_SINGLEPART, self.tr('Create single-part geometries instead of multi-part'), False)) def flags(self): return super().flags() | QgsProcessingAlgorithm.FlagNoThreading def name(self): return 'importintospatialite' def displayName(self): return self.tr('Export to SpatiaLite') def shortDescription(self): return self.tr('Exports a vector layer to a SpatiaLite database') def tags(self): return self.tr('import,table,layer,into,copy').split(',') def processAlgorithm(self, parameters, context, feedback): database = 
self.parameterAsVectorLayer(parameters, self.DATABASE, context)
        databaseuri = database.dataProvider().dataSourceUri()
        uri = QgsDataSourceUri(databaseuri)
        if uri.database() == '':
            if '|layername' in databaseuri:
                databaseuri = databaseuri[:databaseuri.find('|layername')]
            elif '|layerid' in databaseuri:
                databaseuri = databaseuri[:databaseuri.find('|layerid')]
            uri = QgsDataSourceUri('dbname=\'%s\'' % (databaseuri))

        try:
            md = QgsProviderRegistry.instance().providerMetadata('spatialite')
            conn = md.createConnection(uri.uri(), {})
        except QgsProviderConnectionException:
            raise QgsProcessingException(self.tr('Could not connect to {}').format(uri.uri()))

        overwrite = self.parameterAsBoolean(parameters, self.OVERWRITE, context)
        createIndex = self.parameterAsBoolean(parameters, self.CREATEINDEX, context)
        convertLowerCase = self.parameterAsBoolean(parameters, self.LOWERCASE_NAMES, context)
        dropStringLength = self.parameterAsBoolean(parameters, self.DROP_STRING_LENGTH, context)
        forceSinglePart = self.parameterAsBoolean(parameters, self.FORCE_SINGLEPART, context)
        primaryKeyField = self.parameterAsString(parameters, self.PRIMARY_KEY, context) or 'id'
        encoding = self.parameterAsString(parameters, self.ENCODING, context)

        source = self.parameterAsSource(parameters, self.INPUT, context)
        if source is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))

        table = self.parameterAsString(parameters, self.TABLENAME, context)
        if table:
            table = table.strip()  # assign the result: a bare table.strip() is a no-op
        if not table or table == '':
            table = source.sourceName()
            table = table.replace('.', '_')
        table = table.replace(' ', '').lower()

        providerName = 'spatialite'

        geomColumn = self.parameterAsString(parameters, self.GEOMETRY_COLUMN, context)
        if not geomColumn:
            geomColumn = 'geom'

        options = {}
        if overwrite:
            options['overwrite'] = True
        if convertLowerCase:
            options['lowercaseFieldNames'] = True
            geomColumn = geomColumn.lower()
        if dropStringLength:
            options['dropStringConstraints'] = True
        if forceSinglePart:
            options['forceSinglePartGeometryType'] = True

        # Clear geometry column for non-geometry tables
        if source.wkbType() == QgsWkbTypes.NoGeometry:
            geomColumn = None

        uri.setDataSource('', table, geomColumn, '', primaryKeyField)

        if encoding:
            options['fileEncoding'] = encoding

        exporter = QgsVectorLayerExporter(uri.uri(), providerName, source.fields(),
                                          source.wkbType(), source.sourceCrs(), overwrite, options)

        if exporter.errorCode() != QgsVectorLayerExporter.NoError:
            raise QgsProcessingException(
                self.tr('Error importing to Spatialite\n{0}').format(exporter.errorMessage()))

        features = source.getFeatures()
        total = 100.0 / source.featureCount() if source.featureCount() else 0
        for current, f in enumerate(features):
            if feedback.isCanceled():
                break

            if not exporter.addFeature(f, QgsFeatureSink.FastInsert):
                feedback.reportError(exporter.errorMessage())

            feedback.setProgress(int(current * total))

        exporter.flushBuffer()
        if exporter.errorCode() != QgsVectorLayerExporter.NoError:
            raise QgsProcessingException(
                self.tr('Error importing to Spatialite\n{0}').format(exporter.errorMessage()))

        if geomColumn and createIndex:
            try:
                options = QgsAbstractDatabaseProviderConnection.SpatialIndexOptions()
                options.geometryColumnName = geomColumn
                conn.createSpatialIndex('', table, options)
            except QgsProviderConnectionException as e:
                raise QgsProcessingException(self.tr('Error creating spatial index:\n{0}').format(e))

        return {}
{ "pile_set_name": "Github" }
// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package proto import ( "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/runtime/protoiface" ) // Size returns the size in bytes of the wire-format encoding of m. func Size(m Message) int { return MarshalOptions{}.Size(m) } // Size returns the size in bytes of the wire-format encoding of m. func (o MarshalOptions) Size(m Message) int { // Treat a nil message interface as an empty message; nothing to output. if m == nil { return 0 } return sizeMessage(m.ProtoReflect()) } func sizeMessage(m protoreflect.Message) (size int) { methods := protoMethods(m) if methods != nil && methods.Size != nil { out := methods.Size(protoiface.SizeInput{ Message: m, }) return out.Size } if methods != nil && methods.Marshal != nil { // This is not efficient, but we don't have any choice. // This case is mainly used for legacy types with a Marshal method. out, _ := methods.Marshal(protoiface.MarshalInput{ Message: m, }) return len(out.Buf) } return sizeMessageSlow(m) } func sizeMessageSlow(m protoreflect.Message) (size int) { if messageset.IsMessageSet(m.Descriptor()) { return sizeMessageSet(m) } m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { size += sizeField(fd, v) return true }) size += len(m.GetUnknown()) return size } func sizeField(fd protoreflect.FieldDescriptor, value protoreflect.Value) (size int) { num := fd.Number() switch { case fd.IsList(): return sizeList(num, fd, value.List()) case fd.IsMap(): return sizeMap(num, fd, value.Map()) default: return protowire.SizeTag(num) + sizeSingular(num, fd.Kind(), value) } } func sizeList(num protowire.Number, fd protoreflect.FieldDescriptor, list protoreflect.List) (size int) { if fd.IsPacked() && list.Len() > 0 { content := 0 for i, llen := 0, list.Len(); i < llen; i++ { content += sizeSingular(num, fd.Kind(), list.Get(i)) } return protowire.SizeTag(num) + protowire.SizeBytes(content) } for i, llen := 0, list.Len(); i < llen; i++ { size += protowire.SizeTag(num) + sizeSingular(num, fd.Kind(), list.Get(i)) } return size } func sizeMap(num protowire.Number, fd protoreflect.FieldDescriptor, mapv protoreflect.Map) (size int) { mapv.Range(func(key protoreflect.MapKey, value protoreflect.Value) bool { size += protowire.SizeTag(num) size += protowire.SizeBytes(sizeField(fd.MapKey(), key.Value()) + sizeField(fd.MapValue(), value)) return true }) return size }
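// ---------------------------------------------------------------------------
// Illustrative usage sketch — a separate program, not part of the proto
// package source above. proto.Size computes the exact wire-format length, so
// it always agrees with len(proto.Marshal(m)); durationpb is used here only
// as a convenient self-contained message type.
package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	m := durationpb.New(90 * time.Second)

	b, err := proto.Marshal(m)
	if err != nil {
		panic(err)
	}

	// The two numbers agree. Size is cheaper than a full marshal when the
	// message provides the fast-path Size method handled above; otherwise
	// sizeMessageSlow walks the fields.
	fmt.Println(proto.Size(m), len(b))
}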
{ "pile_set_name": "Github" }
/* * platform.c - DesignWare HS OTG Controller platform driver * * Copyright (C) Matthijs Kooijman <[email protected]> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions, and the following disclaimer, * without modification. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The names of the above-listed copyright holders may not be used * to endorse or promote products derived from this software without * specific prior written permission. * * ALTERNATIVELY, this software may be distributed under the terms of the * GNU General Public License ("GPL") as published by the Free Software * Foundation; either version 2 of the License, or (at your option) any * later version. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include <linux/kernel.h> #include <linux/module.h> #include <linux/slab.h> #include <linux/clk.h> #include <linux/device.h> #include <linux/dma-mapping.h> #include <linux/of_device.h> #include <linux/mutex.h> #include <linux/platform_device.h> #include <linux/phy/phy.h> #include <linux/platform_data/s3c-hsotg.h> #include <linux/reset.h> #include <linux/usb/of.h> #include "core.h" #include "hcd.h" #include "debug.h" static const char dwc2_driver_name[] = "dwc2"; /* * Check the dr_mode against the module configuration and hardware * capabilities. * * The hardware, module, and dr_mode, can each be set to host, device, * or otg. Check that all these values are compatible and adjust the * value of dr_mode if possible. 
* * actual * HW MOD dr_mode dr_mode * ------------------------------ * HST HST any : HST * HST DEV any : --- * HST OTG any : HST * * DEV HST any : --- * DEV DEV any : DEV * DEV OTG any : DEV * * OTG HST any : HST * OTG DEV any : DEV * OTG OTG any : dr_mode */ static int dwc2_get_dr_mode(struct dwc2_hsotg *hsotg) { enum usb_dr_mode mode; hsotg->dr_mode = usb_get_dr_mode(hsotg->dev); if (hsotg->dr_mode == USB_DR_MODE_UNKNOWN) hsotg->dr_mode = USB_DR_MODE_OTG; mode = hsotg->dr_mode; if (dwc2_hw_is_device(hsotg)) { if (IS_ENABLED(CONFIG_USB_DWC2_HOST)) { dev_err(hsotg->dev, "Controller does not support host mode.\n"); return -EINVAL; } mode = USB_DR_MODE_PERIPHERAL; } else if (dwc2_hw_is_host(hsotg)) { if (IS_ENABLED(CONFIG_USB_DWC2_PERIPHERAL)) { dev_err(hsotg->dev, "Controller does not support device mode.\n"); return -EINVAL; } mode = USB_DR_MODE_HOST; } else { if (IS_ENABLED(CONFIG_USB_DWC2_HOST)) mode = USB_DR_MODE_HOST; else if (IS_ENABLED(CONFIG_USB_DWC2_PERIPHERAL)) mode = USB_DR_MODE_PERIPHERAL; } if (mode != hsotg->dr_mode) { dev_warn(hsotg->dev, "Configuration mismatch. dr_mode forced to %s\n", mode == USB_DR_MODE_HOST ? "host" : "device"); hsotg->dr_mode = mode; } return 0; } static int __dwc2_lowlevel_hw_enable(struct dwc2_hsotg *hsotg) { struct platform_device *pdev = to_platform_device(hsotg->dev); int ret; ret = regulator_bulk_enable(ARRAY_SIZE(hsotg->supplies), hsotg->supplies); if (ret) return ret; if (hsotg->clk) { ret = clk_prepare_enable(hsotg->clk); if (ret) return ret; } if (hsotg->uphy) { ret = usb_phy_init(hsotg->uphy); } else if (hsotg->plat && hsotg->plat->phy_init) { ret = hsotg->plat->phy_init(pdev, hsotg->plat->phy_type); } else { ret = phy_power_on(hsotg->phy); if (ret == 0) ret = phy_init(hsotg->phy); } return ret; } /** * dwc2_lowlevel_hw_enable - enable platform lowlevel hw resources * @hsotg: The driver state * * A wrapper for platform code responsible for controlling * low-level USB platform resources (phy, clock, regulators) */ int dwc2_lowlevel_hw_enable(struct dwc2_hsotg *hsotg) { int ret = __dwc2_lowlevel_hw_enable(hsotg); if (ret == 0) hsotg->ll_hw_enabled = true; return ret; } static int __dwc2_lowlevel_hw_disable(struct dwc2_hsotg *hsotg) { struct platform_device *pdev = to_platform_device(hsotg->dev); int ret = 0; if (hsotg->uphy) { usb_phy_shutdown(hsotg->uphy); } else if (hsotg->plat && hsotg->plat->phy_exit) { ret = hsotg->plat->phy_exit(pdev, hsotg->plat->phy_type); } else { ret = phy_exit(hsotg->phy); if (ret == 0) ret = phy_power_off(hsotg->phy); } if (ret) return ret; if (hsotg->clk) clk_disable_unprepare(hsotg->clk); ret = regulator_bulk_disable(ARRAY_SIZE(hsotg->supplies), hsotg->supplies); return ret; } /** * dwc2_lowlevel_hw_disable - disable platform lowlevel hw resources * @hsotg: The driver state * * A wrapper for platform code responsible for controlling * low-level USB platform resources (phy, clock, regulators) */ int dwc2_lowlevel_hw_disable(struct dwc2_hsotg *hsotg) { int ret = __dwc2_lowlevel_hw_disable(hsotg); if (ret == 0) hsotg->ll_hw_enabled = false; return ret; } static int dwc2_lowlevel_hw_init(struct dwc2_hsotg *hsotg) { int i, ret; hsotg->reset = devm_reset_control_get_optional(hsotg->dev, "dwc2"); if (IS_ERR(hsotg->reset)) { ret = PTR_ERR(hsotg->reset); dev_err(hsotg->dev, "error getting reset control %d\n", ret); return ret; } reset_control_deassert(hsotg->reset); /* Set default UTMI width */ hsotg->phyif = GUSBCFG_PHYIF16; /* * Attempt to find a generic PHY, then look for an old style * USB PHY and then fall back to 
pdata */ hsotg->phy = devm_phy_get(hsotg->dev, "usb2-phy"); if (IS_ERR(hsotg->phy)) { ret = PTR_ERR(hsotg->phy); switch (ret) { case -ENODEV: case -ENOSYS: hsotg->phy = NULL; break; case -EPROBE_DEFER: return ret; default: dev_err(hsotg->dev, "error getting phy %d\n", ret); return ret; } } if (!hsotg->phy) { hsotg->uphy = devm_usb_get_phy(hsotg->dev, USB_PHY_TYPE_USB2); if (IS_ERR(hsotg->uphy)) { ret = PTR_ERR(hsotg->uphy); switch (ret) { case -ENODEV: case -ENXIO: hsotg->uphy = NULL; break; case -EPROBE_DEFER: return ret; default: dev_err(hsotg->dev, "error getting usb phy %d\n", ret); return ret; } } } hsotg->plat = dev_get_platdata(hsotg->dev); if (hsotg->phy) { /* * If using the generic PHY framework, check if the PHY bus * width is 8-bit and set the phyif appropriately. */ if (phy_get_bus_width(hsotg->phy) == 8) hsotg->phyif = GUSBCFG_PHYIF8; } /* Clock */ hsotg->clk = devm_clk_get(hsotg->dev, "otg"); if (IS_ERR(hsotg->clk)) { hsotg->clk = NULL; dev_dbg(hsotg->dev, "cannot get otg clock\n"); } /* Regulators */ for (i = 0; i < ARRAY_SIZE(hsotg->supplies); i++) hsotg->supplies[i].supply = dwc2_hsotg_supply_names[i]; ret = devm_regulator_bulk_get(hsotg->dev, ARRAY_SIZE(hsotg->supplies), hsotg->supplies); if (ret) { dev_err(hsotg->dev, "failed to request supplies: %d\n", ret); return ret; } return 0; } /** * dwc2_driver_remove() - Called when the DWC_otg core is unregistered with the * DWC_otg driver * * @dev: Platform device * * This routine is called, for example, when the rmmod command is executed. The * device may or may not be electrically present. If it is present, the driver * stops device processing. Any resources used on behalf of this device are * freed. */ static int dwc2_driver_remove(struct platform_device *dev) { struct dwc2_hsotg *hsotg = platform_get_drvdata(dev); dwc2_debugfs_exit(hsotg); if (hsotg->hcd_enabled) dwc2_hcd_remove(hsotg); if (hsotg->gadget_enabled) dwc2_hsotg_remove(hsotg); if (hsotg->ll_hw_enabled) dwc2_lowlevel_hw_disable(hsotg); reset_control_assert(hsotg->reset); return 0; } /** * dwc2_driver_shutdown() - Called on device shutdown * * @dev: Platform device * * In specific conditions (involving usb hubs) dwc2 devices can create a * lot of interrupts, even to the point of overwhelming devices running * at low frequencies. Some devices need to do special clock handling * at shutdown-time which may bring the system clock below the threshold * of being able to handle the dwc2 interrupts. Disabling dwc2-irqs * prevents reboots/poweroffs from getting stuck in such cases. */ static void dwc2_driver_shutdown(struct platform_device *dev) { struct dwc2_hsotg *hsotg = platform_get_drvdata(dev); disable_irq(hsotg->irq); } /** * dwc2_driver_probe() - Called when the DWC_otg core is bound to the DWC_otg * driver * * @dev: Platform device * * This routine creates the driver components required to control the device * (core, HCD, and PCD) and initializes the device. The driver components are * stored in a dwc2_hsotg structure. A reference to the dwc2_hsotg is saved * in the device private data. This allows the driver to access the dwc2_hsotg * structure on subsequent calls to driver methods for this device. */ static int dwc2_driver_probe(struct platform_device *dev) { struct dwc2_hsotg *hsotg; struct resource *res; int retval; hsotg = devm_kzalloc(&dev->dev, sizeof(*hsotg), GFP_KERNEL); if (!hsotg) return -ENOMEM; hsotg->dev = &dev->dev; /* * Use reasonable defaults so platforms don't have to provide these. 
*/ if (!dev->dev.dma_mask) dev->dev.dma_mask = &dev->dev.coherent_dma_mask; retval = dma_set_coherent_mask(&dev->dev, DMA_BIT_MASK(32)); if (retval) return retval; res = platform_get_resource(dev, IORESOURCE_MEM, 0); hsotg->regs = devm_ioremap_resource(&dev->dev, res); if (IS_ERR(hsotg->regs)) return PTR_ERR(hsotg->regs); dev_dbg(&dev->dev, "mapped PA %08lx to VA %p\n", (unsigned long)res->start, hsotg->regs); retval = dwc2_lowlevel_hw_init(hsotg); if (retval) return retval; spin_lock_init(&hsotg->lock); hsotg->irq = platform_get_irq(dev, 0); if (hsotg->irq < 0) { dev_err(&dev->dev, "missing IRQ resource\n"); return hsotg->irq; } dev_dbg(hsotg->dev, "registering common handler for irq%d\n", hsotg->irq); retval = devm_request_irq(hsotg->dev, hsotg->irq, dwc2_handle_common_intr, IRQF_SHARED, dev_name(hsotg->dev), hsotg); if (retval) return retval; retval = dwc2_lowlevel_hw_enable(hsotg); if (retval) return retval; retval = dwc2_get_dr_mode(hsotg); if (retval) goto error; /* * Reset before dwc2_get_hwparams() then it could get power-on real * reset value form registers. */ dwc2_core_reset_and_force_dr_mode(hsotg); /* Detect config values from hardware */ retval = dwc2_get_hwparams(hsotg); if (retval) goto error; dwc2_force_dr_mode(hsotg); retval = dwc2_init_params(hsotg); if (retval) goto error; if (hsotg->dr_mode != USB_DR_MODE_HOST) { retval = dwc2_gadget_init(hsotg, hsotg->irq); if (retval) goto error; hsotg->gadget_enabled = 1; } if (hsotg->dr_mode != USB_DR_MODE_PERIPHERAL) { retval = dwc2_hcd_init(hsotg); if (retval) { if (hsotg->gadget_enabled) dwc2_hsotg_remove(hsotg); goto error; } hsotg->hcd_enabled = 1; } platform_set_drvdata(dev, hsotg); dwc2_debugfs_init(hsotg); /* Gadget code manages lowlevel hw on its own */ if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) dwc2_lowlevel_hw_disable(hsotg); return 0; error: dwc2_lowlevel_hw_disable(hsotg); return retval; } static int __maybe_unused dwc2_suspend(struct device *dev) { struct dwc2_hsotg *dwc2 = dev_get_drvdata(dev); int ret = 0; if (dwc2_is_device_mode(dwc2)) dwc2_hsotg_suspend(dwc2); if (dwc2->ll_hw_enabled) ret = __dwc2_lowlevel_hw_disable(dwc2); return ret; } static int __maybe_unused dwc2_resume(struct device *dev) { struct dwc2_hsotg *dwc2 = dev_get_drvdata(dev); int ret = 0; if (dwc2->ll_hw_enabled) { ret = __dwc2_lowlevel_hw_enable(dwc2); if (ret) return ret; } if (dwc2_is_device_mode(dwc2)) ret = dwc2_hsotg_resume(dwc2); return ret; } static const struct dev_pm_ops dwc2_dev_pm_ops = { SET_SYSTEM_SLEEP_PM_OPS(dwc2_suspend, dwc2_resume) }; static struct platform_driver dwc2_platform_driver = { .driver = { .name = dwc2_driver_name, .of_match_table = dwc2_of_match_table, .pm = &dwc2_dev_pm_ops, }, .probe = dwc2_driver_probe, .remove = dwc2_driver_remove, .shutdown = dwc2_driver_shutdown, }; module_platform_driver(dwc2_platform_driver); MODULE_DESCRIPTION("DESIGNWARE HS OTG Platform Glue"); MODULE_AUTHOR("Matthijs Kooijman <[email protected]>"); MODULE_LICENSE("Dual BSD/GPL");
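// ---------------------------------------------------------------------------
// Illustrative sketch — a standalone Go program, not kernel code: the dr_mode
// resolution table in the comment above reduces to a small pure function.
// All names here are hypothetical; the real driver reads the hardware
// capability from registers and the module configuration from IS_ENABLED()
// rather than taking them as arguments.
package main

import (
	"errors"
	"fmt"
)

type drMode int

const (
	modeOTG drMode = iota // supports both roles
	modeHost
	modeDevice
)

// resolveDrMode mirrors dwc2_get_dr_mode(): hardware capability (hw) and the
// compiled-in module configuration (mod) constrain the requested dr_mode.
// The error returns correspond to the "---" rows of the table.
func resolveDrMode(hw, mod, requested drMode) (drMode, error) {
	switch {
	case hw == modeDevice && mod == modeHost:
		return 0, errors.New("controller does not support host mode")
	case hw == modeHost && mod == modeDevice:
		return 0, errors.New("controller does not support device mode")
	case hw == modeDevice || mod == modeDevice:
		return modeDevice, nil // remaining DEV rows all resolve to DEV
	case hw == modeHost || mod == modeHost:
		return modeHost, nil // remaining HST rows all resolve to HST
	default:
		return requested, nil // OTG hardware + OTG module: honor dr_mode
	}
}

func main() {
	m, err := resolveDrMode(modeOTG, modeHost, modeOTG)
	fmt.Println(m, err) // host mode and <nil>, per the "OTG HST" row
}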
{ "pile_set_name": "Github" }
/* Firewall Builder Copyright (C) 2003 NetCitadel, LLC Author: Vadim Kurland [email protected] $Id$ This program is free software which we release under the GNU General Public License. You may redistribute and/or modify this program under the terms of that license as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. To get a copy of the GNU General Public License, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef __TCPSERVICEDIALOG_H_ #define __TCPSERVICEDIALOG_H_ #include <ui_tcpservicedialog_q.h> #include "BaseObjectDialog.h" #include <QWidget> #include "fwbuilder/FWObject.h" class ProjectPanel; class TCPServiceDialog : public BaseObjectDialog { Q_OBJECT; Ui::TCPServiceDialog_q *m_dialog; public: TCPServiceDialog(QWidget *parent); ~TCPServiceDialog(); public slots: virtual void applyChanges(); virtual void loadFWObject(libfwbuilder::FWObject *obj); virtual void validate(bool*); virtual void toggleEstablished(); }; #endif // TCPSERVICEDIALOG_H
{ "pile_set_name": "Github" }
// Regions @import "compass/support"; // The prefixed support threshold for css regions. // Defaults to the $graceful-usage-threshold. $regions-support-threshold: $graceful-usage-threshold !default; // Webkit, IE10 and future support for [CSS Regions](http://dev.w3.org/csswg/css3-regions/) // // $target is a value you use to link two regions of your css. // Give the source of your content the flow-into property, // and give your target container the flow-from property. // // For a visual explanation, see the diagrams at Chris Coyier's // [CSS-Tricks](http://css-tricks.com/content-folding/) @mixin flow-into($target) { $target: unquote($target); @include prefixed-properties(css-regions, $regions-support-threshold, (flow-into: $target)); } @mixin flow-from($target) { $target: unquote($target); @include prefixed-properties(css-regions, $regions-support-threshold, (flow-from: $target)); }
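// Illustrative usage sketch (not part of this library): link a content
// source to a display container through a shared named flow. The selectors
// and the "article-flow" name below are assumptions for the example only.
//
//   .article-source { @include flow-into(article-flow); }
//   .article-target { @include flow-from(article-flow); }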
{ "pile_set_name": "Github" }
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * @category Zend * @package Zend_Oauth * @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id: Request.php 20217 2010-01-12 16:01:57Z matthew $ */ /** Zend_Oauth_Token */ require_once 'Zend/Oauth/Token.php'; /** * @category Zend * @package Zend_Oauth * @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ class Zend_Oauth_Token_Request extends Zend_Oauth_Token { /** * Constructor * * @param null|Zend_Http_Response $response * @param null|Zend_Oauth_Http_Utility $utility */ public function __construct( Zend_Http_Response $response = null, Zend_Oauth_Http_Utility $utility = null ) { parent::__construct($response, $utility); // detect if server supports OAuth 1.0a if (isset($this->_params[Zend_Oauth_Token::TOKEN_PARAM_CALLBACK_CONFIRMED])) { Zend_Oauth_Client::$supportsRevisionA = true; } } }
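// Illustrative sketch (not part of this class): a consumer normally builds
// this token from the HTTP response of the request-token step; the variable
// names below are assumptions for the example.
//
// $token = new Zend_Oauth_Token_Request($httpResponse);
// if (Zend_Oauth_Client::$supportsRevisionA) {
//     // the provider implements OAuth 1.0a, so an oauth_verifier is expected
// }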
{ "pile_set_name": "Github" }
### OpenStreetMap setup ![13](images/13.png)
{ "pile_set_name": "Github" }
type t = { title: string, date: string, slug: string, author: string, }; let fromJs: ResourceIo.postShallow => t; let toJs: t => ResourceIo.postShallow; let query: unit => Future.t(Belt.Result.t(array(t), Errors.t));
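/* Illustrative sketch (not part of this interface): one way a caller might
   consume `query`, assuming the reason-future API. The handling below is an
   assumption for the example only.

   query()
   ->Future.get(result =>
       switch (result) {
       | Ok(posts) => posts->Belt.Array.forEach(post => Js.log(post.title))
       | Error(_) => Js.log("couldn't load posts")
       }
     );
 */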
{ "pile_set_name": "Github" }
/*! * Cropper v3.1.6 * https://github.com/fengyuanchen/cropper * * Copyright (c) 2014-2018 Chen Fengyuan * Released under the MIT license * * Date: 2018-03-01T13:33:39.581Z */ .cropper-container { direction: ltr; font-size: 0; line-height: 0; position: relative; -ms-touch-action: none; touch-action: none; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; } .cropper-container img {/*Avoid margin top issue (Occur only when margin-top <= -height) */ display: block; height: 100%; image-orientation: 0deg; max-height: none !important; max-width: none !important; min-height: 0 !important; min-width: 0 !important; width: 100%; } .cropper-wrap-box, .cropper-canvas, .cropper-drag-box, .cropper-crop-box, .cropper-modal { bottom: 0; left: 0; position: absolute; right: 0; top: 0; } .cropper-wrap-box, .cropper-canvas { overflow: hidden; } .cropper-drag-box { background-color: #fff; opacity: 0; } .cropper-modal { background-color: #000; opacity: .5; } .cropper-view-box { display: block; height: 100%; outline-color: rgba(51, 153, 255, 0.75); outline: 1px solid #39f; overflow: hidden; width: 100%; } .cropper-dashed { border: 0 dashed #eee; display: block; opacity: .5; position: absolute; } .cropper-dashed.dashed-h { border-bottom-width: 1px; border-top-width: 1px; height: 33.33333%; left: 0; top: 33.33333%; width: 100%; } .cropper-dashed.dashed-v { border-left-width: 1px; border-right-width: 1px; height: 100%; left: 33.33333%; top: 0; width: 33.33333%; } .cropper-center { display: block; height: 0; left: 50%; opacity: .75; position: absolute; top: 50%; width: 0; } .cropper-center:before, .cropper-center:after { background-color: #eee; content: ' '; display: block; position: absolute; } .cropper-center:before { height: 1px; left: -3px; top: 0; width: 7px; } .cropper-center:after { height: 7px; left: 0; top: -3px; width: 1px; } .cropper-face, .cropper-line, .cropper-point { display: block; height: 100%; opacity: .1; position: absolute; width: 100%; } .cropper-face { background-color: #fff; left: 0; top: 0; } .cropper-line { background-color: #39f; } .cropper-line.line-e { cursor: ew-resize; right: -3px; top: 0; width: 5px; } .cropper-line.line-n { cursor: ns-resize; height: 5px; left: 0; top: -3px; } .cropper-line.line-w { cursor: ew-resize; left: -3px; top: 0; width: 5px; } .cropper-line.line-s { bottom: -3px; cursor: ns-resize; height: 5px; left: 0; } .cropper-point { background-color: #39f; height: 5px; opacity: .75; width: 5px; } .cropper-point.point-e { cursor: ew-resize; margin-top: -3px; right: -3px; top: 50%; } .cropper-point.point-n { cursor: ns-resize; left: 50%; margin-left: -3px; top: -3px; } .cropper-point.point-w { cursor: ew-resize; left: -3px; margin-top: -3px; top: 50%; } .cropper-point.point-s { bottom: -3px; cursor: s-resize; left: 50%; margin-left: -3px; } .cropper-point.point-ne { cursor: nesw-resize; right: -3px; top: -3px; } .cropper-point.point-nw { cursor: nwse-resize; left: -3px; top: -3px; } .cropper-point.point-sw { bottom: -3px; cursor: nesw-resize; left: -3px; } .cropper-point.point-se { bottom: -3px; cursor: nwse-resize; height: 20px; opacity: 1; right: -3px; width: 20px; } @media (min-width: 768px) { .cropper-point.point-se { height: 15px; width: 15px; } } @media (min-width: 992px) { .cropper-point.point-se { height: 10px; width: 10px; } } @media (min-width: 1200px) { .cropper-point.point-se { height: 5px; opacity: .75; width: 5px; } } .cropper-point.point-se:before { background-color: #39f; bottom: -50%; content: ' '; display: block; 
height: 200%; opacity: 0; position: absolute; right: -50%; width: 200%; } .cropper-invisible { opacity: 0; } .cropper-bg { background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAQMAAAAlPW0iAAAAA3NCSVQICAjb4U/gAAAABlBMVEXMzMz////TjRV2AAAACXBIWXMAAArrAAAK6wGCiw1aAAAAHHRFWHRTb2Z0d2FyZQBBZG9iZSBGaXJld29ya3MgQ1M26LyyjAAAABFJREFUCJlj+M/AgBVhF/0PAH6/D/HkDxOGAAAAAElFTkSuQmCC'); } .cropper-hide { display: block; height: 0; position: absolute; width: 0; } .cropper-hidden { display: none !important; } .cropper-move { cursor: move; } .cropper-crop { cursor: crosshair; } .cropper-disabled .cropper-drag-box, .cropper-disabled .cropper-face, .cropper-disabled .cropper-line, .cropper-disabled .cropper-point { cursor: not-allowed; }
{ "pile_set_name": "Github" }
{ "author": { "name": "Isaac Z. Schlueter", "email": "[email protected]", "url": "http://blog.izs.me/" }, "name": "npmlog", "description": "logger for npm", "version": "0.0.6", "repository": { "type": "git", "url": "git://github.com/isaacs/npmlog.git" }, "main": "log.js", "scripts": { "test": "tap test/*.js" }, "dependencies": { "ansi": "~0.2.1" }, "devDependencies": { "tap": "" }, "license": "BSD", "readme": "# npmlog\n\nThe logger util that npm uses.\n\nThis logger is very basic. It does the logging for npm. It supports\ncustom levels and colored output.\n\nBy default, logs are written to stderr. If you want to send log messages\nto outputs other than streams, then you can change the `log.stream`\nmember, or you can just listen to the events that it emits, and do\nwhatever you want with them.\n\n# Basic Usage\n\n```\nvar log = require('npmlog')\n\n// additional stuff ---------------------------+\n// message ----------+ |\n// prefix ----+ | |\n// level -+ | | |\n// v v v v\n log.info('fyi', 'I have a kitty cat: %j', myKittyCat)\n```\n\n## log.level\n\n* {String}\n\nThe level to display logs at. Any logs at or above this level will be\ndisplayed. The special level `silent` will prevent anything from being\ndisplayed ever.\n\n## log.record\n\n* {Array}\n\nAn array of all the log messages that have been entered.\n\n## log.maxRecordSize\n\n* {Number}\n\nThe maximum number of records to keep. If log.record gets bigger than\n10% over this value, then it is sliced down to 90% of this value.\n\nThe reason for the 10% window is so that it doesn't have to resize a\nlarge array on every log entry.\n\n## log.prefixStyle\n\n* {Object}\n\nA style object that specifies how prefixes are styled. (See below)\n\n## log.headingStyle\n\n* {Object}\n\nA style object that specifies how the heading is styled. (See below)\n\n## log.heading\n\n* {String} Default: \"\"\n\nIf set, a heading that is printed at the start of every line.\n\n## log.stream\n\n* {Stream} Default: `process.stderr`\n\nThe stream where output is written.\n\n## log.enableColor()\n\nForce colors to be used on all messages, regardless of the output\nstream.\n\n## log.disableColor()\n\nDisable colors on all messages.\n\n## log.pause()\n\nStop emitting messages to the stream, but do not drop them.\n\n## log.resume()\n\nEmit all buffered messages that were written while paused.\n\n## log.log(level, prefix, message, ...)\n\n* `level` {String} The level to emit the message at\n* `prefix` {String} A string prefix. Set to \"\" to skip.\n* `message...` Arguments to `util.format`\n\nEmit a log message at the specified level.\n\n## log\\[level](prefix, message, ...)\n\nFor example,\n\n* log.silly(prefix, message, ...)\n* log.verbose(prefix, message, ...)\n* log.info(prefix, message, ...)\n* log.http(prefix, message, ...)\n* log.warn(prefix, message, ...)\n* log.error(prefix, message, ...)\n\nLike `log.log(level, prefix, message, ...)`. In this way, each level is\ngiven a shorthand, so you can do `log.info(prefix, message)`.\n\n## log.addLevel(level, n, style, disp)\n\n* `level` {String} Level indicator\n* `n` {Number} The numeric level\n* `style` {Object} Object with fg, bg, inverse, etc.\n* `disp` {String} Optional replacement for `level` in the output.\n\nSets up a new level with a shorthand function and so forth.\n\nNote that if the number is `Infinity`, then setting the level to that\nwill cause all log messages to be suppressed. 
If the number is\n`-Infinity`, then the only way to show it is to enable all log messages.\n\n# Events\n\nEvents are all emitted with the message object.\n\n* `log` Emitted for all messages\n* `log.<level>` Emitted for all messages with the `<level>` level.\n* `<prefix>` Messages with prefixes also emit their prefix as an event.\n\n# Style Objects\n\nStyle objects can have the following fields:\n\n* `fg` {String} Color for the foreground text\n* `bg` {String} Color for the background\n* `bold`, `inverse`, `underline` {Boolean} Set the associated property\n* `bell` {Boolean} Make a noise (This is pretty annoying, probably.)\n\n# Message Objects\n\nEvery log event is emitted with a message object, and the `log.record`\nlist contains all of them that have been created. They have the\nfollowing fields:\n\n* `id` {Number}\n* `level` {String}\n* `prefix` {String}\n* `message` {String} Result of `util.format()`\n* `messageRaw` {Array} Arguments to `util.format()`\n", "readmeFilename": "README.md", "bugs": { "url": "https://github.com/isaacs/npmlog/issues" }, "_id": "[email protected]", "_from": "npmlog@latest" }
{ "pile_set_name": "Github" }
--- ms.topic: include author: shashankbarsin ms.author: shasb ms.date: 12/17/2019 ms.prod: devops ms.technology: devops-cicd-tasks --- ```YAML # Go # Get, build, or test a Go application, or run a custom Go command - task: Go@0 inputs: #command: 'get' # Options: get, build, test, custom #customCommand: # Required when command == Custom #arguments: # Optional workingDirectory: ```
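For reference, a filled-in variant of the snippet above might look like the following sketch (the module path and `arguments` value are illustrative assumptions, not task defaults):

```YAML
# Example: build a Go module located in a subdirectory
- task: Go@0
  inputs:
    command: 'build'
    arguments: '-v'
    workingDirectory: '$(System.DefaultWorkingDirectory)/src/app'
```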
{ "pile_set_name": "Github" }
{ "name": "vuepress-plugin-typedoc", "version": "0.1.0", "description": "A VuePress plugin to build api documentation with TypeDoc.", "main": "dist/index.js", "files": [ "dist/" ], "bugs": { "url": "https://github.com/tgreyuk/typedoc-plugin-markdown/issues" }, "repository": { "type": "git", "url": "git+https://github.com/tgreyuk/typedoc-plugin-markdown.git", "directory": "packages/vuepress-plugin-typedoc" }, "homepage": "https://github.com/tgreyuk/typedoc-plugin-markdown/tree/master/packages/vuepress-plugin-typedoc", "peerDependencies": { "typedoc": ">=0.19.0", "typedoc-plugin-markdown": ">=3.0.0" }, "dependencies": {}, "devDependencies": { "typedoc": "^0.19.1", "typedoc-plugin-markdown": "*", "typescript": "^4.0.2" }, "scripts": { "lint": "eslint ./src --ext .ts", "prepublishOnly": "npm run lint && npm run build && npm run test", "build": "rm -rf ./dist && tsc && copyfiles --up 1 ./src/**/*.hbs ./dist/", "demo:start": "npm run build && cd demo && npm run dev", "demo:build": "npm run build && cd demo && npm run build", "demo:init": "rm -rf demo && npx create-vuepress demo", "test": "jest --colors", "build-and-test": "npm run build && npm run test" }, "author": "Thomas Grey", "license": "MIT", "keywords": [ "vuepress", "typedoc", "plugin", "markdown", "typescript", "api" ] }
{ "pile_set_name": "Github" }
/***************************************************************************** * Copyright (C) 2006-2011 by Mikhail V. Zinin * * [email protected] * * * * You may redistribute this file under the terms of the GNU General * * Public License as published by the Free Software Foundation, either * * version 2 of the License, or any later version. * *****************************************************************************/ #ifndef BIBASIS_INVOLUTIVE_HPP #define BIBASIS_INVOLUTIVE_HPP #include <list> #include <algorithm> #include "pcomparator.hpp" #include "qset.hpp" #include "tset.hpp" #include "matrix.hpp" #include "matrix-con.hpp" namespace BIBasis { template <typename MonomType> class BooleanInvolutiveBasis { private: std::list<Polynom<MonomType>*> GBasis; TSet<MonomType> IntermediateBasis; QSet<MonomType> ProlongationsSet; const PolynomialRing* const PRing; public: BooleanInvolutiveBasis(const Matrix* matrix, bool toGroebner); ~BooleanInvolutiveBasis(); const Polynom<MonomType>& operator[](int number) const; unsigned Length() const; const Matrix* ToMatrix() const; private: void FillInitialSet(const Matrix* matrix, std::list<Polynom<MonomType>*>& initialSet) const; Polynom<MonomType>* NormalForm(const Triple<MonomType>* triple) const; const Polynom<MonomType>* FindDivisor(const Polynom<MonomType>* polynom , const std::list<Polynom<MonomType>*>& set , bool toGroebner) const; Polynom<MonomType>* Reduce(Polynom<MonomType>* polynom , const std::list<Polynom<MonomType>*>& set , bool toGroebner) const; void ReduceSet(bool toGroebner); void Reset(); void Construct(const std::list<Polynom<MonomType>*>& set, bool toGroebner); void ConstructInvolutiveBasis(); }; template <typename MonomType> BooleanInvolutiveBasis<MonomType>::BooleanInvolutiveBasis(const Matrix* matrix, bool toGroebner) : GBasis() , IntermediateBasis() , ProlongationsSet() , PRing(matrix->get_ring()->cast_to_PolynomialRing()) { try { std::list<Polynom<MonomType>*> initialSet; FillInitialSet(matrix, initialSet); Construct(initialSet, toGroebner); initialSet.clear(); } catch(std::string& errorString) { ERROR(errorString.c_str()); Reset(); } catch(...) 
{ ERROR("BIBasis::BooleanInvolutiveBasis::BooleanInvolutiveBasis(): unknown error."); Reset(); } } template <typename MonomType> BooleanInvolutiveBasis<MonomType>::~BooleanInvolutiveBasis() { Reset(); } template <typename MonomType> const Polynom<MonomType>& BooleanInvolutiveBasis<MonomType>::operator[](int num) const { typename std::list<Polynom<MonomType>*>::const_iterator it(GBasis.begin()); for (unsigned i = Length()-1-num; i > 0; i--) { ++it; } return **it; } template <typename MonomType> unsigned BooleanInvolutiveBasis<MonomType>::Length() const { return GBasis.size(); } template <typename MonomType> const Matrix* BooleanInvolutiveBasis<MonomType>::ToMatrix() const { MatrixConstructor matrixConstructor(PRing->make_FreeModule(1), 0); const Monoid* monoid = PRing->getMonoid(); const ring_elem coefficientUnit = PRing->getCoefficients()->one(); monomial tmpRingMonomial = monoid->make_one(); for (typename std::list<Polynom<MonomType>*>::const_iterator currentPolynom = GBasis.begin(); currentPolynom != GBasis.end(); ++currentPolynom) { if (!*currentPolynom) { continue; } ring_elem currentRingPolynomial; for (const MonomType* currentMonom = &(**currentPolynom).Lm(); currentMonom; currentMonom = currentMonom->Next) { exponents currentExponent = newarray_atomic_clear(int, MonomType::GetDimIndepend()); typename std::set<typename MonomType::Integer> variablesSet = currentMonom->GetVariablesSet(); for (typename std::set<typename MonomType::Integer>::const_iterator currentVariable = variablesSet.begin(); currentVariable != variablesSet.end(); ++currentVariable) { currentExponent[*currentVariable] = 1; } monoid->from_expvector(currentExponent, tmpRingMonomial); deletearray(currentExponent); ring_elem tmpRingPolynomial = PRing->make_flat_term(coefficientUnit, tmpRingMonomial); PRing->add_to(currentRingPolynomial, tmpRingPolynomial); } matrixConstructor.append(PRing->make_vec(0, currentRingPolynomial)); } return matrixConstructor.to_matrix(); } template <typename MonomType> void BooleanInvolutiveBasis<MonomType>::FillInitialSet(const Matrix* matrix, std::list<Polynom<MonomType>*>& initialSet) const { const Monoid* monoid = PRing->getMonoid(); typename MonomType::Integer independ = MonomType::GetDimIndepend(); //construct Polynom for every column in matrix for (int column = 0; column < matrix->n_cols(); ++column) { vec polynomVector = matrix->elem(column); if (!polynomVector) { continue; } Polynom<MonomType>* currentPolynom = new Polynom<MonomType>(); for (Nterm* currentTerm = polynomVector->coeff; currentTerm; currentTerm = currentTerm->next) { exponents monomVector = newarray_atomic(int, independ); monoid->to_expvector(currentTerm->monom, monomVector); //construct Monom for every term MonomType* currentMonom = new MonomType(); if (!currentMonom) { deletearray(monomVector); throw std::string("BIBasis::BooleanInvolutiveBasis::FillInitialSet(): got NULL istead of new monom."); } for (typename MonomType::Integer currentVariable = 0; currentVariable < independ; ++currentVariable) { if (monomVector[currentVariable]) { *currentMonom *= currentVariable; } } *currentPolynom += *currentMonom; deletearray(monomVector); delete currentMonom; } initialSet.push_back(currentPolynom); } } template <typename MonomType> Polynom<MonomType>* BooleanInvolutiveBasis<MonomType>::NormalForm(const Triple<MonomType>* triple) const { /* As far as currentTriple can't be 0 (checked in QSET and TSET), * no need to check for NULL pointer. 
*/ const Triple<MonomType>* involutiveDivisor = 0; Polynom<MonomType>* originalForm = 0; Polynom<MonomType>* normalForm = new Polynom<MonomType>(); if (triple->GetVariable() == -1) { originalForm = new Polynom<MonomType>(*triple->GetPolynom()); } else { originalForm = new Polynom<MonomType>(*triple->GetWeakAncestor()->GetPolynom()); (*originalForm) *= triple->GetVariable(); } while (!originalForm->IsZero()) { involutiveDivisor = IntermediateBasis.Find(originalForm->Lm()); while (involutiveDivisor) { originalForm->HeadReduction(*involutiveDivisor->GetPolynom()); if (!originalForm->IsZero()) { involutiveDivisor = IntermediateBasis.Find(originalForm->Lm()); } else { involutiveDivisor = 0; } } if (!originalForm->IsZero()) { (*normalForm) += originalForm->Lm(); originalForm->RidOfLm(); } } delete originalForm; return normalForm; } template <typename MonomType> const Polynom<MonomType>* BooleanInvolutiveBasis<MonomType>::FindDivisor(const Polynom<MonomType>* polynom , const std::list<Polynom<MonomType>*>& set , bool toGroebner) const { if (!polynom || polynom->IsZero()) { return 0; } typename std::list<Polynom<MonomType>*>::const_iterator it(set.begin()), setEnd(set.end()); const MonomType& plm = polynom->Lm(); while (it != setEnd) { if (toGroebner && plm.IsDivisibleBy((**it).Lm())) { return *it; } else if (!toGroebner && plm.IsPommaretDivisibleBy((**it).Lm())) { return *it; } ++it; } return 0; } template <typename MonomType> Polynom<MonomType>* BooleanInvolutiveBasis<MonomType>::Reduce(Polynom<MonomType>* polynom , const std::list<Polynom<MonomType>*>& set , bool toGroebner) const { if (!polynom) { return 0; } Polynom<MonomType>* result = new Polynom<MonomType>(); const Polynom<MonomType>* currentReducer = 0; while (!polynom->IsZero()) { currentReducer = FindDivisor(polynom, set, toGroebner); while (currentReducer) { polynom->Reduction(*currentReducer); currentReducer = FindDivisor(polynom, set, toGroebner); } if (!polynom->IsZero()) { (*result) += polynom->Lm(); polynom->RidOfLm(); } } polynom = result; return result; } template <typename MonomType> void BooleanInvolutiveBasis<MonomType>::ReduceSet(bool toGroebner) { std::list<Polynom<MonomType>*> tmpPolySet; GBasis.sort(PointerMoreComparator<Polynom<MonomType> >()); while (!GBasis.empty()) { Polynom<MonomType>* currentPolynom = GBasis.front(); GBasis.pop_front(); currentPolynom = Reduce(currentPolynom, tmpPolySet, toGroebner); if (currentPolynom && !currentPolynom->IsZero()) { const MonomType& hLm = currentPolynom->Lm(); typename std::list<Polynom<MonomType>*>::iterator iteratorTmpPolySet = tmpPolySet.begin(); while (iteratorTmpPolySet != tmpPolySet.end()) { if ((**iteratorTmpPolySet).Lm().IsDivisibleBy(hLm)) { GBasis.push_back(*iteratorTmpPolySet); iteratorTmpPolySet = tmpPolySet.erase(iteratorTmpPolySet); } else { ++iteratorTmpPolySet; } } tmpPolySet.push_back(currentPolynom); } } unsigned tmpPolySetSize = static_cast<unsigned int>(tmpPolySet.size()); for (unsigned i = 0; i < tmpPolySetSize; ++i) { Polynom<MonomType>* currentPolynom = tmpPolySet.front(); tmpPolySet.pop_front(); currentPolynom = Reduce(currentPolynom, tmpPolySet, toGroebner); if (!currentPolynom || currentPolynom->IsZero()) { tmpPolySetSize--; } else { tmpPolySet.push_back(currentPolynom); } } GBasis = tmpPolySet; } template <typename MonomType> void BooleanInvolutiveBasis<MonomType>::Reset() { IntermediateBasis.Clear(); ProlongationsSet.Clear(); GBasis.clear(); } template <typename MonomType> void BooleanInvolutiveBasis<MonomType>::Construct(const 
std::list<Polynom<MonomType>*>& set, bool toGroebner) { Reset(); GBasis = set; ReduceSet(true); ProlongationsSet.Insert(GBasis); GBasis.clear(); ConstructInvolutiveBasis(); ProlongationsSet.Clear(); typename TSet<MonomType>::ConstIterator i2(IntermediateBasis.Begin()); while (i2 != IntermediateBasis.End()) { GBasis.push_back(const_cast<Polynom<MonomType>*>((**i2).GetPolynom())); ++i2; } ReduceSet(toGroebner); } template <typename MonomType> void BooleanInvolutiveBasis<MonomType>::ConstructInvolutiveBasis() { typename TSet<MonomType>::Iterator tit(IntermediateBasis.Begin()); Polynom<MonomType>* newNormalForm = 0; Triple<MonomType>* currentTriple = 0; while (!ProlongationsSet.Empty()) { currentTriple = ProlongationsSet.Get(); newNormalForm = NormalForm(currentTriple); /* As far as currentTriple can't be 0 (checked in QSET and TSET), * NormalForm can't return 0. */ std::set<typename MonomType::Integer> currentNmpSet; const Triple<MonomType>* currentAncestor = 0; if (!newNormalForm->IsZero() && newNormalForm->Lm() == currentTriple->GetPolynomLm()) { currentNmpSet = currentTriple->GetNmp(); currentAncestor = currentTriple->GetAncestor(); if (currentAncestor == currentTriple) { currentAncestor = 0; } } delete currentTriple; if (!newNormalForm->IsZero()) { std::list<Triple<MonomType>*> newProlongations; tit = IntermediateBasis.Begin(); while (tit != IntermediateBasis.End()) { if ((**tit).GetPolynomLm().IsTrueDivisibleBy(newNormalForm->Lm())) { ProlongationsSet.DeleteDescendants(*tit); newProlongations.push_back(*tit); tit = IntermediateBasis.Erase(tit); } else { ++tit; } } IntermediateBasis.PushBack(new Triple<MonomType>(newNormalForm, currentAncestor, currentNmpSet, 0, -1)); if (!newNormalForm->Degree()) { return; } IntermediateBasis.CollectNonMultiProlongations(--IntermediateBasis.End(), newProlongations); ProlongationsSet.Insert(newProlongations); } else { delete newNormalForm; } } } } #endif // BIBASIS_INVOLUTIVE_HPP
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- # # Copyright (C) 2009-2010 Pedro Algarvio <[email protected]> # # Basic plugin template created by: # Copyright (C) 2008 Martijn Voncken <[email protected]> # Copyright (C) 2007-2009 Andrew Resch <[email protected]> # Copyright (C) 2009 Damien Churchill <[email protected]> # # This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with # the additional special exception to link portions of this program with the OpenSSL library. # See LICENSE for more details. # from __future__ import unicode_literals import logging import smtplib from email.utils import formatdate from twisted.internet import defer, threads import deluge.configmanager from deluge import component from deluge.core.rpcserver import export from deluge.event import known_events from deluge.plugins.pluginbase import CorePluginBase from .common import CustomNotifications log = logging.getLogger(__name__) DEFAULT_PREFS = { 'smtp_enabled': False, 'smtp_host': '', 'smtp_port': 25, 'smtp_user': '', 'smtp_pass': '', 'smtp_from': '', 'smtp_tls': False, # SSL or TLS 'smtp_recipients': [], # Subscriptions 'subscriptions': {'email': []}, } class CoreNotifications(CustomNotifications): def __init__(self, plugin_name=None): CustomNotifications.__init__(self, plugin_name) def enable(self): CustomNotifications.enable(self) self.register_custom_email_notification( 'TorrentFinishedEvent', self._on_torrent_finished_event ) def disable(self): self.deregister_custom_email_notification('TorrentFinishedEvent') CustomNotifications.disable(self) def register_custom_email_notification(self, eventtype, handler): """This is used to register email notifications for custom event types. :param event: str, the event name :param handler: function, to be called when `:param:event` is emitted Your handler should return a tuple of (email_subject, email_contents). """ self._register_custom_provider('email', eventtype, handler) def deregister_custom_email_notification(self, eventtype): self._deregister_custom_provider('email', eventtype) def handle_custom_email_notification(self, result, eventtype): if not self.config['smtp_enabled']: return defer.succeed('SMTP notification not enabled.') subject, message = result log.debug( 'Spawning new thread to send email with subject: %s: %s', subject, message ) # Spawn thread because we don't want Deluge to lock up while we send the # email. 
return threads.deferToThread(self._notify_email, subject, message) def get_handled_events(self): handled_events = [] for evt in sorted(known_events): if known_events[evt].__module__.startswith('deluge.event'): if evt not in ('TorrentFinishedEvent',): # Skip all un-handled built-in events continue classdoc = known_events[evt].__doc__.strip() handled_events.append((evt, classdoc)) log.debug('Handled Notification Events: %s', handled_events) return handled_events def _notify_email(self, subject='', message=''): log.debug('Email prepared') to_addrs = self.config['smtp_recipients'] to_addrs_str = ', '.join(self.config['smtp_recipients']) headers_dict = { 'smtp_from': self.config['smtp_from'], 'subject': subject, 'smtp_recipients': to_addrs_str, 'date': formatdate(), } headers = ( """\ From: %(smtp_from)s To: %(smtp_recipients)s Subject: %(subject)s Date: %(date)s """ % headers_dict ) message = '\r\n'.join((headers + message).splitlines()) try: # Python 2.6 server = smtplib.SMTP( self.config['smtp_host'], self.config['smtp_port'], timeout=60 ) except Exception as ex: err_msg = _('There was an error sending the notification email: %s') % ex log.error(err_msg) return ex security_enabled = self.config['smtp_tls'] if security_enabled: server.ehlo() if 'starttls' not in server.esmtp_features: log.warning('TLS/SSL enabled but server does not support it') else: server.starttls() server.ehlo() if self.config['smtp_user'] and self.config['smtp_pass']: try: server.login(self.config['smtp_user'], self.config['smtp_pass']) except smtplib.SMTPHeloError as ex: err_msg = _('Server did not reply properly to HELO greeting: %s') % ex log.error(err_msg) return ex except smtplib.SMTPAuthenticationError as ex: err_msg = _('Server refused username/password combination: %s') % ex log.error(err_msg) return ex try: try: server.sendmail(self.config['smtp_from'], to_addrs, message) except smtplib.SMTPException as ex: err_msg = ( _('There was an error sending the notification email: %s') % ex ) log.error(err_msg) return ex finally: if security_enabled: # avoid false failure detection when the server closes # the SMTP connection with TLS enabled import socket try: server.quit() except socket.sslerror: pass else: server.quit() return _('Notification email sent.') def _on_torrent_finished_event(self, torrent_id): log.debug('Handler for TorrentFinishedEvent called for CORE') torrent = component.get('TorrentManager')[torrent_id] torrent_status = torrent.get_status({}) # Email subject = _('Finished Torrent "%(name)s"') % torrent_status message = ( _( 'This email is to inform you that Deluge has finished ' 'downloading "%(name)s", which includes %(num_files)i files.' '\nTo stop receiving these alerts, simply turn off email ' "notification in Deluge's preferences.\n\n" 'Thank you,\nDeluge.' 
) % torrent_status ) return subject, message # d = defer.maybeDeferred(self.handle_custom_email_notification, # [subject, message], # 'TorrentFinishedEvent') # d.addCallback(self._on_notify_sucess, 'email') # d.addErrback(self._on_notify_failure, 'email') # return d class Core(CorePluginBase, CoreNotifications): def __init__(self, plugin_name): CorePluginBase.__init__(self, plugin_name) CoreNotifications.__init__(self) def enable(self): CoreNotifications.enable(self) self.config = deluge.configmanager.ConfigManager( 'notifications-core.conf', DEFAULT_PREFS ) log.debug('ENABLING CORE NOTIFICATIONS') def disable(self): log.debug('DISABLING CORE NOTIFICATIONS') CoreNotifications.disable(self) @export def set_config(self, config): """Sets the config dictionary.""" for key in config: self.config[key] = config[key] self.config.save() @export def get_config(self): """Returns the config dictionary.""" return self.config.config @export def get_handled_events(self): return CoreNotifications.get_handled_events(self)
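# Illustrative sketch (not part of this plugin): registering a custom email
# notification through the API above. The event name, handler, and the way
# the component reference is obtained are assumptions for the example; per
# the docstring of register_custom_email_notification(), the handler must
# return an (email_subject, email_contents) tuple.
#
# def _on_my_custom_event(some_arg):
#     subject = 'My custom event fired'
#     message = 'Details: %s' % some_arg
#     return subject, message
#
# core_plugin.register_custom_email_notification(
#     'MyCustomEvent', _on_my_custom_event)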
{ "pile_set_name": "Github" }
{-# OPTIONS_GHC -F -pgmF hspec-discover #-}
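-- hspec-discover rewrites this file into a `main` that runs every module
-- matching *Spec.hs under this directory. A minimal module it would pick up
-- might look like the following sketch (the module name is an assumption):
--
--   module FooSpec (spec) where
--
--   import Test.Hspec
--
--   spec :: Spec
--   spec = describe "foo" $
--     it "adds numbers" $ (1 + 1) `shouldBe` (2 :: Int)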
{ "pile_set_name": "Github" }
# Building on Windows

_This page was based originally on the COMPILE-WIN32.txt file traditionally provided with Schism Tracker sources. It has been rewritten with instructions that use newer tools._

## Software needed

To compile on Windows, the following things are needed:

* mingw-gcc
* Python
* SDL headers and libs
* An environment in which to run them, like msys.

If you want proper version information, you'll need git installed and in your path too

## Installing MSYS2 and mingw

These instructions describe how to install msys2, which includes all of the required packages.

### Get MSYS2 and install it

Go to the URL http://msys2.github.io/ and download either the 32bit installer or the 64bit installer. The 32bit download can run on 32bit windows and the 64bit one requires a 64bit Windows. 64bit executables can be created with either of them.

Once installed, follow these instructions to get up-to-date files. This process is also described in their web page, so in case of conflict, you might opt to follow their instructions.

Run the MSYS2 shell (a start menu shortcut should have been created) and update the pacman package manager:

    pacman -Sy pacman

Close the MSYS2 window. Run it again from the Start menu and update the system:

    pacman -Syu

Close the MSYS2 window. Run it again from the Start menu _(note: the update process can, in some cases, break the start menu shortcut - in which case you may need to look in C:\msys64 (or wherever you installed msys) and run msys2\_shell.cmd)_ and update the rest with:

    pacman -Su

### Install the toolchains

Once you have the shell environment ready, it's time to get the compilers. Execute the following command:

    pacman -S mingw-w64-i686-toolchain libtool autoconf automake make

Also, you need the following specific dependency:

    pacman -S mingw-w64-i686-SDL

If you also want to build for 64bits:

    pacman -S mingw-w64-x86_64-toolchain mingw-w64-x86_64-SDL

You can search for packages with

    pacman -Ss package description

## Compilation

MSYS2 installs three shortcut icons, one to run the msys shell, and two more that setup the environment to use either the 32bit compiler or the 64bit compiler. You can also start the 32bit compiler with

    msys2_shell.cmd -mingw32

and the 64bit compiler with

    msys2_shell.cmd -mingw64

### Configure schismtracker to build

Open the 32bit or 64bit shell depending on which version you want to build. The steps here only need to be done once, or when configure.ac or the installed package versions change.

Go to schismtracker sources root (drive letters are mapped to /x , example C:/ is /c/, D:/ is /d/ ...)

Reconfigure it:

    autoreconf -i

_(note: if you get a "possibly undefined macro: AM\_PATH\_SDL" error, you're probably using the standard msys2 shell - either use the mingw start menu shortcuts, or start msys2_shell.cmd with either -mingw32 or -mingw64 as mentioned above)_

If you're planning to build both 32- and 64-bit binaries, you may wish to create the subdirs build32 and build64:

    mkdir build32
    mkdir build64

and then follow the rest of the instructions twice, once with the -mingw32 shell in the build32 subdir, and once with the -mingw64 shell in the build64 subdir. Otherwise just build will do:

    mkdir build

Now move into the build subdir and run the configure script:

    cd build # or build32 or build64 as appropriate
    ../configure

### Build and rebuild

In order to build and run it, from the appropriate build subdir, run these:

    make
    ../schismtracker &

### Compilation problems

The configure script should give hints on what is missing to compile. If you've followed the steps, everything should already be in place, but in case it isn't, see the config.log file, which reports a detailed output (more than what is seen on the screen) that could help identify what is missing, or which option is not working.

### Debugging

When installing the toolchains, the gdb debugger is also installed. Run it from the win32 shell to debug the 32bit exe, or run it from the Win64 shell to debug the 64bit one.

## Prepare the distribution file

To distribute the application, it is important to bundle the correct version of the SDL.dll file with the executable.

For a 32bit build, the file is located in /msys2_path/mingw32/bin/SDL.dll

For a 64bit build, the file is located in /msys2_path/mingw64/bin/SDL.dll

The 32bit build also requires the files /msys2_path/mingw32/bin/libgcc_s_dw2-1.dll and /msys2_path/mingw32/bin/libwinpthread-1.dll

If you want to reduce the exe size (removing the debugging information), use the following command: _(note: you MUST do this from the same shell as you used to build the executable, as the strip tool is architecture-dependent)_

    strip -g schismtracker.exe

## SDL2 notes

The current version of schismtracker uses SDL1, but a fork with SDL2 has been made here https://github.com/davvid/schismtracker/tree/laptop-octave

In order to build that branch, installing the SDL2 packages AND pkg-config is needed

    pacman -S mingw-w64-i686-SDL2 mingw-w64-i686-SDL2_gfx mingw-w64-i686-pkg-config mingw-w64-x86_64-SDL2 mingw-w64-x86_64-SDL2_gfx mingw-w64-x86_64-pkg-config
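## Quick recap

As a recap of the 32bit path through the sections above, a complete from-scratch session looks roughly like this (an illustrative sketch: the source location /c/src/schismtracker is an example, not a fixed path, and the pacman step runs once in the plain MSYS2 shell while the rest runs in the MINGW32 shell):

    pacman -S mingw-w64-i686-toolchain mingw-w64-i686-SDL libtool autoconf automake make
    cd /c/src/schismtracker
    autoreconf -i
    mkdir build32
    cd build32
    ../configure
    make
    strip -g schismtracker.exe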
{ "pile_set_name": "Github" }
// Copyright (c) 2006-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// f32test\server\t_findcapall.cpp
// PlatSec compatibility test. Application capability is ALLFILES
// Tests to confirm DEF088224 changes do not affect existing functionality
// TFindFile should NEVER return KErrPermissionDenied for FindByDir requests
// z:\sys\bin\t_findcaptestfile.txt is used for testing.
// If the file is not available in the location, test will panic.
//
//

#define __E32TEST_EXTENSION__
#include <e32test.h>
#include <f32file.h>

_LIT(KTestString,"t_findcapall");

LOCAL_C RTest test(KTestString);
LOCAL_C RFs FileServer;

LOCAL_C TInt TestFind(const TPtrC16 aTestDesc, const TPtrC16 aFPath, const TPtrC16 aFName)
	{
	TInt Err;
	test.Next(aTestDesc);
	TFindFile FindFile(FileServer);
	Err=FindFile.FindByDir(aFName,aFPath);
	return Err;
	}

GLDEF_C TInt E32Main()
	{
	TInt Err;
	test.Title();

	Err=FileServer.Connect();
	test_KErrNone(Err);

	// RTest.Next is called from function "TestFind()".
	// RTest.Start is called here to start the test.
	test.Start(_L("Test Starts : Dummy Test"));

	// Test: Find non existing file in existing /sys folder
	//
	// Drive Name : Z:
	// Path       : sys\bin
	// File Name  : nonexistingfile.txt
	//
	// Expected return value: KErrNotFound
	Err=TestFind(_L("Drive specified & available Path exists File does not exist"),
				 _L("z:\\sys\\bin\\"),
				 _L("nonexistingfile.txt"));
	test_Value(Err, Err == KErrNotFound);

	// Test: Find existing file in existing /sys folder
	//
	// Drive Name : Z:
	// Path       : sys\bin
	// File Name  : t_findcaptestfile.txt
	//
	// Expected return value: KErrNone
	Err=TestFind(_L("Drive specified & available Path exists File exists"),
				 _L("z:\\sys\\bin\\"),
				 _L("t_findcaptestfile.txt"));
	test_KErrNone(Err);

	// Test: Find non existing file in existing / non existing /sys folder
	//
	// Drive Name : C:
	// Path       : sys
	// File Name  : nonexisting.txt
	//
	// Expected return value: KErrNotFound
	Err=TestFind(_L("Drive specified & available Path may exist File does not exist"),
				 _L("c:\\sys\\"),
				 _L("nonexisting.txt"));
	test_Value(Err, Err == KErrNotFound);

	// Test: Find existing file in /sys folder without specifying the path
	//
	// Drive Name : Not specified.
	// Path       : sys\bin
	// File Name  : t_findcaptestfile.txt
	//
	// Expected return value: KErrNone
	Err=TestFind(_L("Drive not specified Path exists File exists"),
				 _L("\\sys\\bin\\"),
				 _L("t_findcaptestfile.txt"));
	test_KErrNone(Err);

	// Test: Find non existing file in /sys folder without specifying the path
	//
	// Drive Name : Not specified
	// Path       : sys
	// File Name  : nonexistingfile.txt
	//
	// Expected return value: KErrNotFound
	Err=TestFind(_L("Drive not specified Path exists File does not exist"),
				 _L("\\sys\\"),
				 _L("nonexisting.txt"));
	test_Value(Err, Err == KErrNotFound);

	FileServer.Close();

	test.Printf(_L("Test completed\n"));
	test.End();
	test.Close();

	return KErrNone;
	}
{ "pile_set_name": "Github" }
// Copyright 2016 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build amd64,!gccgo,!appengine #include "textflag.h" DATA iv0<>+0x00(SB)/4, $0x6a09e667 DATA iv0<>+0x04(SB)/4, $0xbb67ae85 DATA iv0<>+0x08(SB)/4, $0x3c6ef372 DATA iv0<>+0x0c(SB)/4, $0xa54ff53a GLOBL iv0<>(SB), (NOPTR+RODATA), $16 DATA iv1<>+0x00(SB)/4, $0x510e527f DATA iv1<>+0x04(SB)/4, $0x9b05688c DATA iv1<>+0x08(SB)/4, $0x1f83d9ab DATA iv1<>+0x0c(SB)/4, $0x5be0cd19 GLOBL iv1<>(SB), (NOPTR+RODATA), $16 DATA rol16<>+0x00(SB)/8, $0x0504070601000302 DATA rol16<>+0x08(SB)/8, $0x0D0C0F0E09080B0A GLOBL rol16<>(SB), (NOPTR+RODATA), $16 DATA rol8<>+0x00(SB)/8, $0x0407060500030201 DATA rol8<>+0x08(SB)/8, $0x0C0F0E0D080B0A09 GLOBL rol8<>(SB), (NOPTR+RODATA), $16 DATA counter<>+0x00(SB)/8, $0x40 DATA counter<>+0x08(SB)/8, $0x0 GLOBL counter<>(SB), (NOPTR+RODATA), $16 #define ROTL_SSE2(n, t, v) \ MOVO v, t; \ PSLLL $n, t; \ PSRLL $(32-n), v; \ PXOR t, v #define ROTL_SSSE3(c, v) \ PSHUFB c, v #define ROUND_SSE2(v0, v1, v2, v3, m0, m1, m2, m3, t) \ PADDL m0, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSE2(16, t, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(20, t, v1); \ PADDL m1, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSE2(24, t, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(25, t, v1); \ PSHUFL $0x39, v1, v1; \ PSHUFL $0x4E, v2, v2; \ PSHUFL $0x93, v3, v3; \ PADDL m2, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSE2(16, t, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(20, t, v1); \ PADDL m3, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSE2(24, t, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(25, t, v1); \ PSHUFL $0x39, v3, v3; \ PSHUFL $0x4E, v2, v2; \ PSHUFL $0x93, v1, v1 #define ROUND_SSSE3(v0, v1, v2, v3, m0, m1, m2, m3, t, c16, c8) \ PADDL m0, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSSE3(c16, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(20, t, v1); \ PADDL m1, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSSE3(c8, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(25, t, v1); \ PSHUFL $0x39, v1, v1; \ PSHUFL $0x4E, v2, v2; \ PSHUFL $0x93, v3, v3; \ PADDL m2, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSSE3(c16, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(20, t, v1); \ PADDL m3, v0; \ PADDL v1, v0; \ PXOR v0, v3; \ ROTL_SSSE3(c8, v3); \ PADDL v3, v2; \ PXOR v2, v1; \ ROTL_SSE2(25, t, v1); \ PSHUFL $0x39, v3, v3; \ PSHUFL $0x4E, v2, v2; \ PSHUFL $0x93, v1, v1 #define LOAD_MSG_SSE4(m0, m1, m2, m3, src, i0, i1, i2, i3, i4, i5, i6, i7, i8, i9, i10, i11, i12, i13, i14, i15) \ MOVL i0*4(src), m0; \ PINSRD $1, i1*4(src), m0; \ PINSRD $2, i2*4(src), m0; \ PINSRD $3, i3*4(src), m0; \ MOVL i4*4(src), m1; \ PINSRD $1, i5*4(src), m1; \ PINSRD $2, i6*4(src), m1; \ PINSRD $3, i7*4(src), m1; \ MOVL i8*4(src), m2; \ PINSRD $1, i9*4(src), m2; \ PINSRD $2, i10*4(src), m2; \ PINSRD $3, i11*4(src), m2; \ MOVL i12*4(src), m3; \ PINSRD $1, i13*4(src), m3; \ PINSRD $2, i14*4(src), m3; \ PINSRD $3, i15*4(src), m3 #define PRECOMPUTE_MSG(dst, off, src, R8, R9, R10, R11, R12, R13, R14, R15) \ MOVQ 0*4(src), R8; \ MOVQ 2*4(src), R9; \ MOVQ 4*4(src), R10; \ MOVQ 6*4(src), R11; \ MOVQ 8*4(src), R12; \ MOVQ 10*4(src), R13; \ MOVQ 12*4(src), R14; \ MOVQ 14*4(src), R15; \ \ MOVL R8, 0*4+off+0(dst); \ MOVL R8, 9*4+off+64(dst); \ MOVL R8, 5*4+off+128(dst); \ MOVL R8, 14*4+off+192(dst); \ MOVL R8, 4*4+off+256(dst); \ MOVL R8, 2*4+off+320(dst); \ MOVL R8, 8*4+off+384(dst); \ MOVL R8, 12*4+off+448(dst); \ MOVL R8, 3*4+off+512(dst); \ MOVL R8, 15*4+off+576(dst); \ 
SHRQ $32, R8; \ MOVL R8, 4*4+off+0(dst); \ MOVL R8, 8*4+off+64(dst); \ MOVL R8, 14*4+off+128(dst); \ MOVL R8, 5*4+off+192(dst); \ MOVL R8, 12*4+off+256(dst); \ MOVL R8, 11*4+off+320(dst); \ MOVL R8, 1*4+off+384(dst); \ MOVL R8, 6*4+off+448(dst); \ MOVL R8, 10*4+off+512(dst); \ MOVL R8, 3*4+off+576(dst); \ \ MOVL R9, 1*4+off+0(dst); \ MOVL R9, 13*4+off+64(dst); \ MOVL R9, 6*4+off+128(dst); \ MOVL R9, 8*4+off+192(dst); \ MOVL R9, 2*4+off+256(dst); \ MOVL R9, 0*4+off+320(dst); \ MOVL R9, 14*4+off+384(dst); \ MOVL R9, 11*4+off+448(dst); \ MOVL R9, 12*4+off+512(dst); \ MOVL R9, 4*4+off+576(dst); \ SHRQ $32, R9; \ MOVL R9, 5*4+off+0(dst); \ MOVL R9, 15*4+off+64(dst); \ MOVL R9, 9*4+off+128(dst); \ MOVL R9, 1*4+off+192(dst); \ MOVL R9, 11*4+off+256(dst); \ MOVL R9, 7*4+off+320(dst); \ MOVL R9, 13*4+off+384(dst); \ MOVL R9, 3*4+off+448(dst); \ MOVL R9, 6*4+off+512(dst); \ MOVL R9, 10*4+off+576(dst); \ \ MOVL R10, 2*4+off+0(dst); \ MOVL R10, 1*4+off+64(dst); \ MOVL R10, 15*4+off+128(dst); \ MOVL R10, 10*4+off+192(dst); \ MOVL R10, 6*4+off+256(dst); \ MOVL R10, 8*4+off+320(dst); \ MOVL R10, 3*4+off+384(dst); \ MOVL R10, 13*4+off+448(dst); \ MOVL R10, 14*4+off+512(dst); \ MOVL R10, 5*4+off+576(dst); \ SHRQ $32, R10; \ MOVL R10, 6*4+off+0(dst); \ MOVL R10, 11*4+off+64(dst); \ MOVL R10, 2*4+off+128(dst); \ MOVL R10, 9*4+off+192(dst); \ MOVL R10, 1*4+off+256(dst); \ MOVL R10, 13*4+off+320(dst); \ MOVL R10, 4*4+off+384(dst); \ MOVL R10, 8*4+off+448(dst); \ MOVL R10, 15*4+off+512(dst); \ MOVL R10, 7*4+off+576(dst); \ \ MOVL R11, 3*4+off+0(dst); \ MOVL R11, 7*4+off+64(dst); \ MOVL R11, 13*4+off+128(dst); \ MOVL R11, 12*4+off+192(dst); \ MOVL R11, 10*4+off+256(dst); \ MOVL R11, 1*4+off+320(dst); \ MOVL R11, 9*4+off+384(dst); \ MOVL R11, 14*4+off+448(dst); \ MOVL R11, 0*4+off+512(dst); \ MOVL R11, 6*4+off+576(dst); \ SHRQ $32, R11; \ MOVL R11, 7*4+off+0(dst); \ MOVL R11, 14*4+off+64(dst); \ MOVL R11, 10*4+off+128(dst); \ MOVL R11, 0*4+off+192(dst); \ MOVL R11, 5*4+off+256(dst); \ MOVL R11, 9*4+off+320(dst); \ MOVL R11, 12*4+off+384(dst); \ MOVL R11, 1*4+off+448(dst); \ MOVL R11, 13*4+off+512(dst); \ MOVL R11, 2*4+off+576(dst); \ \ MOVL R12, 8*4+off+0(dst); \ MOVL R12, 5*4+off+64(dst); \ MOVL R12, 4*4+off+128(dst); \ MOVL R12, 15*4+off+192(dst); \ MOVL R12, 14*4+off+256(dst); \ MOVL R12, 3*4+off+320(dst); \ MOVL R12, 11*4+off+384(dst); \ MOVL R12, 10*4+off+448(dst); \ MOVL R12, 7*4+off+512(dst); \ MOVL R12, 1*4+off+576(dst); \ SHRQ $32, R12; \ MOVL R12, 12*4+off+0(dst); \ MOVL R12, 2*4+off+64(dst); \ MOVL R12, 11*4+off+128(dst); \ MOVL R12, 4*4+off+192(dst); \ MOVL R12, 0*4+off+256(dst); \ MOVL R12, 15*4+off+320(dst); \ MOVL R12, 10*4+off+384(dst); \ MOVL R12, 7*4+off+448(dst); \ MOVL R12, 5*4+off+512(dst); \ MOVL R12, 9*4+off+576(dst); \ \ MOVL R13, 9*4+off+0(dst); \ MOVL R13, 4*4+off+64(dst); \ MOVL R13, 8*4+off+128(dst); \ MOVL R13, 13*4+off+192(dst); \ MOVL R13, 3*4+off+256(dst); \ MOVL R13, 5*4+off+320(dst); \ MOVL R13, 7*4+off+384(dst); \ MOVL R13, 15*4+off+448(dst); \ MOVL R13, 11*4+off+512(dst); \ MOVL R13, 0*4+off+576(dst); \ SHRQ $32, R13; \ MOVL R13, 13*4+off+0(dst); \ MOVL R13, 10*4+off+64(dst); \ MOVL R13, 0*4+off+128(dst); \ MOVL R13, 3*4+off+192(dst); \ MOVL R13, 9*4+off+256(dst); \ MOVL R13, 6*4+off+320(dst); \ MOVL R13, 15*4+off+384(dst); \ MOVL R13, 4*4+off+448(dst); \ MOVL R13, 2*4+off+512(dst); \ MOVL R13, 12*4+off+576(dst); \ \ MOVL R14, 10*4+off+0(dst); \ MOVL R14, 12*4+off+64(dst); \ MOVL R14, 1*4+off+128(dst); \ MOVL R14, 6*4+off+192(dst); \ MOVL R14, 13*4+off+256(dst); \ MOVL R14, 
4*4+off+320(dst); \ MOVL R14, 0*4+off+384(dst); \ MOVL R14, 2*4+off+448(dst); \ MOVL R14, 8*4+off+512(dst); \ MOVL R14, 14*4+off+576(dst); \ SHRQ $32, R14; \ MOVL R14, 14*4+off+0(dst); \ MOVL R14, 3*4+off+64(dst); \ MOVL R14, 7*4+off+128(dst); \ MOVL R14, 2*4+off+192(dst); \ MOVL R14, 15*4+off+256(dst); \ MOVL R14, 12*4+off+320(dst); \ MOVL R14, 6*4+off+384(dst); \ MOVL R14, 0*4+off+448(dst); \ MOVL R14, 9*4+off+512(dst); \ MOVL R14, 11*4+off+576(dst); \ \ MOVL R15, 11*4+off+0(dst); \ MOVL R15, 0*4+off+64(dst); \ MOVL R15, 12*4+off+128(dst); \ MOVL R15, 7*4+off+192(dst); \ MOVL R15, 8*4+off+256(dst); \ MOVL R15, 14*4+off+320(dst); \ MOVL R15, 2*4+off+384(dst); \ MOVL R15, 5*4+off+448(dst); \ MOVL R15, 1*4+off+512(dst); \ MOVL R15, 13*4+off+576(dst); \ SHRQ $32, R15; \ MOVL R15, 15*4+off+0(dst); \ MOVL R15, 6*4+off+64(dst); \ MOVL R15, 3*4+off+128(dst); \ MOVL R15, 11*4+off+192(dst); \ MOVL R15, 7*4+off+256(dst); \ MOVL R15, 10*4+off+320(dst); \ MOVL R15, 5*4+off+384(dst); \ MOVL R15, 9*4+off+448(dst); \ MOVL R15, 4*4+off+512(dst); \ MOVL R15, 8*4+off+576(dst) #define BLAKE2s_SSE2() \ PRECOMPUTE_MSG(SP, 16, SI, R8, R9, R10, R11, R12, R13, R14, R15); \ ROUND_SSE2(X4, X5, X6, X7, 16(SP), 32(SP), 48(SP), 64(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+64(SP), 32+64(SP), 48+64(SP), 64+64(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+128(SP), 32+128(SP), 48+128(SP), 64+128(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+192(SP), 32+192(SP), 48+192(SP), 64+192(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+256(SP), 32+256(SP), 48+256(SP), 64+256(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+320(SP), 32+320(SP), 48+320(SP), 64+320(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+384(SP), 32+384(SP), 48+384(SP), 64+384(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+448(SP), 32+448(SP), 48+448(SP), 64+448(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+512(SP), 32+512(SP), 48+512(SP), 64+512(SP), X8); \ ROUND_SSE2(X4, X5, X6, X7, 16+576(SP), 32+576(SP), 48+576(SP), 64+576(SP), X8) #define BLAKE2s_SSSE3() \ PRECOMPUTE_MSG(SP, 16, SI, R8, R9, R10, R11, R12, R13, R14, R15); \ ROUND_SSSE3(X4, X5, X6, X7, 16(SP), 32(SP), 48(SP), 64(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+64(SP), 32+64(SP), 48+64(SP), 64+64(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+128(SP), 32+128(SP), 48+128(SP), 64+128(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+192(SP), 32+192(SP), 48+192(SP), 64+192(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+256(SP), 32+256(SP), 48+256(SP), 64+256(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+320(SP), 32+320(SP), 48+320(SP), 64+320(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+384(SP), 32+384(SP), 48+384(SP), 64+384(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+448(SP), 32+448(SP), 48+448(SP), 64+448(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+512(SP), 32+512(SP), 48+512(SP), 64+512(SP), X8, X13, X14); \ ROUND_SSSE3(X4, X5, X6, X7, 16+576(SP), 32+576(SP), 48+576(SP), 64+576(SP), X8, X13, X14) #define BLAKE2s_SSE4() \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 0, 2, 4, 6, 1, 3, 5, 7, 8, 10, 12, 14, 9, 11, 13, 15); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 14, 4, 9, 13, 10, 8, 15, 6, 1, 0, 11, 5, 12, 2, 7, 3); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 11, 12, 5, 15, 8, 0, 2, 13, 10, 3, 7, 9, 14, 6, 1, 4); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 7, 3, 13, 11, 9, 1, 12, 14, 2, 5, 4, 15, 6, 10, 0, 8); \ 
ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 9, 5, 2, 10, 0, 7, 4, 15, 14, 11, 6, 3, 1, 12, 8, 13); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 2, 6, 0, 8, 12, 10, 11, 3, 4, 7, 15, 1, 13, 5, 14, 9); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 12, 1, 14, 4, 5, 15, 13, 10, 0, 6, 9, 8, 7, 3, 2, 11); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 13, 7, 12, 3, 11, 14, 1, 9, 5, 15, 8, 2, 0, 4, 6, 10); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 6, 14, 11, 0, 15, 9, 3, 8, 12, 13, 1, 10, 2, 7, 4, 5); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14); \ LOAD_MSG_SSE4(X8, X9, X10, X11, SI, 10, 8, 7, 1, 2, 4, 6, 5, 15, 9, 3, 13, 11, 14, 12, 0); \ ROUND_SSSE3(X4, X5, X6, X7, X8, X9, X10, X11, X8, X13, X14) #define HASH_BLOCKS(h, c, flag, blocks_base, blocks_len, BLAKE2s_FUNC) \ MOVQ h, AX; \ MOVQ c, BX; \ MOVL flag, CX; \ MOVQ blocks_base, SI; \ MOVQ blocks_len, DX; \ \ MOVQ SP, BP; \ MOVQ SP, R9; \ ADDQ $15, R9; \ ANDQ $~15, R9; \ MOVQ R9, SP; \ \ MOVQ 0(BX), R9; \ MOVQ R9, 0(SP); \ XORQ R9, R9; \ MOVQ R9, 8(SP); \ MOVL CX, 8(SP); \ \ MOVOU 0(AX), X0; \ MOVOU 16(AX), X1; \ MOVOU iv0<>(SB), X2; \ MOVOU iv1<>(SB), X3 \ \ MOVOU counter<>(SB), X12; \ MOVOU rol16<>(SB), X13; \ MOVOU rol8<>(SB), X14; \ MOVO 0(SP), X15; \ \ loop: \ MOVO X0, X4; \ MOVO X1, X5; \ MOVO X2, X6; \ MOVO X3, X7; \ \ PADDQ X12, X15; \ PXOR X15, X7; \ \ BLAKE2s_FUNC(); \ \ PXOR X4, X0; \ PXOR X5, X1; \ PXOR X6, X0; \ PXOR X7, X1; \ \ LEAQ 64(SI), SI; \ SUBQ $64, DX; \ JNE loop; \ \ MOVO X15, 0(SP); \ MOVQ 0(SP), R9; \ MOVQ R9, 0(BX); \ \ MOVOU X0, 0(AX); \ MOVOU X1, 16(AX); \ \ MOVQ BP, SP // func hashBlocksSSE2(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) TEXT ·hashBlocksSSE2(SB), 0, $672-48 // frame = 656 + 16 byte alignment HASH_BLOCKS(h+0(FP), c+8(FP), flag+16(FP), blocks_base+24(FP), blocks_len+32(FP), BLAKE2s_SSE2) RET // func hashBlocksSSSE3(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) TEXT ·hashBlocksSSSE3(SB), 0, $672-48 // frame = 656 + 16 byte alignment HASH_BLOCKS(h+0(FP), c+8(FP), flag+16(FP), blocks_base+24(FP), blocks_len+32(FP), BLAKE2s_SSSE3) RET // func hashBlocksSSE4(h *[8]uint32, c *[2]uint32, flag uint32, blocks []byte) TEXT ·hashBlocksSSE4(SB), 0, $32-48 // frame = 16 + 16 byte alignment HASH_BLOCKS(h+0(FP), c+8(FP), flag+16(FP), blocks_base+24(FP), blocks_len+32(FP), BLAKE2s_SSE4) RET
{ "pile_set_name": "Github" }
/** * Copyright 2013 Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @emails [email protected] */ /*jshint evil:true*/ require('mock-modules').autoMockOff(); describe('jstransform-utils', function() { var transform, utils; var Syntax = require('esprima-fb').Syntax; beforeEach(function() { require('mock-modules').dumpCache(); transform = require('../jstransform').transform; utils = require('../utils'); }); describe('temporary variables', function() { it('should inject temporary variables at the start of functions', function() { function visitFunctionBlock(traverse, node, path, state) { utils.catchup(node.range[0] + 1, state); var x = utils.injectTempVar(state); var y = utils.injectTempVar(state); traverse(node.body, path, state); utils.append('return ' + x + ' + ' + y + ';', state); utils.catchup(node.range[1], state); return false; } visitFunctionBlock.test = function(node, path, state) { var parentType = path.length && path[0].type; return node.type === Syntax.BlockStatement && ( parentType === Syntax.FunctionDeclaration || parentType === Syntax.FunctionExpression ); }; expect(transform( [visitFunctionBlock], 'var x = function() {};' ).code).toEqual( 'var x = function() {var $__0, $__1;return $__0 + $__1;};' ); expect(eval(transform( [visitFunctionBlock], '2 + (function sum(x, y)\t{ $__0 = x; $__1 = y; }(3, 5))' ).code)).toEqual(10); }); }); });
{ "pile_set_name": "Github" }
# Editor.md

![](https://pandao.github.io/editor.md/images/logos/editormd-logo-180x180.png)

![](https://img.shields.io/github/stars/pandao/editor.md.svg) ![](https://img.shields.io/github/forks/pandao/editor.md.svg) ![](https://img.shields.io/github/tag/pandao/editor.md.svg) ![](https://img.shields.io/github/release/pandao/editor.md.svg) ![](https://img.shields.io/github/issues/pandao/editor.md.svg) ![](https://img.shields.io/bower/v/editor.md.svg)

**Editor.md** : The open source embeddable online markdown editor (component), based on CodeMirror & jQuery & Marked.

### Features

- Support Standard Markdown / CommonMark and GFM (GitHub Flavored Markdown);
- Full-featured: Real-time Preview, Image (cross-domain) upload, Preformatted text/Code blocks/Tables insert, Code fold, Search replace, Read only, Themes, Multi-languages, L18n, HTML entities, Code syntax highlighting...;
- Markdown Extras : Support [ToC (Table of Contents)](https://pandao.github.io/editor.md/examples/toc.html), [Emoji](https://pandao.github.io/editor.md/examples/emoji.html), [Task lists](https://pandao.github.io/editor.md/examples/task-lists.html), [@Links](https://pandao.github.io/editor.md/examples/@links.html)...;
- Compatible with all major browsers (IE8+), compatible Zepto.js and iPad;
- Support [decode & filter of the HTML tags & attributes](https://pandao.github.io/editor.md/examples/html-tags-decode.html);
- Support [TeX (LaTeX expressions, Based on KaTeX)](https://pandao.github.io/editor.md/examples/katex.html), [Flowchart](https://pandao.github.io/editor.md/examples/flowchart.html) and [Sequence Diagram](https://pandao.github.io/editor.md/examples/sequence-diagram.html) of Markdown extended syntax;
- Support AMD/CMD (Require.js & Sea.js) Module Loader, and Custom/define editor plugins;

[README & Examples (English)](https://pandao.github.io/editor.md/en.html)

--------

**Editor.md** is an open-source, embeddable online Markdown editor (component), built on CodeMirror, jQuery and Marked.

![editormd-screenshot](https://pandao.github.io/editor.md/examples/images/editormd-screenshot.png "editormd-screenshot")

#### Main features

- Supports standard Markdown / CommonMark and GFM (GitHub Flavored Markdown) syntax, and can also [turn into a code editor](https://pandao.github.io/editor.md/examples/change-mode.html);
- Supports real-time preview, (cross-domain) image upload, insertion of preformatted text/code/tables, code folding, jump to line, search and replace, read-only mode, custom style themes, and syntax highlighting for many languages;
- Supports Markdown extended syntax such as [ToC (Table of Contents)](https://pandao.github.io/editor.md/examples/toc.html), [Emoji](https://pandao.github.io/editor.md/examples/emoji.html), [Task lists](https://pandao.github.io/editor.md/examples/task-lists.html) and [@Links](https://pandao.github.io/editor.md/examples/@links.html);
- Supports TeX scientific formulas (based on [KaTeX](https://pandao.github.io/editor.md/examples/katex.html)), [Flowchart](https://pandao.github.io/editor.md/examples/flowchart.html) and [Sequence Diagram](https://pandao.github.io/editor.md/examples/sequence-diagram.html);
- Supports [decoding and parsing of HTML tags, with custom filtering of tags and attributes](https://pandao.github.io/editor.md/examples/html-tags-decode.html), offering solid security and almost unlimited extensibility;
- Supports AMD / CMD modular loading (supports [Require.js](https://pandao.github.io/editor.md/examples/use-requirejs.html) & [Sea.js](https://pandao.github.io/editor.md/examples/use-seajs.html)), and supports [custom extension plugins](https://pandao.github.io/editor.md/examples/define-plugin.html);
- Compatible with mainstream browsers (IE8+) and [Zepto.js](https://pandao.github.io/editor.md/examples/use-zepto.html), and works on tablets such as the iPad;

#### Examples

[https://pandao.github.io/editor.md/examples/index.html](https://pandao.github.io/editor.md/examples/index.html)

#### Download & install

[Github
[Github download](https://github.com/pandao/editor.md/archive/master.zip)

Bower install:

```shell
bower install editor.md
```

#### Usage

HTML:

```html
<link rel="stylesheet" href="editormd.min.css" />
<div id="editormd">
    <textarea style="display:none;">### Hello Editor.md !</textarea>
</div>
```

> Tip: Editor.md can auto-append the `<textarea>` tag.

JavaScript:

```html
<script src="jquery.min.js"></script>
<script src="editormd.min.js"></script>
<script type="text/javascript">
    $(function() {
        var editor = editormd("editormd", {
            path : "../lib/"  // path to the autoloaded dependencies (codemirror, marked, ...)
        });
        /*
        // or
        var editor = editormd({
            id   : "editormd",
            path : "../lib/"
        });
        */
    });
</script>
```

Using a modular script loader:

- [Using Require.js](https://github.com/pandao/editor.md/tree/master/examples/use-requirejs.html)
- [Using Sea.js](https://github.com/pandao/editor.md/tree/master/examples/use-seajs.html)

#### Dependencies

- [CodeMirror](http://codemirror.net/ "CodeMirror")
- [marked](https://github.com/chjj/marked "marked")
- [jQuery](http://jquery.com/ "jQuery")
- [FontAwesome](http://fontawesome.io/ "FontAwesome")
- [github-markdown.css](https://github.com/sindresorhus/github-markdown-css "github-markdown.css")
- [KaTeX](http://khan.github.io/KaTeX/ "KaTeX")
- [prettify.js](http://code.google.com/p/google-code-prettify/ "prettify.js")
- [Raphael.js](http://raphaeljs.com/ "Raphael.js")
- [flowchart.js](http://adrai.github.io/flowchart.js/ "flowchart.js")
- [sequence-diagram.js](http://bramp.github.io/js-sequence-diagrams/ "sequence-diagram.js")
- [Prefixes.scss](https://github.com/pandao/prefixes.scss "Prefixes.scss")

#### Changes

[Change logs](https://github.com/pandao/editor.md/blob/master/CHANGE.md)

#### License

The MIT License.

Copyright (c) 2015 Pandao
{ "pile_set_name": "Github" }
$
$ Alldifferent except 0 in Essence'.
$
$ Decomposition of the global constraint alldifferent_except_0.
$
$ From the Global Constraint Catalogue:
$ http://www.emn.fr/x-info/sdemasse/gccat/sec4.6.html
$ """
$ Enforce all variables of the collection VARIABLES to take distinct values, except those
$ variables that are assigned to 0.
$
$ Example
$ (<5, 0, 1, 9, 0, 3>)
$
$ The alldifferent_except_0 constraint holds since all the values (that are different from 0)
$ 5, 1, 9 and 3 are distinct.
$ """
$
$ Note: Essence' doesn't have predicates.
$
$ Compare with the following models:
$ * Comet: http://www.hakank.org/comet/alldifferent_except_0.co
$ * ECLiPSe: http://www.hakank.org/eclipse/alldifferent_except_0.ecl
$ * Gecode: http://www.hakank.org/gecode/alldifferent_except_0.cpp
$ * Gecode/R: http://www.hakank.org/gecode_r/all_different_except_0.rb
$ * MiniZinc: http://www.hakank.org/minizinc/alldifferent_except_0.mzn
$ * Choco: http://www.hakank.org/choco/AllDifferentExcept0_test.java
$ * JaCoP: http://www.hakank.org/JaCoP/AllDifferentExcept0_test.java
$
$ Model created by Hakan Kjellerstrand, [email protected]
$ See also my Essence'/Tailor page: http://www.hakank.org/minion_tailor
ESSENCE' 1.0

given n : int
find x : matrix indexed by [int(1..n)] of int(0..9)
find z : int(0..10000) $ number of zeros in x

param n be 6

such that
   z = (sum i : int(1..n) . x[i] = 0),
   z = 2, $ number of 0's in x

   $ all different except 0
   forall i,j : int(1..n) . (
      (i != j) => (((x[i] != 0) /\ (x[j] != 0)) => (x[i] != x[j]))
   ),

   $ symmetry breaking: sorted
   forall i,j : int(2..n) . x[i-1] <= x[i]
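To cross-check the decomposition's logic outside the constraint solver, here is a small Python sketch. It is illustrative only; the function name and the reduced 0..5 domain (instead of the model's 0..9, to keep the brute force quick) are mine, not part of the model.

```python
from itertools import product

def alldifferent_except_0(xs):
    """True iff all values in xs other than 0 are pairwise distinct."""
    nonzero = [v for v in xs if v != 0]
    return len(nonzero) == len(set(nonzero))

assert alldifferent_except_0([5, 0, 1, 9, 0, 3])      # the catalogue example holds
assert not alldifferent_except_0([5, 0, 5, 9, 0, 3])  # repeated non-zero value fails

# Mirror the model's constraints: length-6 vectors with exactly two zeros,
# sorted ascending for symmetry breaking, non-zero values pairwise distinct.
solutions = [xs for xs in product(range(6), repeat=6)
             if xs.count(0) == 2
             and list(xs) == sorted(xs)
             and alldifferent_except_0(xs)]
print(len(solutions), solutions)  # 5 solutions, e.g. (0, 0, 1, 2, 3, 4)
```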
{ "pile_set_name": "Github" }
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/views/frame/desktop_browser_frame_auralinux.h" #include "base/command_line.h" #include "chrome/browser/shell_integration_linux.h" #include "chrome/browser/ui/views/frame/browser_frame.h" #include "chrome/browser/ui/views/frame/browser_view.h" #include "chrome/common/chrome_switches.h" #include "chrome/common/pref_names.h" #include "ui/views/widget/widget.h" DesktopBrowserFrameAuraLinux::DesktopBrowserFrameAuraLinux( BrowserFrame* browser_frame, BrowserView* browser_view) : DesktopBrowserFrameAura(browser_frame, browser_view) { use_custom_frame_pref_.Init( prefs::kUseCustomChromeFrame, browser_view->browser()->profile()->GetPrefs(), base::Bind(&DesktopBrowserFrameAuraLinux::OnUseCustomChromeFrameChanged, base::Unretained(this))); } DesktopBrowserFrameAuraLinux::~DesktopBrowserFrameAuraLinux() { } views::Widget::InitParams DesktopBrowserFrameAuraLinux::GetWidgetParams() { views::Widget::InitParams params; params.native_widget = this; // Set up a custom WM_CLASS for some sorts of window types. This allows // task switchers in X11 environments to distinguish between main browser // windows and e.g app windows. const base::CommandLine& command_line = *base::CommandLine::ForCurrentProcess(); const Browser& browser = *browser_view()->browser(); params.wm_class_class = shell_integration_linux::GetProgramClassName(); params.wm_class_name = params.wm_class_class; if (browser.is_app() && !browser.is_devtools()) { // This window is a hosted app or v1 packaged app. // NOTE: v2 packaged app windows are created by ChromeNativeAppWindowViews. params.wm_class_name = web_app::GetWMClassFromAppName(browser.app_name()); } else if (command_line.HasSwitch(switches::kUserDataDir)) { // Set the class name to e.g. "Chrome (/tmp/my-user-data)". The // class name will show up in the alt-tab list in gnome-shell if // you're running a binary that doesn't have a matching .desktop // file. const std::string user_data_dir = command_line.GetSwitchValueNative(switches::kUserDataDir); params.wm_class_name += " (" + user_data_dir + ")"; } const char kX11WindowRoleBrowser[] = "browser"; const char kX11WindowRolePopup[] = "pop-up"; params.wm_role_name = browser_view()->browser()->is_type_tabbed() ? std::string(kX11WindowRoleBrowser) : std::string(kX11WindowRolePopup); params.remove_standard_frame = UseCustomFrame(); return params; } bool DesktopBrowserFrameAuraLinux::UseCustomFrame() const { return use_custom_frame_pref_.GetValue() && browser_view()->IsBrowserTypeNormal(); } void DesktopBrowserFrameAuraLinux::OnUseCustomChromeFrameChanged() { // Tell the window manager to add or remove system borders. browser_frame()->set_frame_type( UseCustomFrame() ? views::Widget::FRAME_TYPE_FORCE_CUSTOM : views::Widget::FRAME_TYPE_FORCE_NATIVE); browser_frame()->FrameTypeChanged(); }
{ "pile_set_name": "Github" }
r4 r8 mib''8 do''4 do''4 | re''4 re''8. re''16 si'4. si'8 | do''4 do''16 re''16 mib''8 re''4 sol''4 ~| sol''8. fa''16 fa''8. fa''16 re''4 mib''8. mib''16 | mib''4 re''8. re''16 re''4 do''8 la'8 | fad'4 sol'8. sol'16 sol'8. la'16 fad'8. sol'16 | sol'4 si'8 mib''8 do''4 fa''8 re''8 | si'4 do''8. do''16 do''8. si'16 si'8. do''16 | do''2
{ "pile_set_name": "Github" }
// Boost.Units - A C++ library for zero-overhead dimensional analysis and // unit/quantity manipulation and conversion // // Copyright (C) 2003-2008 Matthias Christian Schabel // Copyright (C) 2007-2008 Steven Watanabe // // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef BOOST_UNITS_SI_CANDELA_BASE_UNIT_HPP #define BOOST_UNITS_SI_CANDELA_BASE_UNIT_HPP #include <string> #include <boost/units/config.hpp> #include <boost/units/base_unit.hpp> #include <boost/units/physical_dimensions/luminous_intensity.hpp> namespace boost { namespace units { namespace si { struct candela_base_unit : public base_unit<candela_base_unit, luminous_intensity_dimension, -3> { static std::string name() { return("candela"); } static std::string symbol() { return("cd"); } }; } // namespace si } // namespace units } // namespace boost #if BOOST_UNITS_HAS_BOOST_TYPEOF #include BOOST_TYPEOF_INCREMENT_REGISTRATION_GROUP() BOOST_TYPEOF_REGISTER_TYPE(boost::units::si::candela_base_unit) #endif //#include <boost/units/base_units/detail/conversions.hpp> #endif // BOOST_UNITS_SI_CANDELA_BASE_UNIT_HPP
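For orientation, a small usage sketch (not part of this header): constructing an SI luminous-intensity quantity built on this base unit. It assumes the standard SI system headers, which define the `si::luminous_intensity` unit and the `si::candela` constant, plus the stream operators from `io.hpp`.

```cpp
#include <iostream>

#include <boost/units/quantity.hpp>
#include <boost/units/systems/si/luminous_intensity.hpp>
#include <boost/units/systems/si/io.hpp>

int main() {
    using namespace boost::units;

    // A strongly typed luminous intensity; mixing dimensions is a compile error.
    quantity<si::luminous_intensity> intensity = 30.0 * si::candela;

    std::cout << intensity << std::endl;  // prints "30 cd", via symbol() above
    return 0;
}
```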
{ "pile_set_name": "Github" }
-----BEGIN PRIVATE KEY----- MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC6iFGoRI4W1kH9 braIBjYQPTwT2erkNUq07PVoV2wke8HHJajg2B+9sZwGm24ahvJr4q9adWtqZHEI eqVap0WH9xzVJJwCfs1D/B5p0DggKZOrIMNJ5Nu5TMJrbA7tFYIP8X6taRqx0wI6 iypB7qdw4A8Njf1mCyuwJJKkfbmIYXmQsVeQPdI7xeC4SB+oN9OIQ+8nFthVt2Za qn4CkC86exCABiTMHGyXrZZhW7filhLAdTGjDJHdtMr3/K0dJdMJ77kXDqdo4bN7 LyJvaeO0ipVhHe4m1iWdq5EITjbLHCQELL8Wiy/l8Y+ZFzG4s/5JI/pyUcQx1QOs 2hgKNe2NAgMBAAECggEBAJ7LzjhhpFTsseD+j4XdQ8kvWCXOLpl4hNDhqUnaosWs VZskBFDlrJ/gw+McDu+mUlpl8MIhlABO4atGPd6e6CKHzJPnRqkZKcXmrD2IdT9s JbpZeec+XY+yOREaPNq4pLDN9fnKsF8SM6ODNcZLVWBSXn47kq18dQTPHcfLAFeI r8vh6Pld90AqFRUw1YCDRoZOs3CqeZVqWHhiy1M3kTB/cNkcltItABppAJuSPGgz iMnzbLm16+ZDAgQceNkIIGuHAJy4yrrK09vbJ5L7kRss9NtmA1hb6a4Mo7jmQXqg SwbkcOoaO1gcoDpngckxW2KzDmAR8iRyWUbuxXxtlEECgYEA3W4dT//r9o2InE0R TNqqnKpjpZN0KGyKXCmnF7umA3VkTVyqZ0xLi8cyY1hkYiDkVQ12CKwn1Vttt0+N gSfvj6CQmLaRR94GVXNEfhg9Iv59iFrOtRPZWB3V4HwakPXOCHneExNx7O/JznLp xD3BJ9I4GQ3oEXc8pdGTAfSMdCsCgYEA16dz2evDgKdn0v7Ak0rU6LVmckB3Gs3r ta15b0eP7E1FmF77yVMpaCicjYkQL63yHzTi3UlA66jAnW0fFtzClyl3TEMnXpJR 3b5JCeH9O/Hkvt9Go5uLODMo70rjuVuS8gcK8myefFybWH/t3gXo59hspXiG+xZY EKd7mEW8MScCgYEAlkcrQaYQwK3hryJmwWAONnE1W6QtS1oOtOnX6zWBQAul3RMs 2xpekyjHu8C7sBVeoZKXLt+X0SdR2Pz2rlcqMLHqMJqHEt1OMyQdse5FX8CT9byb WS11bmYhR08ywHryL7J100B5KzK6JZC7smGu+5WiWO6lN2VTFb6cJNGRmS0CgYAo tFCnp1qFZBOyvab3pj49lk+57PUOOCPvbMjo+ibuQT+LnRIFVA8Su+egx2got7pl rYPMpND+KiIBFOGzXQPVqFv+Jwa9UPzmz83VcbRspiG47UfWBbvnZbCqSgZlrCU2 TaIBVAMuEgS4VZ0+NPtbF3yaVv+TUQpaSmKHwVHeLQKBgCgGe5NVgB0u9S36ltit tYlnPPjuipxv9yruq+nva+WKT0q/BfeIlH3IUf2qNFQhR6caJGv7BU7naqNGq80m ks/J5ExR5vBpxzXgc7oBn2pyFJYckbJoccrqv48GRBigJpDjmo1f8wZ7fNt/ULH1 NBinA5ZsT8d0v3QCr2xDJH9D -----END PRIVATE KEY-----
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*-
# TextRNN: 1. embedding layer, 2. Bi-LSTM layer, 3. concat output, 4. FC layer, 5. softmax
import tensorflow as tf
from tensorflow.contrib import rnn
import numpy as np


class TextRNN:
    def __init__(self, num_classes, learning_rate, batch_size, decay_steps, decay_rate, sequence_length,
                 vocab_size, embed_size, is_training, initializer=tf.random_normal_initializer(stddev=0.1)):
        """init all hyperparameters here"""
        # set hyperparameters
        self.num_classes = num_classes
        self.batch_size = batch_size
        self.sequence_length = sequence_length
        self.vocab_size = vocab_size
        self.embed_size = embed_size
        self.hidden_size = embed_size
        self.is_training = is_training
        self.learning_rate = learning_rate
        self.initializer = initializer
        self.num_sampled = 20

        # add placeholders (X, label)
        self.input_x = tf.placeholder(tf.int32, [None, self.sequence_length], name="input_x")  # X
        self.input_y = tf.placeholder(tf.int32, [None], name="input_y")  # y
        self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")

        self.global_step = tf.Variable(0, trainable=False, name="Global_Step")
        self.epoch_step = tf.Variable(0, trainable=False, name="Epoch_Step")
        self.epoch_increment = tf.assign(self.epoch_step, tf.add(self.epoch_step, tf.constant(1)))
        self.decay_steps, self.decay_rate = decay_steps, decay_rate

        self.instantiate_weights()
        self.logits = self.inference()  # [None, self.num_classes]. main computation graph is here.
        if not is_training:
            return
        self.loss_val = self.loss()  # --> self.loss_nce()
        self.train_op = self.train()
        self.predictions = tf.argmax(self.logits, axis=1, name="predictions")  # shape: [None,]
        correct_prediction = tf.equal(tf.cast(self.predictions, tf.int32), self.input_y)  # [batch_size]
        self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name="Accuracy")  # shape=()

    def instantiate_weights(self):
        """define all weights here"""
        with tf.name_scope("embedding"):  # embedding matrix
            self.Embedding = tf.get_variable("Embedding", shape=[self.vocab_size, self.embed_size], initializer=self.initializer)  # [vocab_size, embed_size]
        self.W_projection = tf.get_variable("W_projection", shape=[self.hidden_size * 2, self.num_classes], initializer=self.initializer)  # [hidden_size*2, num_classes]
        self.b_projection = tf.get_variable("b_projection", shape=[self.num_classes])  # [num_classes]

    def inference(self):
        """main computation graph here: 1. embedding layer, 2. Bi-LSTM layer, 3. concat, 4. FC layer, 5. softmax"""
        # 1. get embedding of words in the sentence
        self.embedded_words = tf.nn.embedding_lookup(self.Embedding, self.input_x)  # [None, sentence_length, embed_size]

        # 2. Bi-LSTM layer: define lstm cells and get lstm cell output
        lstm_fw_cell = rnn.BasicLSTMCell(self.hidden_size)  # forward direction cell
        lstm_bw_cell = rnn.BasicLSTMCell(self.hidden_size)  # backward direction cell
        if self.dropout_keep_prob is not None:
            lstm_fw_cell = rnn.DropoutWrapper(lstm_fw_cell, output_keep_prob=self.dropout_keep_prob)
            lstm_bw_cell = rnn.DropoutWrapper(lstm_bw_cell, output_keep_prob=self.dropout_keep_prob)
        # bidirectional_dynamic_rnn: input: [batch_size, max_time, input_size]
        # output: a tuple (outputs, output_states), where outputs is a tuple
        # (output_fw, output_bw) containing the forward and backward rnn output `Tensor`s.
        outputs, _ = tf.nn.bidirectional_dynamic_rnn(lstm_fw_cell, lstm_bw_cell, self.embedded_words, dtype=tf.float32)  # each direction: [batch_size, sequence_length, hidden_size]
        print("outputs:===>", outputs)  # outputs: (<tf.Tensor 'bidirectional_rnn/fw/fw/transpose:0' shape=(?, 5, 100) dtype=float32>, <tf.Tensor 'ReverseV2:0' shape=(?, 5, 100) dtype=float32>)

        # 3. concat output
        output_rnn = tf.concat(outputs, axis=2)  # [batch_size, sequence_length, hidden_size*2]
        # self.output_rnn_last = tf.reduce_mean(output_rnn, axis=1)  # mean pooling: [batch_size, hidden_size*2]
        self.output_rnn_last = output_rnn[:, -1, :]  # last time step: [batch_size, hidden_size*2]  # TODO
        print("output_rnn_last:", self.output_rnn_last)  # <tf.Tensor 'strided_slice:0' shape=(?, 200) dtype=float32>

        # 4. logits (use a linear layer)
        with tf.name_scope("output"):
            logits = tf.matmul(self.output_rnn_last, self.W_projection) + self.b_projection  # [batch_size, num_classes]
        return logits

    def loss(self, l2_lambda=0.0001):
        with tf.name_scope("loss"):
            # input: `logits` has shape [batch_size, num_classes], `labels` has shape [batch_size]
            # output: a 1-D `Tensor` of length `batch_size` with the softmax cross-entropy loss
            losses = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.input_y, logits=self.logits)
            loss = tf.reduce_mean(losses)  # shape=()
            l2_losses = tf.add_n([tf.nn.l2_loss(v) for v in tf.trainable_variables() if 'bias' not in v.name]) * l2_lambda
            loss = loss + l2_losses
        return loss

    def loss_nce(self, l2_lambda=0.0001):  # 0.0001 --> 0.001
        """calculate loss using NCE (noise-contrastive estimation)"""
        # Compute the average NCE loss for the batch. tf.nn.nce_loss automatically
        # draws a new sample of the negative labels each time the loss is evaluated.
        if self.is_training:  # training
            labels = tf.expand_dims(self.input_y, 1)  # [batch_size,] ---> [batch_size, 1]
            loss = tf.reduce_mean(
                tf.nn.nce_loss(
                    weights=tf.transpose(self.W_projection),  # [num_classes, hidden_size*2]; nce_weights: shape [num_classes, dim]
                    biases=self.b_projection,                 # nce_biases: shape [num_classes]
                    labels=labels,                            # train labels: `int64`, shape [batch_size, num_true]
                    inputs=self.output_rnn_last,              # [batch_size, hidden_size*2]; forward activations of the input network
                    num_sampled=self.num_sampled,             # scalar. 100
                    num_classes=self.num_classes,             # scalar. 1999
                    partition_strategy="div"))
        l2_losses = tf.add_n([tf.nn.l2_loss(v) for v in tf.trainable_variables() if 'bias' not in v.name]) * l2_lambda
        loss = loss + l2_losses
        return loss

    def train(self):
        """based on the loss, use an exponentially decayed learning rate with Adam to update parameters"""
        learning_rate = tf.train.exponential_decay(self.learning_rate, self.global_step, self.decay_steps, self.decay_rate, staircase=True)
        train_op = tf.contrib.layers.optimize_loss(self.loss_val, global_step=self.global_step, learning_rate=learning_rate, optimizer="Adam")
        return train_op


def test():
    # Below is a smoke test; to use this for text classification, first transform
    # sentences to indices of the vocabulary, then feed the data to the graph.
    num_classes = 10
    learning_rate = 0.01
    batch_size = 8
    decay_steps = 1000
    decay_rate = 0.9
    sequence_length = 5
    vocab_size = 10000
    embed_size = 100
    is_training = True
    dropout_keep_prob = 1  # 0.5
    textRNN = TextRNN(num_classes, learning_rate, batch_size, decay_steps, decay_rate, sequence_length, vocab_size, embed_size, is_training)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for i in range(100):
            input_x = np.zeros((batch_size, sequence_length))  # [batch_size, sequence_length]
            input_y = np.array([1, 0, 1, 1, 1, 2, 1, 1])  # [batch_size]
            loss, acc, predict, _ = sess.run(
                [textRNN.loss_val, textRNN.accuracy, textRNN.predictions, textRNN.train_op],
                feed_dict={textRNN.input_x: input_x, textRNN.input_y: input_y, textRNN.dropout_keep_prob: dropout_keep_prob})
            print("loss:", loss, "acc:", acc, "label:", input_y, "prediction:", predict)


if __name__ == "__main__":
    test()
{ "pile_set_name": "Github" }
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/storagegateway/model/CachediSCSIVolume.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace StorageGateway { namespace Model { CachediSCSIVolume::CachediSCSIVolume() : m_volumeARNHasBeenSet(false), m_volumeIdHasBeenSet(false), m_volumeTypeHasBeenSet(false), m_volumeStatusHasBeenSet(false), m_volumeAttachmentStatusHasBeenSet(false), m_volumeSizeInBytes(0), m_volumeSizeInBytesHasBeenSet(false), m_volumeProgress(0.0), m_volumeProgressHasBeenSet(false), m_sourceSnapshotIdHasBeenSet(false), m_volumeiSCSIAttributesHasBeenSet(false), m_createdDateHasBeenSet(false), m_volumeUsedInBytes(0), m_volumeUsedInBytesHasBeenSet(false), m_kMSKeyHasBeenSet(false), m_targetNameHasBeenSet(false) { } CachediSCSIVolume::CachediSCSIVolume(JsonView jsonValue) : m_volumeARNHasBeenSet(false), m_volumeIdHasBeenSet(false), m_volumeTypeHasBeenSet(false), m_volumeStatusHasBeenSet(false), m_volumeAttachmentStatusHasBeenSet(false), m_volumeSizeInBytes(0), m_volumeSizeInBytesHasBeenSet(false), m_volumeProgress(0.0), m_volumeProgressHasBeenSet(false), m_sourceSnapshotIdHasBeenSet(false), m_volumeiSCSIAttributesHasBeenSet(false), m_createdDateHasBeenSet(false), m_volumeUsedInBytes(0), m_volumeUsedInBytesHasBeenSet(false), m_kMSKeyHasBeenSet(false), m_targetNameHasBeenSet(false) { *this = jsonValue; } CachediSCSIVolume& CachediSCSIVolume::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("VolumeARN")) { m_volumeARN = jsonValue.GetString("VolumeARN"); m_volumeARNHasBeenSet = true; } if(jsonValue.ValueExists("VolumeId")) { m_volumeId = jsonValue.GetString("VolumeId"); m_volumeIdHasBeenSet = true; } if(jsonValue.ValueExists("VolumeType")) { m_volumeType = jsonValue.GetString("VolumeType"); m_volumeTypeHasBeenSet = true; } if(jsonValue.ValueExists("VolumeStatus")) { m_volumeStatus = jsonValue.GetString("VolumeStatus"); m_volumeStatusHasBeenSet = true; } if(jsonValue.ValueExists("VolumeAttachmentStatus")) { m_volumeAttachmentStatus = jsonValue.GetString("VolumeAttachmentStatus"); m_volumeAttachmentStatusHasBeenSet = true; } if(jsonValue.ValueExists("VolumeSizeInBytes")) { m_volumeSizeInBytes = jsonValue.GetInt64("VolumeSizeInBytes"); m_volumeSizeInBytesHasBeenSet = true; } if(jsonValue.ValueExists("VolumeProgress")) { m_volumeProgress = jsonValue.GetDouble("VolumeProgress"); m_volumeProgressHasBeenSet = true; } if(jsonValue.ValueExists("SourceSnapshotId")) { m_sourceSnapshotId = jsonValue.GetString("SourceSnapshotId"); m_sourceSnapshotIdHasBeenSet = true; } if(jsonValue.ValueExists("VolumeiSCSIAttributes")) { m_volumeiSCSIAttributes = jsonValue.GetObject("VolumeiSCSIAttributes"); m_volumeiSCSIAttributesHasBeenSet = true; } if(jsonValue.ValueExists("CreatedDate")) { m_createdDate = jsonValue.GetDouble("CreatedDate"); m_createdDateHasBeenSet = true; } if(jsonValue.ValueExists("VolumeUsedInBytes")) { m_volumeUsedInBytes = jsonValue.GetInt64("VolumeUsedInBytes"); m_volumeUsedInBytesHasBeenSet = true; } if(jsonValue.ValueExists("KMSKey")) { m_kMSKey = jsonValue.GetString("KMSKey"); m_kMSKeyHasBeenSet = true; } if(jsonValue.ValueExists("TargetName")) { m_targetName = jsonValue.GetString("TargetName"); m_targetNameHasBeenSet = true; } return *this; } JsonValue CachediSCSIVolume::Jsonize() const { JsonValue payload; if(m_volumeARNHasBeenSet) { payload.WithString("VolumeARN", m_volumeARN); } 
if(m_volumeIdHasBeenSet) { payload.WithString("VolumeId", m_volumeId); } if(m_volumeTypeHasBeenSet) { payload.WithString("VolumeType", m_volumeType); } if(m_volumeStatusHasBeenSet) { payload.WithString("VolumeStatus", m_volumeStatus); } if(m_volumeAttachmentStatusHasBeenSet) { payload.WithString("VolumeAttachmentStatus", m_volumeAttachmentStatus); } if(m_volumeSizeInBytesHasBeenSet) { payload.WithInt64("VolumeSizeInBytes", m_volumeSizeInBytes); } if(m_volumeProgressHasBeenSet) { payload.WithDouble("VolumeProgress", m_volumeProgress); } if(m_sourceSnapshotIdHasBeenSet) { payload.WithString("SourceSnapshotId", m_sourceSnapshotId); } if(m_volumeiSCSIAttributesHasBeenSet) { payload.WithObject("VolumeiSCSIAttributes", m_volumeiSCSIAttributes.Jsonize()); } if(m_createdDateHasBeenSet) { payload.WithDouble("CreatedDate", m_createdDate.SecondsWithMSPrecision()); } if(m_volumeUsedInBytesHasBeenSet) { payload.WithInt64("VolumeUsedInBytes", m_volumeUsedInBytes); } if(m_kMSKeyHasBeenSet) { payload.WithString("KMSKey", m_kMSKey); } if(m_targetNameHasBeenSet) { payload.WithString("TargetName", m_targetName); } return payload; } } // namespace Model } // namespace StorageGateway } // namespace Aws
{ "pile_set_name": "Github" }
{ "version": 3, "file": "highlight.css", "sources": [], "sourcesContent": [], "names": [], "mappings": "" }
{ "pile_set_name": "Github" }
// // Progress bars // -------------------------------------------------- // Bar animations // ------------------------- // WebKit // @-webkit-keyframes progress-bar-stripes { // from { background-position: 40px 0; } // to { background-position: 0 0; } // } // // // Spec and IE10+ // @keyframes progress-bar-stripes { // from { background-position: 40px 0; } // to { background-position: 0 0; } // } // Bar itself // ------------------------- // Outer container // .progress { // overflow: hidden; // height: $line-height-computed; // margin-bottom: $line-height-computed; // background-color: $progress-bg; // border-radius: $progress-border-radius; // @include box-shadow(inset 0 1px 2px rgba(0,0,0,.1)); // } // Bar of progress // .progress-bar { // float: left; // width: 0%; // height: 100%; // font-size: $font-size-small; // line-height: $line-height-computed; // color: $progress-bar-color; // text-align: center; // background-color: $progress-bar-bg; // @include box-shadow(inset 0 -1px 0 rgba(0,0,0,.15)); // @include transition(width .6s ease); // } // Striped bars // // `.progress-striped .progress-bar` is deprecated as of v3.2.0 in favor of the // `.progress-bar-striped` class, which you just add to an existing // `.progress-bar`. // .progress-striped .progress-bar, // .progress-bar-striped { // @include gradient-striped; // background-size: 40px 40px; // } // Call animation for the active one // // `.progress.active .progress-bar` is deprecated as of v3.2.0 in favor of the // `.progress-bar.active` approach. // .progress.active .progress-bar, // .progress-bar.active { // @include animation(progress-bar-stripes 2s linear infinite); // } // Variations // ------------------------- .progress-bar-primary { @include progress-bar-variant($progress-bar-bg); } .progress-bar-success { @include progress-bar-variant($progress-bar-success-bg); } .progress-bar-info { @include progress-bar-variant($progress-bar-info-bg); } .progress-bar-warning { @include progress-bar-variant($progress-bar-warning-bg); } .progress-bar-danger { @include progress-bar-variant($progress-bar-danger-bg); }
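Only the variant rules are live here; the base bar styles are kept commented out for reference. Defining an extra variant follows the same pattern — a sketch, where the class name is hypothetical and `$brand-primary` stands in for any color variable the theme defines:

```scss
// Hypothetical extra variant, built exactly like the ones above:
// generates background-color plus the striped-gradient override.
.progress-bar-brand {
  @include progress-bar-variant($brand-primary);
}
```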
{ "pile_set_name": "Github" }
module Agda.TypeChecking.Empty ( isEmptyType , isEmptyTel , ensureEmptyType , checkEmptyTel ) where import Control.Monad.Except import Data.Semigroup import qualified Data.Set as Set import Agda.Syntax.Common import Agda.Syntax.Internal import Agda.Syntax.Internal.MetaVars import Agda.Syntax.Position import Agda.TypeChecking.Monad import Agda.TypeChecking.Coverage import Agda.TypeChecking.Coverage.Match ( fromSplitPatterns ) import Agda.TypeChecking.Records import Agda.TypeChecking.Substitute import Agda.TypeChecking.Telescope import Agda.Utils.Either import Agda.Utils.Monad data ErrorNonEmpty = Fail -- ^ Generic failure | FailBecause TCErr -- ^ Failure with informative error | DontKnow Blocker -- ^ Emptyness check blocked instance Semigroup ErrorNonEmpty where DontKnow u1 <> DontKnow u2 = DontKnow $ u1 <> u2 -- Both must unblock for this to proceed e@DontKnow{} <> _ = e _ <> e@DontKnow{} = e FailBecause err <> _ = FailBecause err Fail <> err = err instance Monoid ErrorNonEmpty where mempty = Fail mappend = (Data.Semigroup.<>) -- | Ensure that a type is empty. -- This check may be postponed as emptiness constraint. ensureEmptyType :: Range -- ^ Range of the absurd pattern. -> Type -- ^ Type that should be empty (empty data type or iterated product of such). -> TCM () ensureEmptyType r t = caseEitherM (checkEmptyType r t) failure return where failure (DontKnow u) = addConstraint u $ IsEmpty r t failure (FailBecause err) = throwError err failure Fail = typeError $ ShouldBeEmpty t [] -- | Check whether a type is empty. isEmptyType :: Type -> TCM Bool isEmptyType ty = isRight <$> checkEmptyType noRange ty -- | Check whether some type in a telescope is empty. isEmptyTel :: Telescope -> TCM Bool isEmptyTel tel = isRight <$> checkEmptyTel noRange tel -- Either the type is possibly non-empty (Left err) or it is really empty -- (Right ()). checkEmptyType :: Range -> Type -> TCM (Either ErrorNonEmpty ()) checkEmptyType range t = do mr <- tryRecordType t case mr of -- If t is blocked or a meta, we cannot decide emptiness now. Postpone. Left (Blocked b t) -> return $ Left (DontKnow b) -- If t is not a record type, try to split Left (NotBlocked nb t) -> do -- from the current context xs:ts, create a pattern list -- xs _ : ts t and try to split on _ (the last variable) tel0 <- getContextTelescope let gamma = telToList tel0 ++ [domFromArg $ defaultArg (underscore, t)] tel = telFromList gamma ps = teleNamedArgs tel dontAssignMetas $ do r <- splitLast Inductive tel ps case r of Left UnificationStuck{} -> return $ Left $ DontKnow $ unblockOnAnyMetaIn tel Left _ -> return $ Left Fail Right cov -> do let ps = map (namedArg . last . fromSplitPatterns . scPats) $ splitClauses cov if (null ps) then return (Right ()) else Left . FailBecause <$> do typeError_ $ ShouldBeEmpty t ps -- If t is a record type, see if any of the field types is empty Right (r, pars, def) -> do if | NoEta{} <- recEtaEquality def -> return $ Left Fail | otherwise -> void <$> do checkEmptyTel range $ recTel def `apply` pars -- | Check whether one of the types in the given telescope is constructor-less -- and if yes, return its index in the telescope (0 = leftmost). checkEmptyTel :: Range -> Telescope -> TCM (Either ErrorNonEmpty Int) checkEmptyTel r = loop 0 where loop i EmptyTel = return $ Left Fail loop i (ExtendTel dom tel) = orEitherM [ (i <$) <$> checkEmptyType r (unDom dom) , underAbstraction dom tel $ loop (succ i) ]
{ "pile_set_name": "Github" }
// +build ignore /* * MinIO Cloud Storage, (C) 2019 MinIO, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package main import ( "context" "encoding/json" "log" "github.com/minio/minio/pkg/madmin" ) func main() { // Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY are // dummy values, please replace them with original values. // API requests are secure (HTTPS) if secure=true and insecure (HTTP) otherwise. // New returns an MinIO Admin client object. madmClnt, err := madmin.New("your-minio.example.com:9000", "YOUR-ACCESSKEYID", "YOUR-SECRETACCESSKEY", true) if err != nil { log.Fatalln(err) } locks, err := madmClnt.TopLocks(context.Background()) if err != nil { log.Fatalf("failed due to: %v", err) } out, err := json.Marshal(locks) if err != nil { log.Fatalf("Marshal failed due to: %v", err) } log.Println("Top Locks received successfully: ", string(out)) }
{ "pile_set_name": "Github" }
-----BEGIN RSA PRIVATE KEY----- MIICXQIBAAKBgQDRjVXfw8Uf5iLB2v2JxG9nDvN0B41cvaRsxx/Y8ue9woBLT4qQh TR26LrQWu18RjY1iEknwhxiE38c5MDLbV2nEBDRKv/y8plHEVrmp9uYpohF171cqm G1xKxddeiOgswxR/XAcHxDHMY+w2XJ590QTPhXovUGQVB25F6hZF2MdwIDAQABAoG BAKuErcdLrCuh6M0Xe/KpsC2AlVBhZhmEt/K5QZ4HpBsz+mKqJlaIURsK/u3Pnxbb OT+z330Fasnz/1Uc/5kCWWYLSJyaDOy739ah7uayAbgN2nSBfVOAlAfZ3zy+HhhkB jlzTb2Db6SGyqW644tT3w56PIOpPLkdU83zh6x3NJKBAkEA8YTpMutLXWhSB7mSjw J4TsrAwFOu+3rhgqiZ0s6YSunzTvO0vD37wOtm7D9kqGikfd5HrOcsk3uyjIlwW9A sYQJBAN4dwpkhtUaT7xUw7Sl7kX0FC+436rjveYG69qDYSCjUKBHY6dfb2nXoDPMe nZCS58oMQthvVEpv6TLWzfu1p9cCQDClJJMs8TEPIfaMWgiT5sd37vrN4CCmz2URs GnVUrdEB82PKIgBtANXZpOO8Sc6bGctriIVLdVGxN+pjwfvFgECQQCLE/9heyXSx5 aQok4eB22V5q8Nvp2OpwRiIvuCw2qLhiyA4F2r99HWujJBmQ34FMRmUz8X3r3czi4 q0y2vSpOJAkBzJw76+FJqcXeA3tCzKPTZN13yqhf4nIMYUm7z8TBZLVFqx0XGhjaj NlonyIHW+DOE8gino4ZumCf9goAHPW9x -----END RSA PRIVATE KEY-----
{ "pile_set_name": "Github" }
/* Copyright (c) 2003-2018, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license */ CKEDITOR.dialog.add("placeholder",function(a){var b=a.lang.placeholder;a=a.lang.common.generalTab;return{title:b.title,minWidth:300,minHeight:80,contents:[{id:"info",label:a,title:a,elements:[{id:"name",type:"text",style:"width: 100%;",label:b.name,"default":"",required:!0,validate:CKEDITOR.dialog.validate.regex(/^[^\[\]<>]+$/,b.invalidName),setup:function(a){this.setValue(a.data.name)},commit:function(a){a.setData("name",this.getValue())}}]}]}});
{ "pile_set_name": "Github" }
import json from django.contrib import messages from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import get_object_or_404, redirect, render, reverse from django.urls import reverse from django.utils import timezone from libvirt import libvirtError from accounts.models import UserInstance from admin.decorators import superuser_only from computes.forms import (SocketComputeForm, SshComputeForm, TcpComputeForm, TlsComputeForm) from computes.models import Compute from instances.models import Instance from vrtManager.connection import (CONN_SOCKET, CONN_SSH, CONN_TCP, CONN_TLS, connection_manager, wvmConnect) from vrtManager.hostdetails import wvmHostDetails from . import utils @superuser_only def computes(request): """ :param request: :return: """ computes = Compute.objects.filter().order_by('name') return render(request, 'computes/list.html', {'computes': computes}) @superuser_only def overview(request, compute_id): compute = get_object_or_404(Compute, pk=compute_id) status = 'true' if connection_manager.host_is_up(compute.type, compute.hostname) is True else 'false' conn = wvmHostDetails( compute.hostname, compute.login, compute.password, compute.type, ) hostname, host_arch, host_memory, logical_cpu, model_cpu, uri_conn = conn.get_node_info() hypervisor = conn.get_hypervisors_domain_types() mem_usage = conn.get_memory_usage() emulator = conn.get_emulator(host_arch) version = conn.get_version() lib_version = conn.get_lib_version() conn.close() return render(request, 'overview.html', locals()) @superuser_only def instances(request, compute_id): compute = get_object_or_404(Compute, pk=compute_id) utils.refresh_instance_database(compute) instances = Instance.objects.filter(compute=compute).prefetch_related('userinstance_set') return render(request, 'computes/instances.html', {'compute': compute, 'instances': instances}) @superuser_only def compute_create(request, FormClass): form = FormClass(request.POST or None) if form.is_valid(): form.save() return redirect(reverse('computes')) return render(request, 'computes/form.html', {'form': form}) @superuser_only def compute_update(request, compute_id): compute = get_object_or_404(Compute, pk=compute_id) if compute.type == 1: FormClass = TcpComputeForm elif compute.type == 2: FormClass = SshComputeForm elif compute.type == 3: FormClass = TlsComputeForm elif compute.type == 4: FormClass = SocketComputeForm form = FormClass(request.POST or None, instance=compute) if form.is_valid(): form.save() return redirect(reverse('computes')) return render(request, 'computes/form.html', {'form': form}) @superuser_only def compute_delete(request, compute_id): compute = get_object_or_404(Compute, pk=compute_id) if request.method == 'POST': compute.delete() return redirect('computes') return render( request, 'common/confirm_delete.html', {'object': compute}, ) def compute_graph(request, compute_id): """ :param request: :param compute_id: :return: """ compute = get_object_or_404(Compute, pk=compute_id) try: conn = wvmHostDetails( compute.hostname, compute.login, compute.password, compute.type, ) current_time = timezone.now().strftime("%H:%M:%S") cpu_usage = conn.get_cpu_usage() mem_usage = conn.get_memory_usage() conn.close() except libvirtError: cpu_usage = {'usage': 0} mem_usage = {'usage': 0} current_time = 0 data = json.dumps({ 'cpudata': cpu_usage['usage'], 'memdata': mem_usage, 'timeline': current_time, }) response = HttpResponse() response['Content-Type'] = "text/javascript" response.write(data) return response def 
get_compute_disk_buses(request, compute_id, arch, machine, disk): """ :param request: :param compute_id: :param arch: :param machine: :param disk: :return: """ data = dict() compute = get_object_or_404(Compute, pk=compute_id) try: conn = wvmConnect( compute.hostname, compute.login, compute.password, compute.type, ) disk_device_types = conn.get_disk_device_types(arch, machine) if disk in disk_device_types: if disk == 'disk': data['bus'] = sorted(disk_device_types) elif disk == 'cdrom': data['bus'] = ['ide', 'sata', 'scsi'] elif disk == 'floppy': data['bus'] = ['fdc'] elif disk == 'lun': data['bus'] = ['scsi'] except libvirtError: pass return HttpResponse(json.dumps(data)) def get_compute_machine_types(request, compute_id, arch): """ :param request: :param compute_id: :param arch: :return: """ data = dict() try: compute = get_object_or_404(Compute, pk=compute_id) conn = wvmConnect( compute.hostname, compute.login, compute.password, compute.type, ) data['machines'] = conn.get_machine_types(arch) except libvirtError: pass return HttpResponse(json.dumps(data)) def get_compute_video_models(request, compute_id, arch, machine): """ :param request: :param compute_id: :param arch: :param machine: :return: """ data = dict() try: compute = get_object_or_404(Compute, pk=compute_id) conn = wvmConnect( compute.hostname, compute.login, compute.password, compute.type, ) data['videos'] = conn.get_video_models(arch, machine) except libvirtError: pass return HttpResponse(json.dumps(data)) def get_dom_capabilities(request, compute_id, arch, machine): """ :param request: :param compute_id: :param arch: :param machine: :return: """ data = dict() try: compute = get_object_or_404(Compute, pk=compute_id) conn = wvmConnect( compute.hostname, compute.login, compute.password, compute.type, ) data['videos'] = conn.get_disk_device_types(arch, machine) data['bus'] = conn.get_disk_device_types(arch, machine) except libvirtError: pass return HttpResponse(json.dumps(data))
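For context, these views would be wired up in the app's URLconf. The patterns and route names below are hypothetical, shown only to make the request flow concrete; the real project may use different paths:

```python
# urls.py -- hypothetical routes for the views above
from django.urls import path

from computes import views

urlpatterns = [
    path('computes/', views.computes, name='computes'),
    path('computes/<int:compute_id>/', views.overview, name='overview'),
    path('computes/<int:compute_id>/statistics/', views.compute_graph, name='compute_graph'),
    path('computes/<int:compute_id>/archs/<str:arch>/machines/',
         views.get_compute_machine_types, name='machines'),
]
```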
{ "pile_set_name": "Github" }
CONFIG_MAIN_PROJ_NAME="linux-2.6.22-arm32" CONFIG_ARM=y CONFIG_ARCH_MAGIC="2" CONFIG_COMMON_UTILISE=y CONFIG_PK_BUSYBOX=y CONFIG_BUSYBOX_WGET=y CONFIG_BUSYBOX_VERSION="1.13.3" CONFIG_BUSYBOX_SRC=3 CONFIG_PK_QEMU_SYSTEM=y CONFIG_QEMU_SYSTEM_WGET=y CONFIG_QEMU_SYSTEM_VERSION="3.1.0" CONFIG_QEMU_SYSTEM_SRC=3 CONFIG_ROOTFS=y CONFIG_FS_EXT4=y CONFIG_ROOTFS_SIZE=150 CONFIG_FREEZE_SIZE=512 CONFIG_TOOLCHAIN=y CONFIG_TOOL_ARM_LINUX_GCC341=y CONFIG_LINUX_KERNEL=y CONFIG_LINUX_NEWEST=y CONFIG_LINUX_KERNEL_VERSION="2.6.22" CONFIG_LINUX_BUILD_HIST="Newest" CONFIG_LINUX_KERNEL_WGET=y CONFIG_LINUX_KERNEL_WGET_SITE="http://ftp.sjtu.edu.cn/sites/ftp.kernel.org/pub/linux/kernel/v2.6/" CONFIG_LINUX_KERNEL_SRC=3 CONFIG_LINUX_KERNEL_TAR_TYPE="tar.xz" CONFIG_LINUX_KERNEL_CROSS_GUNEABI=y CONFIG_LINUX_KERNEL_CROSS_COMPILE="arm-linux-gnueabi"
{ "pile_set_name": "Github" }
/* * Copyright 2019 TNG Technology Consulting GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tngtech.junit.dataprovider; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import com.tngtech.junit.dataprovider.resolver.DataProviderMethodResolver; import com.tngtech.junit.dataprovider.resolver.DataProviderResolverContext; import com.tngtech.junit.dataprovider.resolver.DefaultDataProviderMethodResolver; import com.tngtech.junit.dataprovider.resolver.ResolveStrategy; /** * Annotate a test method for using it with a dataprovider. */ @Documented @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.ANNOTATION_TYPE, ElementType.METHOD }) public @interface UseDataProvider { /** * A value to derive the dataprovider method from. In which way depends on the given {@link #resolver()}. Defaults * to {@link DataProviderResolverContext#METHOD_NAME_TO_USE_CONVENTION}. * * @return a value from which the dataprovider method can be derived */ String value() default DataProviderResolverContext.METHOD_NAME_TO_USE_CONVENTION; /** * One or multiple locations where the {@link DataProviderMethodResolver} can look out for a proper dataprovider method. It depends on * the provided {@link #resolver()} how this is used. * * @return a array of {@link Class}es which could be used to derive the dataprovider method */ Class<?>[] location() default {}; /** * The resolvers used to derive the dataprovider method from. It is tried until the first resolver returns a proper dataprovider method * (= not {@code null}) or no more resolvers are available. * * @return the resolver which are used to derive the dataprovider method */ Class<? extends DataProviderMethodResolver>[] resolver() default { DefaultDataProviderMethodResolver.class }; /** * @return strategy how to resolve the dataprovider methods which corresponds to the test method where this annotation is applied. * @see ResolveStrategy */ ResolveStrategy resolveStrategy() default ResolveStrategy.UNTIL_FIRST_MATCH; }
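A minimal usage sketch (not part of this file). It assumes the JUnit Jupiter integration of junit-dataprovider, with its `UseDataProviderExtension` and `@DataProvider` annotation; per the javadoc above, omitting the `value` would instead make the default resolver look the provider up by naming convention:

```java
import static org.junit.jupiter.api.Assertions.assertEquals;

import com.tngtech.junit.dataprovider.DataProvider;
import com.tngtech.junit.dataprovider.UseDataProvider;
import com.tngtech.junit.dataprovider.UseDataProviderExtension;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

@ExtendWith(UseDataProviderExtension.class)
class AdditionTest {

    @DataProvider
    static Object[][] additionProvider() {
        // each row becomes one invocation of the test template below
        return new Object[][] {
            { 1, 2, 3 },
            { -1, 1, 0 },
        };
    }

    @TestTemplate
    @UseDataProvider("additionProvider")
    void addsOperands(int a, int b, int expected) {
        assertEquals(expected, a + b);
    }
}
```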
{ "pile_set_name": "Github" }
<?php /* * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * This software consists of voluntary contributions made by many individuals * and is licensed under the MIT license. For more information, see * <http://www.doctrine-project.org>. */ namespace Doctrine\DBAL\Driver\OCI8; use Doctrine\DBAL\Driver\Connection; use Doctrine\DBAL\Driver\ServerInfoAwareConnection; use Doctrine\DBAL\Platforms\OraclePlatform; /** * OCI8 implementation of the Connection interface. * * @since 2.0 */ class OCI8Connection implements Connection, ServerInfoAwareConnection { /** * @var resource */ protected $dbh; /** * @var integer */ protected $executeMode = OCI_COMMIT_ON_SUCCESS; /** * Creates a Connection to an Oracle Database using oci8 extension. * * @param string $username * @param string $password * @param string $db * @param string|null $charset * @param integer $sessionMode * @param boolean $persistent * * @throws OCI8Exception */ public function __construct($username, $password, $db, $charset = null, $sessionMode = OCI_DEFAULT, $persistent = false) { if (!defined('OCI_NO_AUTO_COMMIT')) { define('OCI_NO_AUTO_COMMIT', 0); } $this->dbh = $persistent ? @oci_pconnect($username, $password, $db, $charset, $sessionMode) : @oci_connect($username, $password, $db, $charset, $sessionMode); if ( ! $this->dbh) { throw OCI8Exception::fromErrorInfo(oci_error()); } } /** * {@inheritdoc} * * @throws \UnexpectedValueException if the version string returned by the database server * does not contain a parsable version number. */ public function getServerVersion() { if ( ! preg_match('/\s+(\d+\.\d+\.\d+\.\d+\.\d+)\s+/', oci_server_version($this->dbh), $version)) { throw new \UnexpectedValueException( sprintf( 'Unexpected database version string "%s". Cannot parse an appropriate version number from it. ' . 'Please report this database version string to the Doctrine team.', oci_server_version($this->dbh) ) ); } return $version[1]; } /** * {@inheritdoc} */ public function requiresQueryForServerVersion() { return false; } /** * {@inheritdoc} */ public function prepare($prepareString) { return new OCI8Statement($this->dbh, $prepareString, $this); } /** * {@inheritdoc} */ public function query() { $args = func_get_args(); $sql = $args[0]; //$fetchMode = $args[1]; $stmt = $this->prepare($sql); $stmt->execute(); return $stmt; } /** * {@inheritdoc} */ public function quote($value, $type=\PDO::PARAM_STR) { if (is_int($value) || is_float($value)) { return $value; } $value = str_replace("'", "''", $value); return "'" . addcslashes($value, "\000\n\r\\\032") . 
"'"; } /** * {@inheritdoc} */ public function exec($statement) { $stmt = $this->prepare($statement); $stmt->execute(); return $stmt->rowCount(); } /** * {@inheritdoc} */ public function lastInsertId($name = null) { if ($name === null) { return false; } OraclePlatform::assertValidIdentifier($name); $sql = 'SELECT ' . $name . '.CURRVAL FROM DUAL'; $stmt = $this->query($sql); $result = $stmt->fetch(\PDO::FETCH_ASSOC); if ($result === false || !isset($result['CURRVAL'])) { throw new OCI8Exception("lastInsertId failed: Query was executed but no result was returned."); } return (int) $result['CURRVAL']; } /** * Returns the current execution mode. * * @return integer */ public function getExecuteMode() { return $this->executeMode; } /** * {@inheritdoc} */ public function beginTransaction() { $this->executeMode = OCI_NO_AUTO_COMMIT; return true; } /** * {@inheritdoc} */ public function commit() { if (!oci_commit($this->dbh)) { throw OCI8Exception::fromErrorInfo($this->errorInfo()); } $this->executeMode = OCI_COMMIT_ON_SUCCESS; return true; } /** * {@inheritdoc} */ public function rollBack() { if (!oci_rollback($this->dbh)) { throw OCI8Exception::fromErrorInfo($this->errorInfo()); } $this->executeMode = OCI_COMMIT_ON_SUCCESS; return true; } /** * {@inheritdoc} */ public function errorCode() { $error = oci_error($this->dbh); if ($error !== false) { $error = $error['code']; } return $error; } /** * {@inheritdoc} */ public function errorInfo() { return oci_error($this->dbh); } }
{ "pile_set_name": "Github" }
package com.ufreedom.demo; import android.graphics.Color; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.View; import com.ufreedom.uikit.FloatingText; import com.ufreedom.uikit.effect.CurveFloatingPathEffect; import com.ufreedom.uikit.effect.CurvePathFloatingAnimator; import com.ufreedom.uikit.effect.ScaleFloatingAnimator; public class MainActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); final View layoutTranslateFloating = findViewById(R.id.layoutTranslateView); final View translateFloatingView = findViewById(R.id.translateView); final FloatingText translateFloatingText = new FloatingText.FloatingTextBuilder(MainActivity.this) .textColor(Color.RED) .textSize(100) .textContent("+1000") .build(); translateFloatingText.attach2Window(); assert layoutTranslateFloating != null; layoutTranslateFloating.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { translateFloatingText.startFloating(translateFloatingView); } }); final FloatingText cubicFloatingText = new FloatingText.FloatingTextBuilder(MainActivity.this) .textColor(Color.RED) .textSize(100) .floatingAnimatorEffect(new CurvePathFloatingAnimator()) .floatingPathEffect(new CurveFloatingPathEffect()) .textContent("Hello! ").build(); cubicFloatingText.attach2Window(); View layoutCurveView = findViewById(R.id.layoutCurveView); final View curveView = findViewById(R.id.curveView); assert curveView != null; assert layoutCurveView != null; layoutCurveView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { cubicFloatingText.startFloating(curveView); } }); View layoutScaleView = findViewById(R.id.layoutScaleView); final View scaleView = findViewById(R.id.scaleView); final FloatingText scaleFloatingText = new FloatingText.FloatingTextBuilder(MainActivity.this) .textColor(Color.parseColor("#7ED321")) .textSize(100) .offsetY(-100) .floatingAnimatorEffect(new ScaleFloatingAnimator()) .textContent("+188").build(); scaleFloatingText.attach2Window(); assert scaleView != null; assert layoutScaleView != null; layoutScaleView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { scaleFloatingText.startFloating(scaleView); } }); } }
{ "pile_set_name": "Github" }
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #if GOOGLE_CUDA || TENSORFLOW_USE_ROCM #include "tensorflow/core/kernels/cwise_ops_gpu_common.cu.h" namespace tensorflow { namespace functor { DEFINE_BINARY7(greater, Eigen::half, float, double, int64, uint8, int8, int16); } // namespace functor } // namespace tensorflow #endif // GOOGLE_CUDA || TENSORFLOW_USE_ROCM
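`DEFINE_BINARY7` above instantiates the GPU functor for the `greater` op across seven dtypes. A quick way to exercise it from Python — a sketch, assuming a CUDA- or ROCm-enabled TensorFlow 1.x build with at least one visible GPU:

```python
import tensorflow as tf

with tf.device("/device:GPU:0"):
    x = tf.constant([1.0, 4.0, 2.5])
    y = tf.constant([2.0, 3.0, 2.5])
    gt = tf.greater(x, y)  # elementwise x > y, dispatched to the kernel above

with tf.Session() as sess:
    print(sess.run(gt))  # [False  True False]
```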
{ "pile_set_name": "Github" }