text
stringlengths
2
1.04M
meta
dict
package org.apache.flink.runtime.state;

import org.apache.flink.util.ExceptionUtils;

import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;

import java.util.Set;
import java.util.UUID;

/**
 * State handle for local copies of {@link IncrementalRemoteKeyedStateHandle}. Consists of a {@link
 * DirectoryStateHandle} that represents the directory of the native RocksDB snapshot, the key
 * groups, and a stream state handle for Flink's state meta data file.
 */
public class IncrementalLocalKeyedStateHandle extends DirectoryKeyedStateHandle
        implements IncrementalKeyedStateHandle {

    private static final long serialVersionUID = 1L;

    /** Id of the checkpoint that created this state handle. */
    @Nonnegative private final long checkpointId;

    /** UUID to identify the backend which created this state handle. */
    @Nonnull private final UUID backendIdentifier;

    /** Handle to Flink's state meta data. */
    @Nonnull private final StreamStateHandle metaDataState;

    /** Set with the ids of all shared state handles created by the checkpoint. */
    @Nonnull private final Set<StateHandleID> sharedStateHandleIDs;

    /**
     * Creates a local incremental keyed state handle.
     *
     * @param backendIdentifier id of the backend that produced this handle
     * @param checkpointId id of the checkpoint that created this handle
     * @param directoryStateHandle directory of the native snapshot (passed to the superclass)
     * @param keyGroupRange key groups covered by this handle (passed to the superclass)
     * @param metaDataState stream handle for Flink's state meta data file
     * @param sharedStateHandleIDs ids of all shared state handles created by the checkpoint
     */
    public IncrementalLocalKeyedStateHandle(
            @Nonnull UUID backendIdentifier,
            @Nonnegative long checkpointId,
            @Nonnull DirectoryStateHandle directoryStateHandle,
            @Nonnull KeyGroupRange keyGroupRange,
            @Nonnull StreamStateHandle metaDataState,
            @Nonnull Set<StateHandleID> sharedStateHandleIDs) {

        super(directoryStateHandle, keyGroupRange);
        this.backendIdentifier = backendIdentifier;
        this.checkpointId = checkpointId;
        this.metaDataState = metaDataState;
        this.sharedStateHandleIDs = sharedStateHandleIDs;
    }

    /** Returns the stream handle for Flink's state meta data file. */
    @Nonnull
    public StreamStateHandle getMetaDataState() {
        return metaDataState;
    }

    @Override
    public long getCheckpointId() {
        return checkpointId;
    }

    @Override
    @Nonnull
    public UUID getBackendIdentifier() {
        return backendIdentifier;
    }

    @Override
    @Nonnull
    public Set<StateHandleID> getSharedStateHandleIDs() {
        return sharedStateHandleIDs;
    }

    // NOTE(review): equality is the superclass state (directory + key groups)
    // plus metaDataState only; checkpointId, backendIdentifier and
    // sharedStateHandleIDs do not participate -- confirm this matches the
    // intended identity semantics of a local state handle.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }

        IncrementalLocalKeyedStateHandle that = (IncrementalLocalKeyedStateHandle) o;

        return getMetaDataState().equals(that.getMetaDataState());
    }

    /**
     * Discards both the snapshot directory (via the superclass) and the meta
     * data handle. Both discards are always attempted even if the first one
     * fails; exceptions are combined via
     * {@link ExceptionUtils#firstOrSuppressed} and rethrown at the end.
     */
    @Override
    public void discardState() throws Exception {

        Exception collectedEx = null;

        try {
            super.discardState();
        } catch (Exception e) {
            collectedEx = e;
        }

        try {
            metaDataState.discardState();
        } catch (Exception e) {
            collectedEx = ExceptionUtils.firstOrSuppressed(e, collectedEx);
        }

        if (collectedEx != null) {
            throw collectedEx;
        }
    }

    /** Total size = directory state size (superclass) + meta data file size. */
    @Override
    public long getStateSize() {
        return super.getStateSize() + metaDataState.getStateSize();
    }

    // Kept consistent with equals(): superclass hash combined with the meta
    // data handle's hash.
    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + getMetaDataState().hashCode();
        return result;
    }

    @Override
    public String toString() {
        return "IncrementalLocalKeyedStateHandle{"
                + "metaDataState="
                + metaDataState
                + "} "
                + super.toString();
    }
}
{ "content_hash": "47877267d74dce4ed37daece041e1bbd", "timestamp": "", "source": "github", "line_count": 131, "max_line_length": 99, "avg_line_length": 27.85496183206107, "alnum_prop": 0.6426418196766237, "repo_name": "rmetzger/flink", "id": "615e34800f63e4c0dfb79e7c21b270cb89cdbf53", "size": "4450", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/state/IncrementalLocalKeyedStateHandle.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1863" }, { "name": "C", "bytes": "847" }, { "name": "Clojure", "bytes": "93543" }, { "name": "Dockerfile", "bytes": "6926" }, { "name": "FreeMarker", "bytes": "82636" }, { "name": "GAP", "bytes": "139514" }, { "name": "HTML", "bytes": "135607" }, { "name": "HiveQL", "bytes": "71445" }, { "name": "Java", "bytes": "83684824" }, { "name": "JavaScript", "bytes": "1829" }, { "name": "Less", "bytes": "65918" }, { "name": "Makefile", "bytes": "5134" }, { "name": "Python", "bytes": "2468561" }, { "name": "Scala", "bytes": "15030324" }, { "name": "Shell", "bytes": "540331" }, { "name": "TypeScript", "bytes": "288463" }, { "name": "q", "bytes": "7939" } ], "symlink_target": "" }
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Data.Entity;
using System.Linq;
using AutoMapper.QueryableExtensions;
using AutoMapper.UnitTests;
using Xunit;
using Shouldly;
using System.Diagnostics;

namespace AutoMapper.IntegrationTests.Net4
{
    // Integration test: a member marked with ExplicitExpansion() inside a
    // mapped collection element must be projected only when the caller asks
    // for it explicitly in ProjectTo's expansion arguments.
    public class ExpandCollections : AutoMapperSpecBase
    {
        // Result of the projected query, populated by Because_of().
        TrainingCourseDto _course;

        protected override MapperConfiguration Configuration => new MapperConfiguration(cfg =>
        {
            cfg.CreateMap<Category, CategoryDto>();
            cfg.CreateMap<TrainingCourse, TrainingCourseDto>();
            // Category is opted out of automatic expansion; it is only
            // materialized when explicitly requested by the caller.
            cfg.CreateMap<TrainingContent, TrainingContentDto>().ForMember(c => c.Category, o => o.ExplicitExpansion());
        });

        protected override void Because_of()
        {
            using(var context = new ClientContext())
            {
                // Log generated SQL for debugging the projection.
                context.Database.Log = s => Trace.WriteLine(s);
                // Explicitly expand Content[*].Category through the collection.
                _course = context.TrainingCourses.ProjectTo<TrainingCourseDto>(Configuration, c => c.Content.Select(co => co.Category)).FirstOrDefault(n => n.CourseName == "Course 1");
            }
        }

        [Fact]
        public void Should_expand_collections_items()
        {
            _course.Content[0].Category.CategoryName.ShouldBe("Category 1");
        }

        // Recreates and seeds the test database on every run.
        class Initializer : DropCreateDatabaseAlways<ClientContext>
        {
            protected override void Seed(ClientContext context)
            {
                var category = new Category { CategoryName = "Category 1" };
                var course = new TrainingCourse { CourseName = "Course 1" };
                context.TrainingCourses.Add(course);
                var content = new TrainingContent { ContentName = "Content 1", Category = category };
                context.TrainingContents.Add(content);
                course.Content.Add(content);
            }
        }

        // Entity Framework context backing the test.
        class ClientContext : DbContext
        {
            public ClientContext()
            {
                Database.SetInitializer(new Initializer());
            }

            public DbSet<Category> Categories { get; set; }
            public DbSet<TrainingCourse> TrainingCourses { get; set; }
            public DbSet<TrainingContent> TrainingContents { get; set; }
        }

        // --- Source entities ---

        public class TrainingCourse
        {
            [Key]
            public int CourseId { get; set; }
            public string CourseName { get; set; }
            public virtual IList<TrainingContent> Content { get; set; } = new List<TrainingContent>();
        }

        public class TrainingContent
        {
            [Key]
            public int ContentId { get; set; }
            public string ContentName { get; set; }
            public Category Category { get; set; }
        }

        public class Category
        {
            public int CategoryId { get; set; }
            public string CategoryName { get; set; }
        }

        // --- Destination DTOs ---

        public class TrainingCourseDto
        {
            public int CourseId { get; set; }
            public string CourseName { get; set; }
            public virtual IList<TrainingContentDto> Content { get; set; }
        }

        public class CategoryDto
        {
            public int CategoryId { get; set; }
            public string CategoryName { get; set; }
        }

        public class TrainingContentDto
        {
            public int ContentId { get; set; }
            public string ContentName { get; set; }
            // Only populated when explicitly expanded (see Configuration).
            public CategoryDto Category { get; set; }
        }
    }
}
{ "content_hash": "4fd4e5ac54aac5c08e46a650c604ecac", "timestamp": "", "source": "github", "line_count": 115, "max_line_length": 184, "avg_line_length": 30.921739130434784, "alnum_prop": 0.578177727784027, "repo_name": "mjalil/AutoMapper", "id": "5ed3fc2997cc8e55f2f2c2e397e4895314e828ac", "size": "3558", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/IntegrationTests/ExplicitExpansion/ExpandCollections.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "484" }, { "name": "C#", "bytes": "1856197" }, { "name": "PowerShell", "bytes": "37732" } ], "symlink_target": "" }
{% comment %}
  helpers/image.html -- renders a post asset (image, or autoplaying looped
  video for .mov files) inside a <figure>, with optional caption and frame.

  Parameters (via include):
    name    - asset file name, resolved under /assets/posts/<post-slug>/
    caption - optional figcaption text (also toggles the figure--caption class)
    frame   - pass false to suppress the figure--frame class (default: framed)
    alt     - optional alt text (images only)
    width   - optional width attribute (images only)

  The post slug is derived from the page file name: truncate: 14 keeps the
  first 11 characters plus "...", and the replace strips the "...", leaving
  the "YYYY-MM-DD-" date prefix, which is then removed from the file name.
  Assumes the standard Jekyll "YYYY-MM-DD-slug" naming -- TODO confirm.

  NOTE(review): .mov assets are served with a video/mp4 MIME type below;
  confirm the assets are H.264-encoded so browsers accept them.
{% endcomment %}
{% if include.frame == false %}
  {% assign frame = false %}
{% else %}
  {% assign frame = true %}
{% endif %}

{% assign fileName = page.path | split: '/' | last | split: '.' | first %}
{% assign fileNameDate = fileName | truncate: 14 | replace: '...', '' %}
{% assign postSlug = fileName | replace_first: fileNameDate, '' %}
{% assign assetPath = postSlug | prepend: '/assets/posts/' | append: '/' | append: include.name %}
{% assign extension = assetPath | split: '.' | last %}

<figure class="figure {% if include.caption %}figure--caption{% endif %} {% if frame %}figure--frame{% endif %}">
  <div class="figure__inner">
    {% if extension == 'mov' %}
      <video autoplay loop>
        <source src="{{ assetPath }}" type="video/mp4">
        Your browser does not support the video tag.
      </video>
    {% else %}
      <img
        class="figure__image"
        onload="this.parentElement.classList += ' figure__inner--loaded'"
        src="{{ assetPath }}"
        {% if include.alt %} alt="{{ include.alt }}" {% endif %}
        {% if include.width %} width="{{ include.width }}" {% endif %}
      />
    {% endif %}
    {% if include.caption %}
      <figcaption class="figure__caption">{{ include.caption }}</figcaption>
    {% endif %}
  </div>
</figure>
{ "content_hash": "1b2bbcad147d7ec796e0476acf0e8931", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 113, "avg_line_length": 37.11764705882353, "alnum_prop": 0.5689381933438986, "repo_name": "x0v/x0v.github.io", "id": "70cbbf87c9401caece8c3ff0548af624a4683f8d", "size": "1262", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_includes/helpers/image.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "235478" }, { "name": "HTML", "bytes": "37248" }, { "name": "JavaScript", "bytes": "72282" }, { "name": "Ruby", "bytes": "2747" } ], "symlink_target": "" }
import React from 'react'
import Dialog from 'material-ui/lib/dialog'
import FlatButton from 'material-ui/lib/flat-button'

import bindMethods from '../../utils/bindMethods'

/**
 * Confirmation dialog for delete actions. The dialog resolves a Promise with
 * true (Delete clicked) or false (Cancel clicked, dialog dismissed, or hide()
 * called).
 *
 * Usage:
 *
 *     <DeleteDialog ref="deleteDialog" />
 *
 *     this.refs.deleteDialog.show({
 *       title: 'Delete scenario',
 *       description: <span>Are you sure you want to delete <b>{title}</b>?</span>
 *     })
 *     .then(doDelete => {
 *       console.log('doDelete?', doDelete) // doDelete is true or false
 *     })
 *
 *     this.refs.deleteDialog.hide()
 */
export default class DeleteDialog extends React.Component {
  constructor (props) {
    super(props)
    // Bind all instance methods so they can be passed as event handlers.
    bindMethods(this)

    // handler holds the resolve function of the currently pending show()
    // Promise; a no-op while no dialog is pending.
    this.state = {
      open: false,
      title: 'Delete',
      description: 'Are you sure?',
      handler: function () {}
    }
  }

  render () {
    // Cancel resolves false (via hide), Delete resolves true.
    const actions = [
      <FlatButton
          label="Cancel"
          secondary={true}
          onTouchTap={this.hide}
      />,
      <FlatButton
          label="Delete"
          primary={true}
          keyboardFocused={true}
          onTouchTap={this._handleDeleteOk}
      />
    ]

    // onRequestClose covers backdrop clicks and the Escape key, which count
    // as a cancel.
    return <Dialog
        title={this.state.title}
        actions={actions}
        modal={false}
        open={this.state.open}
        onRequestClose={this.hide}
    >
      <p>
        {this.state.description}
      </p>
    </Dialog>
  }

  /**
   * Open the dialog.
   *
   * @param {{title: (string|*), description: (string|*)}} options
   * @return {Promise.<boolean>} resolves with true to delete, false to cancel
   *
   * NOTE(review): calling show() while a previous dialog is still pending
   * replaces the stored handler, so the earlier Promise never resolves --
   * confirm callers never overlap dialogs.
   */
  show ({ title, description }) {
    return new Promise((resolve, reject) => {
      this.setState({
        open: true,
        title: title || 'Title',
        description: description || 'Description',
        handler: resolve
      })
    })
  }

  /** Close the dialog, resolving the pending Promise with false. */
  hide () {
    this._handleDelete(false)
  }

  /** Delete button handler: resolve the pending Promise with true. */
  _handleDeleteOk () {
    this._handleDelete(true)
  }

  // Resolve the pending Promise with doDelete, then close and reset the
  // handler to a no-op so stray events cannot re-resolve.
  _handleDelete (doDelete) {
    this.state.handler(doDelete)
    this.setState({
      open: false,
      handler: function () {}
    })
  }
}
{ "content_hash": "9182ae8e0066967f0a0bc903491f7bdc", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 82, "avg_line_length": 20.703296703296704, "alnum_prop": 0.5530785562632696, "repo_name": "vanpaz/vbi", "id": "b74a3a042ba79c9c691bcca2a5a94b3bc50df310", "size": "1884", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/client/components/dialogs/DeleteDialog.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "10305" }, { "name": "HTML", "bytes": "16256" }, { "name": "JavaScript", "bytes": "236979" } ], "symlink_target": "" }
# Bootstrap the dotfiles repository:
#   1. symlink every entry of files/ into $HOME as a dot-prefixed file
#   2. fetch git submodules and run the per-tool installers
#
# Safe to re-run: ln -f replaces existing links. Works from any working
# directory (all paths are resolved against the script's own location).

# Abort on the first failed command or any use of an unset variable, so a
# failed path resolution can never symlink or install from the wrong place.
set -eu

# Absolute path of the directory containing this script, so the symlink
# targets stay valid no matter where the script is invoked from.
DIR="$(cd "$(dirname "$0")" && pwd)"

# Copy dotfiles: files/<name> -> ~/.<name>
# -s: symbolic link, -n: replace rather than descend into an existing link,
# -f: overwrite. Expansions are quoted so names with spaces work.
for f in "$DIR"/files/*
do
  ln -snf "$f" ~/."$(basename "$f")"
done

# Initialize submodules and run the per-tool installers from the repo root.
cd "$DIR"
git submodule update --init --recursive
./gnome-terminal-colors-solarized/install.sh
./vim/bootstrap.sh
{ "content_hash": "4bccfe6567ece2fcab8899af1f76a937", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 44, "avg_line_length": 16.533333333333335, "alnum_prop": 0.6693548387096774, "repo_name": "adibsaad/dotfiles", "id": "282d1a70635f051c824f8f6851f3867cf80fd85d", "size": "260", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bootstrap.sh", "mode": "33261", "license": "mit", "language": [ { "name": "Shell", "bytes": "7297" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?>
<chapter version="5.0"
         xsi:schemaLocation="http://docbook.org/ns/docbook http://www.docbook.org/xml/5.0/xsd/docbook.xsd http://www.w3.org/1999/xlink http://www.docbook.org/xml/5.0/xsd/xlink.xsd"
         xml:base="../" xml:id="welcome" xmlns="http://docbook.org/ns/docbook"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xmlns:xs="http://www.w3.org/2001/XMLSchema"
         xmlns:xlink="http://www.w3.org/1999/xlink"
         xmlns:xi="http://www.w3.org/2001/XInclude"
         xmlns:ns="http://docbook.org/ns/docbook">
  <title>Welcome</title>

  <para>I've always stated that end business users struggle to understand the
  differences between rules and processes, and more recently rules and event
  processing. For them they have this problem in their mind and they just want
  to model it using some software. The traditional way of using two vendor
  offerings forces the business user to work with a process oriented or rules
  oriented approach which just gets in the way, often with great confusion over
  which tool they should be using to model which bit.</para>

  <para>PegaSystems and Microsoft have done a great job of showing that the two
  can be combined and a behavioural modelling approach can be used. This allows
  the business user to work more naturally where the full range of approaches
  is available to them, without the tools getting in the way. From being
  process oriented to rule oriented or shades of grey in the middle - whatever
  suits the problem being modelled at that time.</para>

  <para>Drools 5.0 takes this one step further by not only adding BPMN2 based
  workflow with Drools Flow but also adding event processing with Drools
  Fusion, creating a more holistic approach to software development. Where the
  term holistic is used for emphasizing the importance of the whole and the
  interdependence of its parts.</para>

  <para>Drools 5.0 is now split into 5 modules, each with their own manual -
  Guvnor (BRMS/BPMS), Expert (Rules), Fusion (CEP), Flow (Process/Workflow) and
  Planner. 
  Guvnor is our web based governance system, traditionally referred to in the
  rules world as a BRMS. We decided to move away from the BRMS term to a play
  on governance as it's not rules specific. Expert is the traditional rules
  engine. Fusion is the event processing side, it's a play on data/sensor
  fusion terminology. Flow is our workflow module, Kris Verlaenen leads this
  and has done some amazing work; he's currently moving flow to be incorporated
  into jBPM 5. The fifth module called Planner, authored by Geoffrey De Smet,
  solves allocation and scheduling type problems and while still in the early
  stage of development is showing a lot of promise. We hope to add Semantics
  for 2011, based around description logic, and that is being worked on as part
  of the next generation Drools designs.</para>
The document takes things to the next level pushing Drools forward as a hybrid engine, not just a capable production rule system, but also melding in logic programming (prolog) with functional programming and description logic along with a host of other ideas for a more expressive and modern feeling language.</para> <para>I hope you can feel the passion that my team and I have while working on Drools, and that some of it rubs off on you during your adventures.</para> <mediaobject> <imageobject> <imagedata fileref="images/Chapter-Welcome/mark.png"></imagedata> </imageobject> </mediaobject> </chapter>
{ "content_hash": "4abb45b80cdba05ac042899f65248d5d", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 180, "avg_line_length": 59.67605633802817, "alnum_prop": 0.7554873731413736, "repo_name": "cyberdrcarr/droolsjbpm-knowledge", "id": "8f0ff96968069931207fa64089512990f5729975", "size": "4237", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "droolsjbpm-introduction-docs/src/main/docbook/en-US/Chapter-Welcome/Chapter-Welcome.xml", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
import { IQService } from 'angular';
import { IEcsLoadBalancerSourceData, IEcsLoadBalancer, IEcsTargetGroup } from '../domain/IEcsLoadBalancer';

/** Normalizes raw ECS load balancer payloads for consumption by the UI. */
export class EcsLoadBalancerTransformer {
  public static $inject = ['$q'];
  constructor(private $q: IQService) {}

  /**
   * Stamps every target group with the owning load balancer's region, account
   * and cloud provider, attaches the server groups registered for that target
   * group when the source data carries a target-group-to-services map, and
   * returns the load balancer wrapped in a resolved promise.
   */
  public normalizeLoadBalancer(loadBalancer: IEcsLoadBalancerSourceData): PromiseLike<IEcsLoadBalancer> {
    const serviceMap = loadBalancer.targetGroupServices;
    for (const group of loadBalancer.targetGroups) {
      group.region = loadBalancer.region;
      group.account = loadBalancer.account;
      group.cloudProvider = loadBalancer.cloudProvider;
      if (serviceMap) {
        group.serverGroups = serviceMap[group.targetGroupArn];
      }
    }
    return this.$q.resolve(loadBalancer);
  }
}
{ "content_hash": "aad44402b92cc94eb38f5fd28b9a2327", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 107, "avg_line_length": 39.95, "alnum_prop": 0.7334167709637046, "repo_name": "ajordens/deck", "id": "0442b05eaac0f959ec8a40f07b2ab6346d39b6ba", "size": "799", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/scripts/modules/ecs/src/loadBalancer/loadBalancer.transformer.ts", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "174302" }, { "name": "Dockerfile", "bytes": "328" }, { "name": "HTML", "bytes": "1467416" }, { "name": "JavaScript", "bytes": "2086572" }, { "name": "Shell", "bytes": "16819" }, { "name": "TypeScript", "bytes": "4276533" } ], "symlink_target": "" }
package org.apache.hadoop.hdfs;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.concurrent.TimeoutException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * This class tests the FileStatus API against a {@link MiniDFSCluster}.
 * Status is queried through three fronts: FileSystem, FileContext and the
 * DFSClient directly.
 */
public class TestFileStatus {
  // Instance initializer: turn on verbose logging for debugging failures.
  {
    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
    GenericTestUtils.setLogLevel(FileSystem.LOG, Level.ALL);
  }

  static final long seed = 0xDEADBEEFL;
  static final int blockSize = 8192;
  // fileSize is exactly two blocks.
  static final int fileSize = 16384;

  private static Configuration conf;
  private static MiniDFSCluster cluster;
  private static FileSystem fs;
  private static FileContext fc;
  private static DFSClient dfsClient;
  // Single two-block test file created once for the whole class.
  private static Path file1;

  @BeforeClass
  public static void testSetUp() throws Exception {
    conf = new HdfsConfiguration();
    // Limit directory listings to 2 entries per RPC so the iterator-based
    // listing tests below exercise paging -- TODO confirm intent.
    conf.setInt(DFSConfigKeys.DFS_LIST_LIMIT, 2);
    cluster = new MiniDFSCluster.Builder(conf).build();
    fs = cluster.getFileSystem();
    fc = FileContext.getFileContext(cluster.getURI(0), conf);
    dfsClient = new DFSClient(DFSUtilClient.getNNAddress(conf), conf);
    file1 = new Path("filestatus.dat");
    DFSTestUtil.createFile(fs, file1, fileSize, fileSize, blockSize,
        (short) 1, seed);
  }

  @AfterClass
  public static void testTearDown() throws Exception {
    if (fs != null) {
      fs.close();
    }
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  // Block until the file reaches the requested replication.
  private void checkFile(FileSystem fileSys, Path name, int repl)
      throws IOException, InterruptedException, TimeoutException {
    DFSTestUtil.waitReplication(fileSys, name, (short) repl);
  }

  /** Test calling getFileInfo directly on the client */
  @Test
  public void testGetFileInfo() throws IOException {
    // Check that / exists
    Path path = new Path("/");
    assertTrue("/ should be a directory",
        fs.getFileStatus(path).isDirectory());
    ContractTestUtils.assertNotErasureCoded(fs, path);

    // Make sure getFileInfo returns null for files which do not exist
    HdfsFileStatus fileInfo = dfsClient.getFileInfo("/noSuchFile");
    assertEquals("Non-existant file should result in null", null, fileInfo);

    // Children counts: /name1 has one child, /name1/name2 (a file) has zero.
    Path path1 = new Path("/name1");
    Path path2 = new Path("/name1/name2");
    assertTrue(fs.mkdirs(path1));
    FSDataOutputStream out = fs.create(path2, false);
    out.close();
    fileInfo = dfsClient.getFileInfo(path1.toString());
    assertEquals(1, fileInfo.getChildrenNum());
    fileInfo = dfsClient.getFileInfo(path2.toString());
    assertEquals(0, fileInfo.getChildrenNum());

    // Test getFileInfo throws the right exception given a non-absolute path.
    try {
      dfsClient.getFileInfo("non-absolute");
      fail("getFileInfo for a non-absolute path did not throw IOException");
    } catch (RemoteException re) {
      assertTrue("Wrong exception for invalid file name: " + re,
          re.toString().contains("Absolute path required"));
    }
  }

  /** Test the FileStatus obtained calling getFileStatus on a file */
  @Test
  public void testGetFileStatusOnFile() throws Exception {
    checkFile(fs, file1, 1);
    // test getFileStatus on a file
    FileStatus status = fs.getFileStatus(file1);
    assertFalse(file1 + " should be a file", status.isDirectory());
    assertEquals(blockSize, status.getBlockSize());
    assertEquals(1, status.getReplication());
    assertEquals(fileSize, status.getLen());
    ContractTestUtils.assertNotErasureCoded(fs, file1);
    assertEquals(
        file1.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString(),
        status.getPath().toString());
    assertTrue(file1 + " should have erasure coding unset in " +
        "FileStatus#toString(): " + status,
        status.toString().contains("isErasureCoded=false"));
  }

  /** Test the FileStatus obtained calling listStatus on a file */
  @Test
  public void testListStatusOnFile() throws IOException {
    // listStatus on a plain file returns a single-entry array for that file.
    FileStatus[] stats = fs.listStatus(file1);
    assertEquals(1, stats.length);
    FileStatus status = stats[0];
    assertFalse(file1 + " should be a file", status.isDirectory());
    assertEquals(blockSize, status.getBlockSize());
    assertEquals(1, status.getReplication());
    assertEquals(fileSize, status.getLen());
    ContractTestUtils.assertNotErasureCoded(fs, file1);
    assertEquals(
        file1.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString(),
        status.getPath().toString());

    // FileContext's iterator must report the same status.
    RemoteIterator<FileStatus> itor = fc.listStatus(file1);
    status = itor.next();
    assertEquals(stats[0], status);
    assertFalse(file1 + " should be a file", status.isDirectory());
  }

  /** Test getting a FileStatus object using a non-existant path */
  @Test
  public void testGetFileStatusOnNonExistantFileDir() throws IOException {
    Path dir = new Path("/test/mkdirs");
    try {
      fs.listStatus(dir);
      fail("listStatus of non-existent path should fail");
    } catch (FileNotFoundException fe) {
      assertEquals("File " + dir + " does not exist.", fe.getMessage());
    }

    try {
      fc.listStatus(dir);
      fail("listStatus of non-existent path should fail");
    } catch (FileNotFoundException fe) {
      assertEquals("File " + dir + " does not exist.", fe.getMessage());
    }

    try {
      fs.getFileStatus(dir);
      fail("getFileStatus of non-existent path should fail");
    } catch (FileNotFoundException fe) {
      assertTrue("Exception doesn't indicate non-existant path",
          fe.getMessage().startsWith("File does not exist"));
    }
  }

  /** Test FileStatus objects obtained from a directory */
  @Test
  public void testGetFileStatusOnDir() throws Exception {
    // Create the directory
    Path dir = new Path("/test/mkdirs");
    assertTrue("mkdir failed", fs.mkdirs(dir));
    assertTrue("mkdir failed", fs.exists(dir));

    // test getFileStatus on an empty directory
    FileStatus status = fs.getFileStatus(dir);
    assertTrue(dir + " should be a directory", status.isDirectory());
    assertTrue(dir + " should be zero size ", status.getLen() == 0);
    ContractTestUtils.assertNotErasureCoded(fs, dir);
    assertEquals(
        dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString(),
        status.getPath().toString());

    // test listStatus on an empty directory
    FileStatus[] stats = fs.listStatus(dir);
    assertEquals(dir + " should be empty", 0, stats.length);
    assertEquals(dir + " should be zero size ", 0,
        fs.getContentSummary(dir).getLength());

    RemoteIterator<FileStatus> itor = fc.listStatus(dir);
    assertFalse(dir + " should be empty", itor.hasNext());

    itor = fs.listStatusIterator(dir);
    assertFalse(dir + " should be empty", itor.hasNext());

    // create another file that is smaller than a block.
    Path file2 = new Path(dir, "filestatus2.dat");
    DFSTestUtil.createFile(fs, file2, blockSize/4, blockSize/4, blockSize,
        (short) 1, seed);
    checkFile(fs, file2, 1);

    // verify file attributes
    status = fs.getFileStatus(file2);
    assertEquals(blockSize, status.getBlockSize());
    assertEquals(1, status.getReplication());
    file2 = fs.makeQualified(file2);
    assertEquals(file2.toString(), status.getPath().toString());

    // Create another file in the same directory
    Path file3 = new Path(dir, "filestatus3.dat");
    DFSTestUtil.createFile(fs, file3, blockSize/4, blockSize/4, blockSize,
        (short) 1, seed);
    checkFile(fs, file3, 1);
    file3 = fs.makeQualified(file3);

    // Verify that the size of the directory increased by the size
    // of the two files
    final int expected = blockSize/2;
    assertEquals(dir + " size should be " + expected, expected,
        fs.getContentSummary(dir).getLength());

    // Test listStatus on a non-empty directory
    stats = fs.listStatus(dir);
    assertEquals(dir + " should have two entries", 2, stats.length);
    assertEquals(file2.toString(), stats[0].getPath().toString());
    assertEquals(file3.toString(), stats[1].getPath().toString());

    itor = fc.listStatus(dir);
    assertEquals(file2.toString(), itor.next().getPath().toString());
    assertEquals(file3.toString(), itor.next().getPath().toString());
    assertFalse("Unexpected addtional file", itor.hasNext());

    itor = fs.listStatusIterator(dir);
    assertEquals(file2.toString(), itor.next().getPath().toString());
    assertEquals(file3.toString(), itor.next().getPath().toString());
    assertFalse("Unexpected addtional file", itor.hasNext());

    // Test iterative listing. Now dir has 2 entries, create one more.
    // With DFS_LIST_LIMIT=2, 3+ entries force the iterators to page.
    Path dir3 = fs.makeQualified(new Path(dir, "dir3"));
    fs.mkdirs(dir3);
    dir3 = fs.makeQualified(dir3);
    stats = fs.listStatus(dir);
    assertEquals(dir + " should have three entries", 3, stats.length);
    assertEquals(dir3.toString(), stats[0].getPath().toString());
    assertEquals(file2.toString(), stats[1].getPath().toString());
    assertEquals(file3.toString(), stats[2].getPath().toString());

    itor = fc.listStatus(dir);
    assertEquals(dir3.toString(), itor.next().getPath().toString());
    assertEquals(file2.toString(), itor.next().getPath().toString());
    assertEquals(file3.toString(), itor.next().getPath().toString());
    assertFalse("Unexpected addtional file", itor.hasNext());

    itor = fs.listStatusIterator(dir);
    assertEquals(dir3.toString(), itor.next().getPath().toString());
    assertEquals(file2.toString(), itor.next().getPath().toString());
    assertEquals(file3.toString(), itor.next().getPath().toString());
    assertFalse("Unexpected addtional file", itor.hasNext());

    // Now dir has 3 entries, create two more
    Path dir4 = fs.makeQualified(new Path(dir, "dir4"));
    fs.mkdirs(dir4);
    dir4 = fs.makeQualified(dir4);
    Path dir5 = fs.makeQualified(new Path(dir, "dir5"));
    fs.mkdirs(dir5);
    dir5 = fs.makeQualified(dir5);
    stats = fs.listStatus(dir);
    assertEquals(dir + " should have five entries", 5, stats.length);
    assertEquals(dir3.toString(), stats[0].getPath().toString());
    assertEquals(dir4.toString(), stats[1].getPath().toString());
    assertEquals(dir5.toString(), stats[2].getPath().toString());
    assertEquals(file2.toString(), stats[3].getPath().toString());
    assertEquals(file3.toString(), stats[4].getPath().toString());

    itor = fc.listStatus(dir);
    assertEquals(dir3.toString(), itor.next().getPath().toString());
    assertEquals(dir4.toString(), itor.next().getPath().toString());
    assertEquals(dir5.toString(), itor.next().getPath().toString());
    assertEquals(file2.toString(), itor.next().getPath().toString());
    assertEquals(file3.toString(), itor.next().getPath().toString());
    assertFalse(itor.hasNext());

    itor = fs.listStatusIterator(dir);
    assertEquals(dir3.toString(), itor.next().getPath().toString());
    assertEquals(dir4.toString(), itor.next().getPath().toString());
    assertEquals(dir5.toString(), itor.next().getPath().toString());
    assertEquals(file2.toString(), itor.next().getPath().toString());
    assertEquals(file3.toString(), itor.next().getPath().toString());
    assertFalse(itor.hasNext());
    fs.delete(dir, true);
  }
}
{ "content_hash": "21d81793c6870355fbe411a858ea6805", "timestamp": "", "source": "github", "line_count": 308, "max_line_length": 79, "avg_line_length": 38.98376623376623, "alnum_prop": 0.6900974431581577, "repo_name": "legend-hua/hadoop", "id": "c74bb632a745bb70411e394ffb4c599b3f2d0092", "size": "12813", "binary": false, "copies": "2", "ref": "refs/heads/trunk", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileStatus.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "68758" }, { "name": "C", "bytes": "1435565" }, { "name": "C++", "bytes": "1814016" }, { "name": "CMake", "bytes": "54631" }, { "name": "CSS", "bytes": "60295" }, { "name": "HTML", "bytes": "231506" }, { "name": "Java", "bytes": "67032492" }, { "name": "JavaScript", "bytes": "777128" }, { "name": "Protocol Buffer", "bytes": "274924" }, { "name": "Python", "bytes": "23553" }, { "name": "Shell", "bytes": "386625" }, { "name": "TLA", "bytes": "14993" }, { "name": "TeX", "bytes": "19322" }, { "name": "XSLT", "bytes": "16894" } ], "symlink_target": "" }
<?php /* TwigBundle:Exception:traces_text.html.twig */ class __TwigTemplate_e0127da71765fb4613969d783c2c3218edd34d739768711e074492655a8ff7e7 extends Twig_Template { public function __construct(Twig_Environment $env) { parent::__construct($env); $this->parent = false; $this->blocks = array( ); } protected function doDisplay(array $context, array $blocks = array()) { // line 1 echo "<div class=\"block\"> <h2> Stack Trace (Plain Text)&nbsp; "; // line 4 ob_start(); // line 5 echo " <a href=\"#\" onclick=\"toggle('traces-text'); switchIcons('icon-traces-text-open', 'icon-traces-text-close'); return false;\"> <img class=\"toggle\" id=\"icon-traces-text-close\" alt=\"-\" src=\"data:image/gif;base64,R0lGODlhEgASAMQSANft94TG57Hb8GS44ez1+mC24IvK6ePx+Wa44dXs92+942e54o3L6W2844/M6dnu+P/+/l614P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAABIALAAAAAASABIAQAVCoCQBTBOd6Kk4gJhGBCTPxysJb44K0qD/ER/wlxjmisZkMqBEBW5NHrMZmVKvv9hMVsO+hE0EoNAstEYGxG9heIhCADs=\" style=\"display: none\" /> <img class=\"toggle\" id=\"icon-traces-text-open\" alt=\"+\" src=\"data:image/gif;base64,R0lGODlhEgASAMQTANft99/v+Ga44bHb8ITG52S44dXs9+z1+uPx+YvK6WC24G+944/M6W28443L6dnu+Ge54v/+/l614P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAABMALAAAAAASABIAQAVS4DQBTiOd6LkwgJgeUSzHSDoNaZ4PU6FLgYBA5/vFID/DbylRGiNIZu74I0h1hNsVxbNuUV4d9SsZM2EzWe1qThVzwWFOAFCQFa1RQq6DJB4iIQA7\" style=\"display: inline\" /> </a> "; echo trim(preg_replace('/>\s+</', '><', ob_get_clean())); // line 10 echo " </h2> <div id=\"traces-text\" class=\"trace\" style=\"display: none;\"> <pre>"; // line 13 $context['_parent'] = (array) $context; $context['_seq'] = twig_ensure_traversable($this->getAttribute((isset($context["exception"]) ? 
$context["exception"] : $this->getContext($context, "exception")), "toarray", array())); foreach ($context['_seq'] as $context["i"] => $context["e"]) { // line 14 echo "["; echo twig_escape_filter($this->env, ($context["i"] + 1), "html", null, true); echo "] "; echo twig_escape_filter($this->env, $this->getAttribute($context["e"], "class", array()), "html", null, true); echo ": "; echo twig_escape_filter($this->env, $this->getAttribute($context["e"], "message", array()), "html", null, true); echo " "; // line 15 $this->env->loadTemplate("TwigBundle:Exception:traces.txt.twig")->display(array("exception" => $context["e"])); } $_parent = $context['_parent']; unset($context['_seq'], $context['_iterated'], $context['i'], $context['e'], $context['_parent'], $context['loop']); $context = array_intersect_key($context, $_parent) + $_parent; // line 16 echo "</pre> </div> </div> "; } public function getTemplateName() { return "TwigBundle:Exception:traces_text.html.twig"; } public function isTraitable() { return false; } public function getDebugInfo() { return array ( 57 => 16, 51 => 15, 42 => 14, 38 => 13, 33 => 10, 26 => 5, 24 => 4, 19 => 1,); } }
{ "content_hash": "f5f036b6175a93cfb3a4deffb5d6279b", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 416, "avg_line_length": 43.506493506493506, "alnum_prop": 0.6011940298507462, "repo_name": "royrobsen/my_project_name", "id": "093cb2f634b5aabb0b5a7c209ab8ff5509a6ba6c", "size": "3350", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/cache/dev/twig/e0/12/7da71765fb4613969d783c2c3218edd34d739768711e074492655a8ff7e7.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "3073" }, { "name": "CSS", "bytes": "161695" }, { "name": "HTML", "bytes": "236424" }, { "name": "JavaScript", "bytes": "298210" }, { "name": "PHP", "bytes": "214968" } ], "symlink_target": "" }
describe('podio-js', function() { var PodioJS = require('../lib/podio-js'); var sinon = require('sinon'); describe('constructor', function() { it('should have version property', function() { var authOptions = { authType: 'client', clientId: 123, }; var instance = new PodioJS(authOptions); expect(instance.VERSION).toEqual(require('../package.json').version); }); it('should set auth data correctly', function() { var authOptions = { authType: 'server', clientId: 123, clientSecret: 'abcdef' }; var instance = new PodioJS(authOptions); expect(instance.authType).toEqual(authOptions.authType); expect(instance.clientId).toEqual(authOptions.clientId); expect(instance.clientSecret).toEqual(authOptions.clientSecret); }); it('should throw an exception if one of the auth properties is missing', function() { expect(function() { new PodioJS(); }).toThrow(new Error('Authentication options are missing')); expect(function() { new PodioJS({}); }).toThrow(new Error('Missing auth property authType')); expect(function() { new PodioJS({ authType: 'client' }); }).toThrow(new Error('Missing auth property clientId')); expect(function() { new PodioJS({ authType: 'server', clientId: 123 }); }).toThrow(new Error('Missing auth property clientSecret')); }); it('should not throw an exception if clientSecret is missing for a client auth', function() { var instance = new PodioJS({ authType: 'client', clientId: 123 }); expect(instance).toBeDefined(); }); it('should set the API URL to default', function() { var authOptions = { authType: 'client', clientId: 123 }; var instance = new PodioJS(authOptions); expect(instance.apiURL).toEqual('https://api.podio.com:443'); }); it('should set the API URL to the one provided in options', function() { var authOptions = { authType: 'client', clientId: 123 }; var apiURL = 'https://api2.podio.com'; var instance = new PodioJS(authOptions, { apiURL: apiURL}); expect(instance.apiURL).toEqual(apiURL); }); it('should get the authObject from the session store when one is 
provided', function() { var authOptions = { authType: 'client', clientId: 123 }; var sessionStore = {}; var instance; sinon.stub(PodioJS.prototype, 'refreshAuthFromStore'); instance = new PodioJS(authOptions, { sessionStore: sessionStore }); expect(instance.refreshAuthFromStore.calledOnce).toBe(true); PodioJS.prototype.refreshAuthFromStore.restore(); }); it('should still set the apiURL to default when a session store is provided', function() { var authOptions = { authType: 'client', clientId: 123 }; var sessionStore = { get: sinon.stub().returns({ accessToken: 123 }) }; var instance = new PodioJS(authOptions, { sessionStore: sessionStore }); expect(instance.apiURL).toEqual('https://api.podio.com:443'); }); it('should set onTokenWillRefresh callback', function() { var authOptions = { authType: 'client', clientId: 123 }; var onTokenWillRefresh = function() {}; var instance = new PodioJS(authOptions, { onTokenWillRefresh: onTokenWillRefresh }); expect(instance.onTokenWillRefresh).toEqual(onTokenWillRefresh); }); it('should set afterTokenRefreshed callback', function() { var authOptions = { authType: 'client', clientId: 123 }; var afterTokenRefreshed = function() {}; var instance = new PodioJS(authOptions, { afterTokenRefreshed: afterTokenRefreshed }); expect(instance.afterTokenRefreshed).toEqual(afterTokenRefreshed); }); it('should no include Push Service functionality by default', function() { var authOptions = { authType: 'client', clientId: 123 }; var instance = new PodioJS(authOptions); expect(instance.push).toBeUndefined(); expect(instance._getSubscription).toBeUndefined(); expect(instance._setSubscription).toBeUndefined(); expect(instance._fayeExtensionOutgoing).toBeUndefined(); expect(instance._getFayeClient).toBeUndefined(); }); it('should include Push Service functionality when .enablePushService is true', function() { var authOptions = { authType: 'client', clientId: 123 }; var options = { enablePushService: true }; var instance = new PodioJS(authOptions, 
options); expect(instance.push).toBeDefined(); expect(instance._getSubscription).toBeDefined(); expect(instance._setSubscription).toBeDefined(); expect(instance._fayeExtensionOutgoing).toBeDefined(); expect(instance._getFayeClient).toBeDefined(); }); }); });
{ "content_hash": "d78f008906e6e4fa87ad0c84f35c9a6b", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 138, "avg_line_length": 32.13548387096774, "alnum_prop": 0.6374222043766312, "repo_name": "theflow/theflow-js", "id": "b72feaf889285b85a2e643730b6ee2b8bad6b540", "size": "4981", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/podio-js.spec.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "75808" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "ac6889e8570036664e80e912e1d1a983", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "78cdafd69b4097ce79ad4273ad9a30e93a0f8cdc", "size": "216", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Chromista/Haptophyta/Prymnesiophyceae/Coccosphaerales/Calyptrosphaeraceae/Calyptrolithophora/Calyptrolithophora gracillima/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
module Spree class StaticContentController < StoreController rescue_from ActiveRecord::RecordNotFound, with: :render_404 helper 'spree/products' #layout :determine_layout def show @page = Spree::Page.by_store(current_store).visible.find_by_slug!(request.path) end private def determine_layout return @page.layout if @page && @page.layout.present? && [email protected]_layout_as_partial? Spree::Config.layout end def accurate_title @page ? (@page.meta_title.present? ? @page.meta_title : @page.title) : nil end end end
{ "content_hash": "7820ca561d146d6ce687599f658c0d22", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 95, "avg_line_length": 25.52173913043478, "alnum_prop": 0.6746166950596252, "repo_name": "APohio/spree_static_content", "id": "365fb58d8b69bd8079ecf1dab023032bd99dd263", "size": "587", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/controllers/spree/static_content_controller.rb", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "374" }, { "name": "HTML", "bytes": "8255" }, { "name": "JavaScript", "bytes": "51" }, { "name": "Ruby", "bytes": "20917" } ], "symlink_target": "" }
import * as React from 'react'; import { createStyles, withStyles, withTheme, WithTheme, WithStyles, makeStyles, CSSProperties, CreateCSSProperties, PropsFunc, } from '@material-ui/styles'; import Button from '@material-ui/core/Button'; import { Theme } from '@material-ui/core/styles'; import { expectType } from '@material-ui/types'; // Example 1 const simpleStyles = ({ palette, spacing }: Theme) => ({ root: { padding: spacing(1), backgroundColor: palette.background.default, color: palette.primary.dark, }, }); // Shared types for examples interface SimpleComponentProps extends WithStyles<typeof simpleStyles> { text: string; } const StyledExampleOne = withStyles(simpleStyles)(({ classes, text }: SimpleComponentProps) => ( <div className={classes.root}>{text}</div> )); <StyledExampleOne text="I am styled!" />; // Example 2 const SimpleComponent: React.FunctionComponent< SimpleComponentProps & WithStyles<typeof simpleStyles> > = ({ classes, text }) => <div className={classes.root}>{text}</div>; const StyledExampleTwo = withStyles(simpleStyles)(SimpleComponent); <StyledExampleTwo text="I am styled!" 
/>; // Example 3 const styleRule = createStyles({ root: { display: 'flex', alignItems: 'stretch', height: '100vh', width: '100%', }, }); const ComponentWithChildren: React.FunctionComponent<WithStyles<typeof simpleStyles>> = ({ classes, children, }) => <div className={classes.root}>{children}</div>; const StyledExampleThree = withStyles(styleRule)(ComponentWithChildren); <StyledExampleThree />; // Also works with a plain object const stylesAsPojo = { root: { backgroundColor: 'hotpink', }, }; const AnotherStyledSFC = withStyles({ root: { backgroundColor: 'hotpink' }, })(({ classes }: WithStyles<'root'>) => <div className={classes.root}>Stylish!</div>); // withTheme const ComponentWithTheme = withTheme<Theme, React.FunctionComponent<WithTheme<Theme>>>( ({ theme }: WithTheme<Theme>) => <div>{theme.spacing(1)}</div>, ); const componentWithThemeRef = React.createRef<HTMLDivElement>(); <ComponentWithTheme ref={componentWithThemeRef} />; // withStyles + withTheme type AllTheProps = WithTheme<Theme> & WithStyles<typeof simpleStyles>; const SimpleStyledComponent = withStyles(simpleStyles)(({ theme, classes }: AllTheProps) => ( <div className={classes.root}>{theme.palette.text.primary}</div> )); // @ts-expect-error missing prop theme <SimpleStyledComponent />; const AllTheComposition = withTheme<Theme, typeof SimpleStyledComponent>(SimpleStyledComponent); <AllTheComposition />; { const Foo = withTheme<Theme, React.ComponentClass<WithTheme<Theme>>>( class extends React.Component<WithTheme<Theme>> { render() { return null; } }, ); <Foo />; } declare const themed: boolean; { // this is necessary so that TypesScript can infer the theme // usually it's better to just use withTheme<Theme> if you're not actual styling const themedStyles = (theme: Theme) => ({ root: {} }); // Test that withTheme: true guarantees the presence of the theme const Foo = withStyles(themedStyles, { withTheme: true })( class extends React.Component<WithTheme<Theme>> { render() { return <div style={{ 
margin: this.props.theme.spacing(1) }} />; } }, ); <Foo />; const Bar = withStyles(themedStyles, { withTheme: true, })(({ theme }: WithStyles<typeof themedStyles, true>) => ( <div style={{ margin: theme.spacing(1) }} /> )); <Bar />; } // Can't use withStyles effectively as a decorator in TypeScript // due to https://github.com/Microsoft/TypeScript/issues/4881 // @withStyles(styles) const DecoratedComponent = withStyles(simpleStyles)( class extends React.Component<SimpleComponentProps & WithStyles<typeof simpleStyles>> { render() { const { classes, text } = this.props; return <div className={classes.root}>{text}</div>; } }, ); // no 'classes' property required at element creation time (#8267) <DecoratedComponent text="foo" />; // Allow nested pseudo selectors withStyles((theme) => createStyles({ guttered: { '&:hover': { textDecoration: 'none', }, }, listItem: { '&:hover $listItemIcon': { visibility: 'inherit', }, }, }), ); { // allow top level media queries // https://github.com/mui-org/material-ui/issues/12277 // typescript thinks `content` is the CSS property not a classname const ambiguousStyles = createStyles({ content: { minHeight: '100vh', }, '@media (min-width: 960px)': { content: { // @ts-expect-error display: 'flex', }, }, }); const styles = createStyles({ contentClass: { minHeight: '100vh', }, '@media (min-width: 960px)': { contentClass: { display: 'flex', }, }, }); } { const styles = (theme: Theme) => createStyles({ // Styled similar to ListItemText root: { '&:first-child': { paddingLeft: 0, }, flex: '1 1 auto', padding: '0 16px', }, inset: { '&:first-child': { paddingLeft: theme.spacing(7), }, }, row: { alignItems: 'center', display: 'flex', flexDirection: 'row', }, }); interface ListItemContentProps extends WithStyles<typeof styles> { children?: React.ReactElement; inset?: boolean; row?: boolean; } const ListItemContent = withStyles(styles, { name: 'ui-ListItemContent' })( ({ children, classes, inset, row }: ListItemContentProps) => ( <div 
className={classes.root} color="text.secondary"> {children} </div> ), ); } { interface FooProps extends WithStyles<'x' | 'y'> { a: number; b: boolean; } const ListItemContent = withStyles({ x: {}, y: {} })((props: FooProps) => <div />); } { // https://github.com/mui-org/material-ui/issues/11109 // The real test here is with "strictFunctionTypes": false, // but we don't have a way currently to test under varying // TypeScript configurations. interface ComponentProps extends WithStyles<typeof styles> { caption: string; } const styles = (theme: Theme) => createStyles({ content: { margin: 4, }, }); const Component = (props: ComponentProps) => { return <div className={props.classes.content}>Hello {props.caption}</div>; }; const StyledComponent = withStyles(styles)(Component); class App extends React.Component { render() { return ( <div className="App"> <StyledComponent caption="Developer" /> </div> ); } } <App />; } { // https://github.com/mui-org/material-ui/issues/11191 const styles = (theme: Theme) => createStyles({ main: {}, }); interface Props extends WithStyles<typeof styles> { someProp?: string; } class SomeComponent extends React.PureComponent<Props> { render() { return <div />; } } const DecoratedSomeComponent = withStyles(styles)(SomeComponent); <DecoratedSomeComponent someProp="hello world" />; } { // https://github.com/mui-org/material-ui/issues/11312 withStyles(simpleStyles, { name: 'MyComponent', index: 0 })(() => <div />); } { // can't provide own `classes` type interface Props { classes: number; } class Component extends React.Component<Props & WithStyles<typeof simpleStyles>> {} // @ts-expect-error const StyledComponent = withStyles(simpleStyles)(Component); // @ts-expect-error implicit FunctionComponent withStyles(simpleStyles)((props: Props) => null); // @ts-expect-error withStyles(simpleStyles)((props: Props & WithStyles<typeof simpleStyles>) => null); // @ts-expect-error withStyles(simpleStyles)((props: Props & { children?: React.ReactNode }) => 
null); withStyles(simpleStyles)( // @ts-expect-error (props: Props & WithStyles<typeof simpleStyles> & { children?: React.ReactNode }) => null, ); // explicit not but with "Property 'children' is missing in type 'ValidationMap<Props>'". // which is not helpful const StatelessComponent: React.FunctionComponent<Props> = (props) => null; const StatelessComponentWithStyles: React.FunctionComponent< Props & WithStyles<typeof simpleStyles> > = (props) => null; // @ts-expect-error withStyles(simpleStyles)(StatelessComponent); // @ts-expect-error withStyles(simpleStyles)(StatelessComponentWithStyles); } { // https://github.com/mui-org/material-ui/issues/12670 interface Props { nonDefaulted: string; defaulted: number; } class MyButton extends React.Component<Props & WithStyles<typeof styles>> { static defaultProps = { defaulted: 0, }; render() { const { classes, nonDefaulted, defaulted } = this.props; return ( <Button className={classes.btn}> {defaulted}, {nonDefaulted} </Button> ); } } const styles = () => createStyles({ btn: { color: 'red', }, }); const StyledMyButton = withStyles(styles)(MyButton); const CorrectUsage = () => <StyledMyButton nonDefaulted="2" />; // @ts-expect-error Property 'nonDefaulted' is missing in type '{}' const MissingPropUsage = () => <StyledMyButton />; } { // styles from props interface StyleProps { color?: 'blue' | 'red'; } const styles = (theme: Theme) => ({ root: (props: StyleProps) => ({ backgroundColor: props.color || theme.palette.primary.main }), }); interface MyComponentProps extends WithStyles<typeof styles> { message: string; } class MyComponent extends React.Component<MyComponentProps> { render() { const { classes, color, message } = this.props; return ( <div className={classes.root}> {color}: {message} </div> ); } } const StyledMyComponent = withStyles(styles)(MyComponent); const renderedStyledMyComponent = <StyledMyComponent message="Hi" />; // @ts-expect-error number is not assignable to 'blue' | 'red' interface InconsistentProps 
extends WithStyles<typeof styles> { color: number; } } function ForwardRefTest() { const styles = createStyles({ root: { color: 'red' }, }); function Anchor(props: WithStyles<typeof styles>) { const { classes } = props; return <a className={classes.root} />; } const StyledAnchor = withStyles(styles)(Anchor); const anchorRef = React.useRef<HTMLAnchorElement>(null); // forwarded to function components which can't hold refs // @ts-expect-error property 'ref' does not exists <StyledAnchor ref={anchorRef} />; const RefableAnchor = React.forwardRef<HTMLAnchorElement, WithStyles<typeof styles>>( (props, ref) => { const { classes } = props; return <a className={classes.root} />; }, ); const StyledRefableAnchor = withStyles(styles)(RefableAnchor); <StyledRefableAnchor ref={anchorRef} />; const buttonRef = React.createRef<HTMLButtonElement>(); // @ts-expect-error HTMLButtonElement is missing properties <StyledRefableAnchor ref={buttonRef} />; } { // https://github.com/mui-org/material-ui/pull/15546 // Update type definition to let CSS properties be functions interface TestProps { foo: boolean; } const useStyles = makeStyles((theme: Theme) => ({ root: { width: (prop: TestProps) => (prop.foo ? 100 : 0), }, root2: (prop2: TestProps) => ({ width: (prop: TestProps) => (prop.foo && prop2.foo ? 
100 : 0), height: 100, }), })); // eslint-disable-next-line react-hooks/rules-of-hooks const styles = useStyles({ foo: true }); expectType<Record<'root' | 'root2', string>, typeof styles>(styles); } { // If there are no props, use the definition that doesn't accept them // https://github.com/mui-org/material-ui/issues/16198 const styles = createStyles({ root: { width: 1, }, }); expectType< Record<'root', CSSProperties | CreateCSSProperties | PropsFunc<{}, CreateCSSProperties>>, typeof styles >(styles); const styles2 = createStyles({ root: () => ({ width: 1, }), }); expectType< Record<'root', CSSProperties | CreateCSSProperties | PropsFunc<{}, CreateCSSProperties>>, typeof styles2 >(styles2); interface TestProps { foo: boolean; } const styles3 = createStyles({ root: (props: TestProps) => ({ width: 1, }), }); expectType< Record< 'root', | CSSProperties | CreateCSSProperties<TestProps> | PropsFunc<TestProps, CreateCSSProperties<TestProps>> >, typeof styles3 >(styles3); }
{ "content_hash": "ae732b0b72088d5322e7880aae402f8e", "timestamp": "", "source": "github", "line_count": 497, "max_line_length": 98, "avg_line_length": 25.549295774647888, "alnum_prop": 0.6438809261300992, "repo_name": "mbrookes/material-ui", "id": "e2c63360118c993bf66f72f1e3f8a85352523e5d", "size": "12698", "binary": false, "copies": "1", "ref": "refs/heads/next", "path": "packages/material-ui-styles/test/styles.spec.tsx", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2092" }, { "name": "JavaScript", "bytes": "16089809" }, { "name": "TypeScript", "bytes": "1788737" } ], "symlink_target": "" }
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ 'use strict'; var diff = require('arr-diff'); var debug = require('debug')('micromatch'); var typeOf = require('kind-of'); var omit = require('object.omit'); var cache = require('regex-cache'); var isGlob = require('is-glob'); var expand = require('./lib/expand'); var utils = require('./lib/utils'); /** * The main function. Pass an array of filepaths, * and a string or array of glob patterns * * @param {Array|String} `files` * @param {Array|String} `patterns` * @param {Object} `opts` * @return {Array} Array of matches */ function micromatch(files, patterns, opts) { if (!files || !patterns) return []; opts = opts || {}; if (typeof opts.cache === 'undefined') { opts.cache = true; } if (!Array.isArray(patterns)) { return match(files, patterns, opts); } var len = patterns.length, i = 0; var omit = [], keep = []; while (len--) { var glob = patterns[i++]; if (glob.charCodeAt(0) === 33 /* ! */) { omit.push.apply(omit, match(files, glob.slice(1), opts)); } else { keep.push.apply(keep, match(files, glob, opts)); } } return diff(keep, omit); } /** * Pass an array of files and a glob pattern as a string. * * This function is called by the main `micromatch` function * If you only need to pass a single pattern you might get * very minor speed improvements using this function. 
* * @param {Array} `files` * @param {Array} `pattern` * @param {Object} `options` * @return {Array} */ function match(files, pattern, opts) { if (typeOf(files) !== 'string' && !Array.isArray(files)) { throw new Error(msg('match', 'files', 'a string or array')); } files = utils.arrayify(files); opts = opts || {}; var negate = opts.negate || false; var orig = pattern; if (typeof pattern === 'string' && opts.nonegate !== true) { negate = pattern.charAt(0) === '!'; if (negate) { pattern = pattern.slice(1); } } var _isMatch = matcher(pattern, opts); var len = files.length, i = 0; var res = []; while (i < len) { var file = files[i++]; var fp = utils.unixify(file, opts); if (!_isMatch(fp)) { continue; } res.push(fp); } if (res.length === 0) { if (opts.failglob === true) { throw new Error('micromatch.match() found no matches for: "' + orig + '".'); } if (opts.nonull || opts.nullglob) { res.push(utils.unescapeGlob(orig)); } } // if `negate` was defined, diff negated files if (negate) { res = diff(files, res); } // if `ignore` was defined, diff ignored filed if (opts.ignore && opts.ignore.length) { pattern = opts.ignore; opts = omit(opts, ['ignore']); return diff(res, micromatch(res, pattern, opts)); } return res; } /** * Returns a function that takes a glob pattern or array of glob patterns * to be used with `Array#filter()`. (Internally this function generates * the matching function using the [matcher] method). * * ```js * var fn = mm.filter('[a-c]'); * ['a', 'b', 'c', 'd', 'e'].filter(fn); * //=> ['a', 'b', 'c'] * ``` * * @param {String|Array} `patterns` Can be a glob or array of globs. * @param {Options} `opts` Options to pass to the [matcher] method. * @return {Function} Filter function to be passed to `Array#filter()`. 
*/ function filter(patterns, opts) { if (!Array.isArray(patterns) && typeof patterns !== 'string') { throw new TypeError(msg('filter', 'patterns', 'a string or array')); } patterns = utils.arrayify(patterns); return function (fp) { if (fp == null) return []; var len = patterns.length, i = 0; var res = true; fp = utils.unixify(fp, opts); while (i < len) { var fn = matcher(patterns[i++], opts); if (!fn(fp)) { res = false; break; } } return res; }; } /** * Returns true if the filepath contains the given * pattern. Can also return a function for matching. * * ```js * isMatch('foo.md', '*.md', {}); * //=> true * * isMatch('*.md', {})('foo.md') * //=> true * ``` * * @param {String} `fp` * @param {String} `pattern` * @param {Object} `opts` * @return {Boolean} */ function isMatch(fp, pattern, opts) { if (typeof fp !== 'string') { throw new TypeError(msg('isMatch', 'filepath', 'a string')); } fp = utils.unixify(fp, opts); if (typeOf(pattern) === 'object') { return matcher(fp, pattern); } return matcher(pattern, opts)(fp); } /** * Returns true if the filepath matches the * given pattern. */ function contains(fp, pattern, opts) { if (typeof fp !== 'string') { throw new TypeError(msg('contains', 'pattern', 'a string')); } opts = opts || {}; opts.contains = (pattern !== ''); fp = utils.unixify(fp, opts); if (opts.contains && !isGlob(pattern)) { return fp.indexOf(pattern) !== -1; } return matcher(pattern, opts)(fp); } /** * Returns true if a file path matches any of the * given patterns. * * @param {String} `fp` The filepath to test. * @param {String|Array} `patterns` Glob patterns to use. * @param {Object} `opts` Options to pass to the `matcher()` function. 
* @return {String} */ function any(fp, patterns, opts) { if (!Array.isArray(patterns) && typeof patterns !== 'string') { throw new TypeError(msg('any', 'patterns', 'a string or array')); } fp = utils.unixify(fp, opts); patterns = utils.arrayify(patterns); var len = patterns.length; while (len--) { var isMatch = matcher(patterns[len], opts); if (isMatch(fp)) { return true; } } return false; } /** * Filter the keys of an object with the given `glob` pattern * and `options` * * @param {Object} `object` * @param {Pattern} `object` * @return {Array} */ function matchKeys(obj, glob, options) { if (typeOf(obj) !== 'object') { throw new TypeError(msg('matchKeys', 'first argument', 'an object')); } var fn = matcher(glob, options); var res = {}; for (var key in obj) { if (obj.hasOwnProperty(key) && fn(key)) { res[key] = obj[key]; } } return res; } /** * Return a function for matching based on the * given `pattern` and `options`. * * @param {String} `pattern` * @param {Object} `options` * @return {Function} */ function matcher(pattern, opts) { // pattern is a function if (typeof pattern === 'function') { return pattern; } // pattern is a regex if (pattern instanceof RegExp) { return function(fp) { return pattern.test(fp); }; } // pattern is a non-glob string if (!isGlob(pattern)) { return utils.matchPath(pattern, opts); } // pattern is a glob string var re = makeRe(pattern, opts); // `matchBase` is defined if (opts && opts.matchBase) { return utils.hasFilename(re, opts); } // `matchBase` is not defined return function(fp) { fp = utils.unixify(fp, opts); return re.test(fp); }; } /** * Create and cache a regular expression for matching * file paths. * * If the leading character in the `glob` is `!`, a negation * regex is returned. 
* * @param {String} `glob` * @param {Object} `options` * @return {RegExp} */ function toRegex(glob, options) { if (typeOf(glob) !== 'string') { throw new Error(msg('toRegex', 'glob', 'a string')); } // clone options to prevent mutating the original object var opts = Object.create(options || {}); var flags = opts.flags || ''; if (opts.nocase && flags.indexOf('i') === -1) { flags += 'i'; } var parsed = expand(glob, opts); // pass in tokens to avoid parsing more than once opts.negated = opts.negated || parsed.negated; opts.negate = opts.negated; glob = wrapGlob(parsed.pattern, opts); var re; try { re = new RegExp(glob, flags); return re; } catch (err) { var msg = 'micromatch invalid regex: (' + re + ')'; if (opts.strict) throw new SyntaxError(msg + err); } return /$^/; } /** * Create the regex to do the matching. If the leading * character in the `glob` is `!` a negation regex is returned. * * @param {String} `glob` * @param {Boolean} `negate` */ function wrapGlob(glob, opts) { var prefix = (opts && !opts.contains) ? '^' : ''; var after = (opts && !opts.contains) ? '$' : ''; glob = ('(?:' + glob + ')' + after); if (opts && opts.negate) { return prefix + ('(?!^' + glob + ').*$'); } return prefix + glob; } /** * Wrap `toRegex` to memoize the generated regex * the string and options don't change */ function makeRe(glob, opts) { return cache(toRegex, glob, opts); } /** * Make error messages consistent. Follows this format: * * ```js * msg(methodName, argNumber, nativeType); * // example: * msg('matchKeys', 'first', 'an object'); * ``` * * @param {String} `method` * @param {String} `num` * @param {String} `type` * @return {String} */ function msg(method, what, type) { return 'micromatch.' 
+ method + '(): ' + what + ' should be ' + type + '.'; } /** * Public methods */ micromatch.any = any; micromatch.braces = micromatch.braceExpand = require('braces'); micromatch.contains = contains; micromatch.expand = expand; micromatch.filter = filter; micromatch.isMatch = isMatch; micromatch.makeRe = makeRe; micromatch.match = match; micromatch.matcher = matcher; micromatch.matchKeys = matchKeys; /** * Expose `micromatch` */ module.exports = micromatch; },{"./lib/expand":3,"./lib/utils":5,"arr-diff":6,"braces":8,"debug":23,"is-glob":28,"kind-of":29,"object.omit":30,"regex-cache":39}],2:[function(require,module,exports){ 'use strict'; var chars = {}; /** * Regex for common characters */ chars.escapeRegex = { '?': /\?/g, '@': /\@/g, '!': /\!/g, '+': /\+/g, '*': /\*/g, '(': /\(/g, ')': /\)/g, '[': /\[/g, ']': /\]/g, }; /** * Escape characters */ chars.ESC = { '?': '__UNESC_QMRK__', '@': '__UNESC_AMPE__', '!': '__UNESC_EXCL__', '+': '__UNESC_PLUS__', '*': '__UNESC_STAR__', ',': '__UNESC_COMMA__', '(': '__UNESC_LTPAREN__', ')': '__UNESC_RTPAREN__', '[': '__UNESC_LTBRACK__', ']': '__UNESC_RTBRACK__', }; /** * Unescape characters */ chars.UNESC = { '__UNESC_QMRK__' : '\\?', '__UNESC_AMPE__' : '\\@', '__UNESC_EXCL__' : '\\!', '__UNESC_PLUS__' : '\\+', '__UNESC_STAR__' : '\\*', '__UNESC_COMMA__' : '\\*', '__UNESC_LTPAREN__': '\\(', '__UNESC_RTPAREN__': '\\)', '__UNESC_LTBRACK__': '\\[', '__UNESC_RTBRACK__': '\\]', }; chars.ESC_TEMP = { '?': '__TEMP_QMRK__', '@': '__TEMP_AMPE__', '!': '__TEMP_EXCL__', '*': '__TEMP_STAR__', '+': '__TEMP_PLUS__', ',': '__TEMP_COMMA__', '(': '__TEMP_LTPAREN__', ')': '__TEMP_RTPAREN__', '[': '__TEMP_LTBRACK__', ']': '__TEMP_RTBRACK__', }; chars.TEMP = { '__TEMP_QMRK__' : '?', '__TEMP_AMPE__' : '@', '__TEMP_EXCL__' : '!', '__TEMP_STAR__' : '*', '__TEMP_PLUS__' : '+', '__TEMP_COMMA__' : ',', '__TEMP_LTPAREN__': '(', '__TEMP_RTPAREN__': ')', '__TEMP_LTBRACK__': '[', '__TEMP_RTBRACK__': ']', }; module.exports = chars; 
},{}],3:[function(require,module,exports){ /*! * micromatch <https://github.com/jonschlinkert/micromatch> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT License. */ 'use strict'; var utils = require('./utils'); var Glob = require('./glob'); /** * Expose `expand` */ module.exports = expand; /** * Expand a glob pattern to resolve braces and * similar patterns before converting to regex. * * @param {String|Array} `pattern` * @param {Array} `files` * @param {Options} `opts` * @return {Array} */ function expand(pattern, options) { if (typeof pattern !== 'string') { throw new TypeError('micromatch.expand(): argument should be a string.'); } var glob = new Glob(pattern, options || {}); var opts = glob.options; // return early if glob pattern matches special patterns if (specialCase(pattern) && opts.safemode) { return new RegExp(utils.escapeRe(pattern), 'g'); } if (opts.nonegate !== true) { opts.negated = glob.negated; } glob._replace('/.', '/\\.'); // parse the glob pattern into tokens glob.parse(); var tok = glob.tokens; tok.is.negated = opts.negated; if (tok.is.dotfile) { glob.options.dot = true; opts.dot = true; } if (!tok.is.glob) { return { pattern: utils.escapePath(glob.pattern), tokens: tok, options: opts }; } // see if it might be a dotfile pattern if (/[{,]\./.test(glob.pattern)) { opts.makeRe = false; opts.dot = true; } // expand braces, e.g `{1..5}` glob.track('before brackets'); if (tok.is.brackets) { glob.brackets(); } glob.track('before braces'); if (tok.is.braces) { glob.braces(); } glob.track('after braces'); glob._replace('[]', '\\[\\]'); glob._replace('(?', '__QMARK_GROUP__'); // windows drives glob._replace(/^(\w):([\\\/]+?)/gi, lookahead + '$1:$2', true); // negate slashes in exclusion ranges if (glob.pattern.indexOf('[^') !== -1) { glob.pattern = negateSlash(glob.pattern); } if (glob.pattern === '**' && opts.globstar !== false) { glob.pattern = globstar(opts); } else { if (/^\*\.\w*$/.test(glob.pattern)) { glob._replace('*', 
star(opts.dot) + '\\'); glob._replace('__QMARK_GROUP__', '(?'); return glob; } // '/*/*/*' => '(?:/*){3}' glob._replace(/(\/\*)+/g, function (match) { var len = match.length / 2; if (len === 1) { return match; } return '(?:\\/*){' + len + '}'; }); glob.pattern = balance(glob.pattern, '[', ']'); glob.escape(glob.pattern); // if the glob is for one directory deep, we can // simplify the parsing and generated regex if (tok.path.dirname === '' && !tok.is.globstar) { glob.track('before expand filename'); return expandFilename(glob, opts); } // if the pattern has `**` if (tok.is.globstar) { glob._replace(/\*{2,}/g, '**'); glob.pattern = collapse(glob.pattern, '/**'); glob.pattern = optionalGlobstar(glob.pattern); // reduce extraneous globstars glob._replace(/(^|[^\\])\*{2,}([^\\]|$)/g, '$1**$2'); // 'foo/*' glob._replace(/(\w+)\*(?!\/)/g, '(?=.)$1[^/]*?', true); glob._replace('**', globstar(opts), true); } // ends with /* glob._replace(/\/\*$/, '\\/' + stardot(opts), true); // ends with *, no slashes glob._replace(/(?!\/)\*$/, boxQ, true); // has '*' glob._replace('*', stardot(opts), true); glob._replace('?.', '?\\.', true); glob._replace('?:', '?:', true); glob._replace(/\?+/g, function (match) { var len = match.length; if (len === 1) { return box; } return box + '{' + len + '}'; }); // escape '.abc' => '\\.abc' glob._replace(/\.([*\w]+)/g, '\\.$1'); // fix '[^\\\\/]' glob._replace(/\[\^[\\\/]+\]/g, box); // '///' => '\/' glob._replace(/\/+/g, '\\/'); // '\\\\\\' => '\\' glob._replace(/\\{2,}/g, '\\'); } glob._replace('__QMARK_GROUP__', '(?'); glob.unescape(glob.pattern); glob._replace('__UNESC_STAR__', '*'); glob._replace('%~', '?'); glob._replace('%%', '*'); glob._replace('?.', '?\\.'); glob._replace('[^\\/]', '[^/]'); return glob; } /** * Expand the filename part of the glob into a regex * compatible string * * @param {String} glob * @param {Object} tok Tokens * @param {Options} opts * @return {Object} */ function expandFilename(glob, opts) { var tok = glob.tokens; 
switch (glob.pattern) { case '.': glob.pattern = '\\.'; break; case '.*': glob.pattern = '\\..*'; break; case '*.*': glob.pattern = star(opts.dot) + '\\.[^/]*?'; break; case '*': glob.pattern = star(opts.dot); break; default: if (tok.path.filename === '*' && !tok.path.dirname) { glob.pattern = star(opts.dot) + '\\' + glob.pattern.slice(1); } else { glob._replace(/(?!\()\?/g, '[^/]'); if (tok.path.basename.charAt(0) !== '.') { opts.dot = true; } glob._replace('*', star(opts.dot)); } } if (glob.pattern.charAt(0) === '.') { glob.pattern = '\\' + glob.pattern; } glob._replace('__QMARK_GROUP__', '(?'); glob.unescape(glob.pattern); glob._replace('__UNESC_STAR__', '*'); return glob; } /** * Special cases */ function specialCase(glob) { if (glob === '\\') { return true; } return false; } /** * Collapse repeated character sequences. * * ```js * collapse('a/../../../b', '../'); * //=> 'a/../b' * ``` * * @param {String} `str` * @param {String} `ch` * @return {String} */ function collapse(str, ch, repeat) { var res = str.split(ch); var len = res.length; var isFirst = res[0] === ''; var isLast = res[res.length - 1] === ''; res = res.filter(Boolean); if (isFirst) { res.unshift(''); } if (isLast) { res.push(''); } var diff = len - res.length; if (repeat && diff >= 1) { ch = '(?:' + ch + '){' + (diff + 1) + '}'; } return res.join(ch); } /** * Make globstars optional, as in glob spec: * * ```js * optionalGlobstar('a\/**\/b'); * //=> '(?:a\/b|a\/**\/b)' * ``` * * @param {String} `str` * @return {String} */ function optionalGlobstar(glob) { // globstars preceded and followed by a word character if (/[^\/]\/\*\*\/[^\/]/.test(glob)) { var tmp = glob.split('/**/').join('/'); glob = '(?:' + tmp + '|' + glob + ')'; // leading globstars } else if (/^\*\*\/[^\/]/.test(glob)) { glob = glob.split(/^\*\*\//).join('(^|.+\\/)'); } return glob; } /** * Negate slashes in exclusion ranges, per glob spec: * * ```js * negateSlash('[^foo]'); * //=> '[^\\/foo]' * ``` * * @param {[type]} str 
[description] * @return {[type]} */ function negateSlash(str) { var re = /\[\^([^\]]*?)\]/g; return str.replace(re, function (match, inner) { if (inner.indexOf('/') === -1) { inner = '\\/' + inner; } return '[^' + inner + ']'; }); } /** * Escape imbalanced braces/bracket */ function balance(str, a, b) { var aarr = str.split(a); var alen = aarr.join('').length; var blen = str.split(b).join('').length; if (alen !== blen) { str = aarr.join('\\' + a); return str.split(b).join('\\' + b); } return str; } /** * Escape utils */ function esc(str) { str = str.split('?').join('%~'); str = str.split('*').join('%%'); return str; } /** * Special patterns to be converted to regex. * Heuristics are used to simplify patterns * and speed up processing. */ var box = '[^/]'; var boxQ = '[^/]*?'; var lookahead = '(?=.)'; var nodot = '(?!\\.)(?=.)'; var ex = {}; ex.dotfileGlob = '(?:^|\\/)(?:\\.{1,2})(?:$|\\/)'; ex.stardot = '(?!' + ex.dotfileGlob + ')(?=.)[^/]*?'; ex.twoStarDot = '(?:(?!' + ex.dotfileGlob + ').)*?'; /** * Create a regex for `*`. If `dot` is true, * or the pattern does not begin with a leading * star, then return the simple regex. */ function star(dotfile) { return dotfile ? boxQ : nodot + boxQ; } function dotstarbase(dotfile) { var re = dotfile ? ex.dotfileGlob : '\\.'; return '(?!' 
+ re + ')' + lookahead; } function globstar(opts) { if (opts.dot) { return ex.twoStarDot; } return '(?:(?!(?:^|\\/)\\.).)*?'; } function stardot(opts) { return dotstarbase(opts && opts.dot) + '[^/]*?'; } },{"./glob":4,"./utils":5}],4:[function(require,module,exports){ 'use strict'; var brackets = require('expand-brackets'); var braces = require('braces'); var parse = require('parse-glob'); var chars = require('./chars'); /** * Expose `Glob` */ module.exports = Glob; function Glob(pattern, options) { this.options = options || {}; this.pattern = pattern; this.history = []; this.tokens = {}; this.init(pattern); } /** * Initialize defaults */ Glob.prototype.init = function(pattern) { this.orig = pattern; this.negated = this.isNegated(); this.options.track = this.options.track || false; this.options.dot = this.options.dot || this.options.dotfiles; this.options.makeRe = true; }; /** * Push a change into `glob.history`. Useful * for debugging. */ Glob.prototype.track = function(msg) { if (this.options.track) { this.history.push({msg: msg, pattern: this.pattern}); } }; /** * Return true if the glob pattern has the given * `ch`aracter. * * @param {String} `pattern` * @param {String} `ch` * @return {Boolean} */ Glob.prototype.has = function(pattern, ch) { if (ch instanceof RegExp) { return ch.test(pattern); } return pattern.indexOf(ch) !== -1; }; /** * Return true if `glob.pattern` was negated * with `!`. Also removes the `!` from the pattern. * * @return {Boolean} */ Glob.prototype.isNegated = function() { if (this.pattern.charCodeAt(0) === 33 /* '!' */) { this.pattern = this.pattern.slice(1); return true; } return false; }; /** * Return true if the glob pattern has braces * * @param {String} `pattern` * @return {Boolean} */ Glob.prototype.hasBraces = function(pattern) { return this.has((pattern || this.pattern), '{'); }; /** * Expand braces in the given glob pattern. * * We only need to use the [braces] lib when * patterns are nested. 
*/ Glob.prototype.braces = function() { if (this.hasBraces() && this.options.nobraces !== true) { var a = this.pattern.match(/[\{\(\[]/g); var b = this.pattern.match(/[\}\)\]]/g); if (a && b && (a.length !== b.length)) { this.options.makeRe = false; } var expanded = braces(this.pattern, this.options); this.pattern = expanded.join('|'); } }; /** * Return true if the glob pattern has a POSIX * bracket expression (character class) * * @param {String} `pattern` * @return {Boolean} */ Glob.prototype.hasBrackets = function(pattern) { return this.has((pattern || this.pattern), '[:'); }; /** * Expand bracket expressions in `glob.pattern` */ Glob.prototype.brackets = function() { if (this.hasBrackets() && this.options.nobrackets !== true) { this.pattern = brackets(this.pattern); } }; /** * Parse the given glob `pattern` or `glob.pattern` */ Glob.prototype.parse = function(pattern) { this.tokens = parse(pattern || this.pattern, true); return this.tokens; }; /** * Replace `a` with `b`. Also tracks the change before and * after each replacement. This is disabled by default, but * can be enabled by setting `options.track` to true. * * Also, when the pattern is a string, `.split()` is used, * because it's much faster than replace. * * @param {RegExp|String} `a` * @param {String} `b` * @param {Boolean} `escape` When `true`, escapes `*` and `?` in the replacement. * @return {String} */ Glob.prototype._replace = function(a, b, escape) { this.track('before (find): "' + a + '" (replace with): "' + b + '"'); if (escape) b = esc(b); if (a && b && typeof a === 'string') { this.pattern = this.pattern.split(a).join(b); } else if (a instanceof RegExp) { this.pattern = this.pattern.replace(a, b); } this.track('after'); }; /** * Escape special characters in the given string. 
* * @param {String} `str` Glob pattern * @return {String} */ Glob.prototype.escape = function(str) { this.track('before escape: '); var re = /["\\](['"]?[^"'\\]['"]?)/g; this.pattern = str.replace(re, function($0, $1) { var o = chars.ESC; var ch = o && o[$1]; if (ch) { return ch; } if (/[a-z]/i.test($0)) { return $0.split('\\').join(''); } return $0; }); this.track('after escape: '); }; /** * Unescape special characters in the given string. * * @param {String} `str` * @return {String} */ Glob.prototype.unescape = function(str) { var re = /__([A-Z]+)_([A-Z]+)__/g; this.pattern = str.replace(re, function($0, $1) { return chars[$1][$0]; }); }; /** * Escape utils */ function esc(str) { str = str.split('?').join('%~'); str = str.split('*').join('%%'); return str; } },{"./chars":2,"braces":8,"expand-brackets":26,"parse-glob":34}],5:[function(require,module,exports){ (function (process){ 'use strict'; var path = require('path'); var fileRe = require('filename-regex'); var win32 = process.platform === 'win32'; var win; var utils = {}; utils.filename = function filename(fp) { var seg = fp.match(fileRe()); return seg && seg[0]; }; utils.isPath = function isPath(pattern) { return function (fp) { return fp === pattern; }; }; utils.hasPath = function hasPath(pattern) { return function (fp) { return fp.indexOf(pattern) !== -1; }; }; utils.matchPath = function matchPath(pattern, opts) { var fn = (opts && opts.contains) ? utils.hasPath(pattern) : utils.isPath(pattern); return fn; }; utils.hasFilename = function hasFilename(re) { return function (fp) { var name = utils.filename(fp); return name && re.test(name); }; }; /** * Coerce `val` to an array * * @param {*} val * @return {Array} */ utils.arrayify = function arrayify(val) { return !Array.isArray(val) ? [val] : val; }; /** * Convert a file path to a unix path. 
*/ utils.unixify = function unixify(fp, opts) { if (opts && opts.unixify === false) return fp; if (opts && opts.unixify === true) { win = true; } else if (opts && opts.cache && typeof win === 'undefined') { win = win32 || path.sep === '\\'; } if (win) return fp.replace(/[\\\/]+/g, '/'); return fp; }; /** * Escape/unescape utils */ utils.escapePath = function escapePath(fp) { return fp.replace(/[\\.]/g, '\\$&'); }; utils.unescapeGlob = function unescapeGlob(fp) { return fp.replace(/[\\"']/g, ''); }; utils.escapeRe = function escapeRe(str) { return str.replace(/[-[\\$*+?.#^\s{}(|)\]]/g, '\\$&'); }; /** * Expose `utils` */ module.exports = utils; }).call(this,require('_process')) },{"_process":22,"filename-regex":27,"path":21}],6:[function(require,module,exports){ /*! * arr-diff <https://github.com/jonschlinkert/arr-diff> * * Copyright (c) 2014 Jon Schlinkert, contributors. * Licensed under the MIT License */ 'use strict'; var slice = require('array-slice'); /** * Expose `diff` */ module.exports = diff; /** * Return the difference between the first array and * additional arrays. * * ```js * var diff = require('{%= name %}'); * * var a = ['a', 'b', 'c', 'd']; * var b = ['b', 'c']; * * console.log(diff(a, b)) * //=> ['a', 'd'] * ``` * * @param {Array} `a` * @param {Array} `b` * @return {Array} * @api public */ function diff(a, b, c) { var len = a.length; var arr = []; var rest; if (!b) { return a; } if (!c) { rest = b; } else { rest = [].concat.apply([], slice(arguments, 1)); } while (len--) { if (rest.indexOf(a[len]) === -1) { arr.unshift(a[len]); } } return arr; } },{"array-slice":7}],7:[function(require,module,exports){ /*! * array-slice <https://github.com/jonschlinkert/array-slice> * * Copyright (c) 2014 Jon Schlinkert, contributors. 
* Licensed under the MIT License */ 'use strict'; module.exports = function slice(arr, start, end) { var len = arr.length >>> 0; var range = []; start = idx(arr, start); end = idx(arr, end, len); while (start < end) { range.push(arr[start++]); } return range; }; function idx(arr, pos, end) { var len = arr.length >>> 0; if (pos == null) { pos = end || 0; } else if (pos < 0) { pos = Math.max(len + pos, 0); } else { pos = Math.min(pos, len); } return pos; } },{}],8:[function(require,module,exports){ /*! * braces <https://github.com/jonschlinkert/braces> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT license. */ 'use strict'; /** * Module dependencies */ var expand = require('expand-range'); var repeat = require('repeat-element'); var tokens = require('preserve'); var cache = {}; /** * Expose `braces` */ module.exports = function (str, options) { if (typeof str !== 'string') { throw new Error('braces expects a string'); } return braces(str, options); }; /** * Expand `{foo,bar}` or `{1..5}` braces in the * given `string`. 
* * @param {String} `str` * @param {Array} `arr` * @param {Object} `options` * @return {Array} */ function braces(str, arr, options) { if (str === '') { return []; } if (!Array.isArray(arr)) { options = arr; arr = []; } var opts = options || {}; arr = arr || []; if (typeof opts.nodupes === 'undefined') { opts.nodupes = true; } var fn = opts.fn; var es6; if (typeof opts === 'function') { fn = opts; opts = {}; } if (!(patternRe instanceof RegExp)) { patternRe = patternRegex(); } var matches = str.match(patternRe) || []; var m = matches[0]; switch(m) { case '\\,': return escapeCommas(str, arr, opts); case '\\.': return escapeDots(str, arr, opts); case '\/.': return escapePaths(str, arr, opts); case ' ': return splitWhitespace(str); case '{,}': return exponential(str, opts, braces); case '{}': return emptyBraces(str, arr, opts); case '\\{': case '\\}': return escapeBraces(str, arr, opts); case '${': if (!/\{[^{]+\{/.test(str)) { return arr.concat(str); } else { es6 = true; str = tokens.before(str, es6Regex()); } } if (!(braceRe instanceof RegExp)) { braceRe = braceRegex(); } var match = braceRe.exec(str); if (match == null) { return [str]; } var outter = match[1]; var inner = match[2]; if (inner === '') { return [str]; } var segs, segsLength; if (inner.indexOf('..') !== -1) { segs = expand(inner, opts, fn) || inner.split(','); segsLength = segs.length; } else if (inner[0] === '"' || inner[0] === '\'') { return arr.concat(str.split(/['"]/).join('')); } else { segs = inner.split(','); if (opts.makeRe) { return braces(str.replace(outter, wrap(segs, '|')), opts); } segsLength = segs.length; if (segsLength === 1 && opts.bash) { segs[0] = wrap(segs[0], '\\'); } } var len = segs.length; var i = 0, val; while (len--) { var path = segs[i++]; var bash = false; if (/(\.[^.\/])/.test(path)) { if (segsLength > 1) { return segs; } else { return [str]; } } val = splice(str, outter, path); if (/\{[^{}]+?\}/.test(val)) { arr = braces(val, arr, opts); } else if (val !== '') { if 
(opts.nodupes && arr.indexOf(val) !== -1) { continue; } arr.push(es6 ? tokens.after(val) : val); } } if (opts.strict) { return filter(arr, filterEmpty); } return arr; } /** * Expand exponential ranges * * `a{,}{,}` => ['a', 'a', 'a', 'a'] */ function exponential(str, options, fn) { if (typeof options === 'function') { fn = options; options = null; } var opts = options || {}; var esc = '__ESC_EXP__'; var exp = 0; var res; var parts = str.split('{,}'); if (opts.nodupes) { return fn(parts.join(''), opts); } exp = parts.length - 1; res = fn(parts.join(esc), opts); var len = res.length; var arr = []; var i = 0; while (len--) { var ele = res[i++]; var idx = ele.indexOf(esc); if (idx === -1) { arr.push(ele); } else { ele = ele.split('__ESC_EXP__').join(''); if (!!ele && opts.nodupes !== false) { arr.push(ele); } else { var num = Math.pow(2, exp); arr.push.apply(arr, repeat(ele, num)); } } } return arr; } /** * Wrap a value with parens, brackets or braces, * based on the given character/separator. 
* * @param {String|Array} `val` * @param {String} `ch` * @return {String} */ function wrap(val, ch) { if (ch === '|') { return '(' + val.join(ch) + ')'; } if (ch === ',') { return '{' + val.join(ch) + '}'; } if (ch === '-') { return '[' + val.join(ch) + ']'; } if (ch === '\\') { return '\\{' + val + '\\}'; } } /** * Handle empty braces: `{}` */ function emptyBraces(str, arr, opts) { return braces(str.split('{}').join('\\{\\}'), arr, opts); } /** * Filter out empty-ish values */ function filterEmpty(ele) { return !!ele && ele !== '\\'; } /** * Handle patterns with whitespace */ function splitWhitespace(str) { var segs = str.split(' '); var len = segs.length; var res = []; var i = 0; while (len--) { res.push.apply(res, braces(segs[i++])); } return res; } /** * Handle escaped braces: `\\{foo,bar}` */ function escapeBraces(str, arr, opts) { if (!/\{[^{]+\{/.test(str)) { return arr.concat(str.split('\\').join('')); } else { str = str.split('\\{').join('__LT_BRACE__'); str = str.split('\\}').join('__RT_BRACE__'); return map(braces(str, arr, opts), function (ele) { ele = ele.split('__LT_BRACE__').join('{'); return ele.split('__RT_BRACE__').join('}'); }); } } /** * Handle escaped dots: `{1\\.2}` */ function escapeDots(str, arr, opts) { if (!/[^\\]\..+\\\./.test(str)) { return arr.concat(str.split('\\').join('')); } else { str = str.split('\\.').join('__ESC_DOT__'); return map(braces(str, arr, opts), function (ele) { return ele.split('__ESC_DOT__').join('.'); }); } } /** * Handle escaped dots: `{1\\.2}` */ function escapePaths(str, arr, opts) { str = str.split('\/.').join('__ESC_PATH__'); return map(braces(str, arr, opts), function (ele) { return ele.split('__ESC_PATH__').join('\/.'); }); } /** * Handle escaped commas: `{a\\,b}` */ function escapeCommas(str, arr, opts) { if (!/\w,/.test(str)) { return arr.concat(str.split('\\').join('')); } else { str = str.split('\\,').join('__ESC_COMMA__'); return map(braces(str, arr, opts), function (ele) { return 
ele.split('__ESC_COMMA__').join(','); }); } } /** * Regex for common patterns */ function patternRegex() { return /\$\{|[ \t]|{}|{,}|\\,|\/\.|\\\.|\\{|\\}/; } /** * Braces regex. */ function braceRegex() { return /.*(\\?\{([^}]+)\})/; } /** * es6 delimiter regex. */ function es6Regex() { return /\$\{([^}]+)\}/; } var braceRe; var patternRe; /** * Faster alternative to `String.replace()` when the * index of the token to be replaces can't be supplied */ function splice(str, token, replacement) { var i = str.indexOf(token); return str.substr(0, i) + replacement + str.substr(i + token.length); } /** * Fast array map */ function map(arr, fn) { if (arr == null) { return []; } var len = arr.length; var res = new Array(len); var i = -1; while (++i < len) { res[i] = fn(arr[i], i, arr); } return res; } /** * Fast array filter */ function filter(arr, cb) { if (arr == null) return []; if (typeof cb !== 'function') { throw new TypeError('braces: filter expects a callback function.'); } var len = arr.length; var res = arr.slice(); var i = 0; while (len--) { if (!cb(arr[len], i++)) { res.splice(len, 1); } } return res; } },{"expand-range":9,"preserve":15,"repeat-element":16}],9:[function(require,module,exports){ /*! * expand-range <https://github.com/jonschlinkert/expand-range> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT license. 
*/ 'use strict'; var fill = require('fill-range'); module.exports = function expandRange(str, options, fn) { if (typeof str !== 'string') { throw new TypeError('expand-range expects a string.'); } if (typeof options === 'function') { fn = options; options = {}; } if (typeof options === 'boolean') { options = {}; options.makeRe = true; } // create arguments to pass to fill-range var opts = options || {}; var args = str.split('..'); var len = args.length; if (len > 3) { return str; } // if only one argument, it can't expand so return it if (len === 1) { return args; } // if `true`, tell fill-range to regexify the string if (typeof fn === 'boolean' && fn === true) { opts.makeRe = true; } args.push(opts); return fill.apply(fill, args.concat(fn)); }; },{"fill-range":10}],10:[function(require,module,exports){ /*! * fill-range <https://github.com/jonschlinkert/fill-range> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT License. */ 'use strict'; var isObject = require('isobject'); var isNumber = require('is-number'); var randomize = require('randomatic'); var repeatStr = require('repeat-string'); var repeat = require('repeat-element'); /** * Expose `fillRange` */ module.exports = fillRange; /** * Return a range of numbers or letters. * * @param {String} `a` Start of the range * @param {String} `b` End of the range * @param {String} `step` Increment or decrement to use. * @param {Function} `fn` Custom function to modify each element in the range. 
* @return {Array} */ function fillRange(a, b, step, options, fn) { if (a == null || b == null) { throw new Error('fill-range expects the first and second args to be strings.'); } if (typeof step === 'function') { fn = step; options = {}; step = null; } if (typeof options === 'function') { fn = options; options = {}; } if (isObject(step)) { options = step; step = ''; } var expand, regex = false, sep = ''; var opts = options || {}; if (typeof opts.silent === 'undefined') { opts.silent = true; } step = step || opts.step; // store a ref to unmodified arg var origA = a, origB = b; b = (b.toString() === '-0') ? 0 : b; if (opts.makeRe) { step = step ? (step += '~') : step; expand = true; regex = true; sep = '~'; } // handle special step characters if (typeof step === 'string') { var match = stepRe().exec(step); if (match) { var i = match.index; var m = match[0]; // repeat string if (m === '+') { return repeat(a, b); // randomize a, `b` times } else if (m === '?') { return [randomize(a, b)]; // expand right, no regex reduction } else if (m === '>') { step = step.substr(0, i) + step.substr(i + 1); expand = true; // expand to an array, or if valid create a reduced // string for a regex logic `or` } else if (m === '|') { step = step.substr(0, i) + step.substr(i + 1); expand = true; regex = true; sep = m; // expand to an array, or if valid create a reduced // string for a regex range } else if (m === '~') { step = step.substr(0, i) + step.substr(i + 1); expand = true; regex = true; sep = m; } } else if (!isNumber(step)) { if (!opts.silent) { throw new TypeError('fill-range: invalid step.'); } return null; } } if (/[.&*()[\]^%$#@!]/.test(a) || /[.&*()[\]^%$#@!]/.test(b)) { if (!opts.silent) { throw new RangeError('fill-range: invalid range arguments.'); } return null; } // has neither a letter nor number, or has both letters and numbers // this needs to be after the step logic if (!noAlphaNum(a) || !noAlphaNum(b) || hasBoth(a) || hasBoth(b)) { if (!opts.silent) { throw new 
RangeError('fill-range: invalid range arguments.'); } return null; } // validate arguments var isNumA = isNumber(zeros(a)); var isNumB = isNumber(zeros(b)); if ((!isNumA && isNumB) || (isNumA && !isNumB)) { if (!opts.silent) { throw new TypeError('fill-range: first range argument is incompatible with second.'); } return null; } // by this point both are the same, so we // can use A to check going forward. var isNum = isNumA; var num = formatStep(step); // is the range alphabetical? or numeric? if (isNum) { // if numeric, coerce to an integer a = +a; b = +b; } else { // otherwise, get the charCode to expand alpha ranges a = a.charCodeAt(0); b = b.charCodeAt(0); } // is the pattern descending? var isDescending = a > b; // don't create a character class if the args are < 0 if (a < 0 || b < 0) { expand = false; regex = false; } // detect padding var padding = isPadded(origA, origB); var res, pad, arr = []; var ii = 0; // character classes, ranges and logical `or` if (regex) { if (shouldExpand(a, b, num, isNum, padding, opts)) { // make sure the correct separator is used if (sep === '|' || sep === '~') { sep = detectSeparator(a, b, num, isNum, isDescending); } return wrap([origA, origB], sep, opts); } } while (isDescending ? 
(a >= b) : (a <= b)) { if (padding && isNum) { pad = padding(a); } // custom function if (typeof fn === 'function') { res = fn(a, isNum, pad, ii++); // letters } else if (!isNum) { if (regex && isInvalidChar(a)) { res = null; } else { res = String.fromCharCode(a); } // numbers } else { res = formatPadding(a, pad); } // add result to the array, filtering any nulled values if (res !== null) arr.push(res); // increment or decrement if (isDescending) { a -= num; } else { a += num; } } // now that the array is expanded, we need to handle regex // character classes, ranges or logical `or` that wasn't // already handled before the loop if ((regex || expand) && !opts.noexpand) { // make sure the correct separator is used if (sep === '|' || sep === '~') { sep = detectSeparator(a, b, num, isNum, isDescending); } if (arr.length === 1 || a < 0 || b < 0) { return arr; } return wrap(arr, sep, opts); } return arr; } /** * Wrap the string with the correct regex * syntax. */ function wrap(arr, sep, opts) { if (sep === '~') { sep = '-'; } var str = arr.join(sep); var pre = opts && opts.regexPrefix; // regex logical `or` if (sep === '|') { str = pre ? pre + str : str; str = '(' + str + ')'; } // regex character class if (sep === '-') { str = (pre && pre === '^') ? 
pre + str : str; str = '[' + str + ']'; } return [str]; } /** * Check for invalid characters */ function isCharClass(a, b, step, isNum, isDescending) { if (isDescending) { return false; } if (isNum) { return a <= 9 && b <= 9; } if (a < b) { return step === 1; } return false; } /** * Detect the correct separator to use */ function shouldExpand(a, b, num, isNum, padding, opts) { if (isNum && (a > 9 || b > 9)) { return false; } return !padding && num === 1 && a < b; } /** * Detect the correct separator to use */ function detectSeparator(a, b, step, isNum, isDescending) { var isChar = isCharClass(a, b, step, isNum, isDescending); if (!isChar) { return '|'; } return '~'; } /** * Correctly format the step based on type */ function formatStep(step) { return Math.abs(step >> 0) || 1; } /** * Format padding, taking leading `-` into account */ function formatPadding(ch, pad) { var res = pad ? pad + ch : ch; if (pad && ch.toString().charAt(0) === '-') { res = '-' + pad + ch.toString().substr(1); } return res.toString(); } /** * Check for invalid characters */ function isInvalidChar(str) { var ch = toStr(str); return ch === '\\' || ch === '[' || ch === ']' || ch === '^' || ch === '(' || ch === ')' || ch === '`'; } /** * Convert to a string from a charCode */ function toStr(ch) { return String.fromCharCode(ch); } /** * Step regex */ function stepRe() { return /\?|>|\||\+|\~/g; } /** * Return true if `val` has either a letter * or a number */ function noAlphaNum(val) { return /[a-z0-9]/i.test(val); } /** * Return true if `val` has both a letter and * a number (invalid) */ function hasBoth(val) { return /[a-z][0-9]|[0-9][a-z]/i.test(val); } /** * Normalize zeros for checks */ function zeros(val) { if (/^-*0+$/.test(val.toString())) { return '0'; } return val; } /** * Return true if `val` has leading zeros, * or a similar valid pattern. 
*/ function hasZeros(val) { return /[^.]\.|^-*0+[0-9]/.test(val); } /** * If the string is padded, returns a curried function with * the a cached padding string, or `false` if no padding. * * @param {*} `origA` String or number. * @return {String|Boolean} */ function isPadded(origA, origB) { if (hasZeros(origA) || hasZeros(origB)) { var alen = length(origA); var blen = length(origB); var len = alen >= blen ? alen : blen; return function (a) { return repeatStr('0', len - length(a)); }; } return false; } /** * Get the string length of `val` */ function length(val) { return val.toString().length; } },{"is-number":11,"isobject":12,"randomatic":13,"repeat-element":16,"repeat-string":14}],11:[function(require,module,exports){ /*! * is-number <https://github.com/jonschlinkert/is-number> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT License. */ 'use strict'; module.exports = function isNumber(n) { return (!!(+n) && !Array.isArray(n)) && isFinite(n) || n === '0' || n === 0; }; },{}],12:[function(require,module,exports){ /*! * isobject <https://github.com/jonschlinkert/isobject> * * Copyright (c) 2014 Jon Schlinkert, contributors. * Licensed under the MIT License */ 'use strict'; /** * is the value an object, and not an array? * * @param {*} `value` * @return {Boolean} */ module.exports = function isObject(o) { return o != null && typeof o === 'object' && !Array.isArray(o); }; },{}],13:[function(require,module,exports){ /*! * randomatic <https://github.com/jonschlinkert/randomatic> * * Copyright (c) 2014-2015, Jon Schlinkert. 
* Licensed under the MIT License (MIT) * * Many changes have been made, but this was originally * inspired by <http://stackoverflow.com/a/10727155/1267639> */ 'use strict'; var isNumber = require('is-number'); var typeOf = require('kind-of'); /** * Expose `randomatic` */ module.exports = randomatic; /** * Available mask characters */ var type = { lower: 'abcdefghijklmnopqrstuvwxyz', upper: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', number: '0123456789', special: '~!@#$%^&()_+-={}[];\',.' }; type.all = type.lower + type.upper + type.number; /** * Generate random character sequences of a specified `length`, * based on the given `pattern`. * * @param {String} `pattern` The pattern to use for generating the random string. * @param {String} `length` The length of the string to generate. * @param {String} `options` * @return {String} * @api public */ function randomatic(pattern, length, options) { if (typeof pattern === 'undefined') { throw new Error('randomatic expects a string or number.'); } var custom = false; if (arguments.length === 1) { if (typeof pattern === 'string') { length = pattern.length; } else if (isNumber(pattern)) { options = {}; length = pattern; pattern = '*'; } } if(typeOf(length) === 'object' && length.hasOwnProperty('chars')) { options = length; pattern = options.chars; length = pattern.length; custom = true; } var opts = options || {}; var mask = ''; var res = ''; // Characters to be used if (pattern.indexOf('?') !== -1) mask += opts.chars; if (pattern.indexOf('a') !== -1) mask += type.lower; if (pattern.indexOf('A') !== -1) mask += type.upper; if (pattern.indexOf('0') !== -1) mask += type.number; if (pattern.indexOf('!') !== -1) mask += type.special; if (pattern.indexOf('*') !== -1) mask += type.all; if (custom) mask += pattern; while (length--) { res += mask.charAt(parseInt(Math.random() * mask.length)); } return res; }; },{"is-number":11,"kind-of":29}],14:[function(require,module,exports){ /*! 
* repeat-string <https://github.com/jonschlinkert/repeat-string> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT License */ 'use strict'; /** * Expose `repeat` */ module.exports = repeat; /** * Repeat the given `string` the specified `number` * of times. * * **Example:** * * ```js * var repeat = require('repeat-string'); * repeat('A', 5); * //=> AAAAA * ``` * * @param {String} `string` The string to repeat * @param {Number} `number` The number of times to repeat the string * @return {String} Repeated string * @api public */ function repeat(str, num) { if (typeof str !== 'string') { throw new TypeError('repeat-string expects a string.'); } if (num === 1) return str; if (num === 2) return str + str; var max = str.length * num; if (cache !== str || typeof cache === 'undefined') { cache = str; res = ''; } while (max > res.length && num > 0) { if (num & 1) { res += str; } num >>= 1; if (!num) break; str += str; } return res.substr(0, max); } /** * Results cache */ var res = ''; var cache; },{}],15:[function(require,module,exports){ /*! * preserve <https://github.com/jonschlinkert/preserve> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT license. */ 'use strict'; /** * Replace tokens in `str` with a temporary, heuristic placeholder. * * ```js * tokens.before('{a\\,b}'); * //=> '{__ID1__}' * ``` * * @param {String} `str` * @return {String} String with placeholders. * @api public */ exports.before = function before(str, re) { return str.replace(re, function (match) { var id = randomize(); cache[id] = match; return '__ID' + id + '__'; }); }; /** * Replace placeholders in `str` with original tokens. * * ```js * tokens.after('{__ID1__}'); * //=> '{a\\,b}' * ``` * * @param {String} `str` String with placeholders * @return {String} `str` String with original tokens. 
 * @api public
 */

exports.after = function after(str) {
  return str.replace(/__ID(.{5})__/g, function (_, id) {
    // look up the token stashed by `before`; unknown ids yield undefined
    return cache[id];
  });
};

// 5 decimal digits taken from the fractional part of Math.random()
function randomize() {
  return Math.random().toString().slice(2, 7);
}

var cache = {};

},{}],16:[function(require,module,exports){
/*!
 * repeat-element <https://github.com/jonschlinkert/repeat-element>
 *
 * Copyright (c) 2015 Jon Schlinkert, contributors.
 * Licensed under the MIT license.
 */

'use strict';

module.exports = repeat;

// Return a new array containing `ele` repeated `num` times.
function repeat(ele, num) {
  var arr = new Array(num);

  for (var i = 0; i < num; i++) {
    arr[i] = ele;
  }

  return arr;
}

},{}],17:[function(require,module,exports){
/*!
 * The buffer module from node.js, for the browser.
 *
 * @author   Feross Aboukhadijeh <[email protected]> <http://feross.org>
 * @license  MIT
 */

var base64 = require('base64-js')
var ieee754 = require('ieee754')
var isArray = require('is-array')

exports.Buffer = Buffer
exports.SlowBuffer = SlowBuffer
exports.INSPECT_MAX_BYTES = 50
Buffer.poolSize = 8192 // not used by this implementation

// maximum allocatable length (~1 GiB)
var kMaxLength = 0x3fffffff
var rootParent = {}

/**
 * If `Buffer.TYPED_ARRAY_SUPPORT`:
 *   === true    Use Uint8Array implementation (fastest)
 *   === false   Use Object implementation (most compatible, even IE6)
 *
 * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
 * Opera 11.6+, iOS 4.2+.
 *
 * Note:
 *
 * - Implementation must support adding new properties to `Uint8Array` instances.
 *   Firefox 4-29 lacked support, fixed in Firefox 30+.
 *   See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438.
 *
 * - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function.
 *
 * - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of
 *   incorrect length in some situations.
 *
 * We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they will
 * get the Object implementation, which is slower but will work correctly.
 */
// Runtime feature probe: run the three known-buggy-browser checks once at load.
Buffer.TYPED_ARRAY_SUPPORT = (function () {
  try {
    var buf = new ArrayBuffer(0)
    var arr = new Uint8Array(buf)
    arr.foo = function () { return 42 }
    return arr.foo() === 42 && // typed array instances can be augmented
        typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray`
        new Uint8Array(1).subarray(1, 1).byteLength === 0 // ie10 has broken `subarray`
  } catch (e) {
    return false
  }
})()

/**
 * Class: Buffer
 * =============
 *
 * The Buffer constructor returns instances of `Uint8Array` that are augmented
 * with function properties for all the node `Buffer` API functions. We use
 * `Uint8Array` so that square bracket notation works as expected -- it returns
 * a single octet.
 *
 * By augmenting the instances, we can avoid modifying the `Uint8Array`
 * prototype.
 */
// Accepts a length, a string (+ encoding), an array-like, another Buffer,
// or a Buffer-shaped JSON object ({type:'Buffer', data:[...]}).
function Buffer (subject, encoding) {
  var self = this
  if (!(self instanceof Buffer)) return new Buffer(subject, encoding)

  var type = typeof subject
  var length

  if (type === 'number') {
    length = +subject
  } else if (type === 'string') {
    length = Buffer.byteLength(subject, encoding)
  } else if (type === 'object' && subject !== null) { // assume object is array-like
    if (subject.type === 'Buffer' && isArray(subject.data)) subject = subject.data
    length = +subject.length
  } else {
    throw new TypeError('must start with number, buffer, array or string')
  }

  if (length > kMaxLength) {
    throw new RangeError('Attempt to allocate Buffer larger than maximum size: 0x' +
      kMaxLength.toString(16) + ' bytes')
  }

  if (length < 0) length = 0
  else length >>>= 0 // coerce to uint32

  if (Buffer.TYPED_ARRAY_SUPPORT) {
    // Preferred: Return an augmented `Uint8Array` instance for best performance
    self = Buffer._augment(new Uint8Array(length)) // eslint-disable-line consistent-this
  } else {
    // Fallback: Return THIS instance of Buffer (created by `new`)
    self.length = length
    self._isBuffer = true
  }

  var i
  if (Buffer.TYPED_ARRAY_SUPPORT && typeof subject.byteLength === 'number') {
    // Speed optimization -- use set if we're copying from a typed array
    self._set(subject)
  } else if (isArrayish(subject)) {
    // Treat array-ish objects as a byte array
    if (Buffer.isBuffer(subject)) {
      for (i = 0; i < length; i++) {
        self[i] = subject.readUInt8(i)
      }
    } else {
      // wrap each element into the 0..255 byte range (handles negatives)
      for (i = 0; i < length; i++) {
        self[i] = ((subject[i] % 256) + 256) % 256
      }
    }
  } else if (type === 'string') {
    self.write(subject, 0, encoding)
  } else if (type === 'number' && !Buffer.TYPED_ARRAY_SUPPORT) {
    // object-backed buffers are not zero-filled by default
    for (i = 0; i < length; i++) {
      self[i] = 0
    }
  }

  if (length > 0 && length <= Buffer.poolSize) self.parent = rootParent

  return self
}

// Same as Buffer, but without the shared `rootParent` bookkeeping.
function SlowBuffer (subject, encoding) {
  if (!(this instanceof SlowBuffer)) return new SlowBuffer(subject, encoding)

  var buf = new Buffer(subject, encoding)
  delete buf.parent
  return buf
}

Buffer.isBuffer = function isBuffer (b) {
  return !!(b != null && b._isBuffer)
}

// Lexicographic byte-wise comparison; returns -1/0/1.
Buffer.compare = function compare (a, b) {
  if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
    throw new TypeError('Arguments must be Buffers')
  }

  if (a === b) return 0

  var x = a.length
  var y = b.length
  // advance past the common prefix
  for (var i = 0, len = Math.min(x, y); i < len && a[i] === b[i]; i++) {}
  if (i !== len) {
    x = a[i]
    y = b[i]
  }
  if (x < y) return -1
  if (y < x) return 1
  return 0
}

Buffer.isEncoding = function isEncoding (encoding) {
  switch (String(encoding).toLowerCase()) {
    case 'hex':
    case 'utf8':
    case 'utf-8':
    case 'ascii':
    case 'binary':
    case 'base64':
    case 'raw':
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return true
    default:
      return false
  }
}

// Concatenate a list of Buffers. `totalLength` is an optional precomputed sum.
Buffer.concat = function concat (list, totalLength) {
  if (!isArray(list)) throw new TypeError('list argument must be an Array of Buffers.')

  if (list.length === 0) {
    return new Buffer(0)
  } else if (list.length === 1) {
    return list[0]
  }

  var i
  if (totalLength === undefined) {
    totalLength = 0
    for (i = 0; i < list.length; i++) {
      totalLength += list[i].length
    }
  }

  var buf = new Buffer(totalLength)
  var pos = 0
  for (i = 0; i < list.length; i++) {
    var item = list[i]
    item.copy(buf, pos)
    pos += item.length
  }
  return buf
}

Buffer.byteLength =
function byteLength (str, encoding) {
  var ret
  str = str + ''
  switch (encoding || 'utf8') {
    case 'ascii':
    case 'binary':
    case 'raw':
      ret = str.length
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      ret = str.length * 2
      break
    case 'hex':
      ret = str.length >>> 1
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8ToBytes(str).length
      break
    case 'base64':
      ret = base64ToBytes(str).length
      break
    default:
      ret = str.length
  }
  return ret
}

// pre-set for values that may exist in the future
Buffer.prototype.length = undefined
Buffer.prototype.parent = undefined

// toString(encoding, start=0, end=buffer.length)
Buffer.prototype.toString = function toString (encoding, start, end) {
  var loweredCase = false

  start = start >>> 0
  end = end === undefined || end === Infinity ? this.length : end >>> 0

  if (!encoding) encoding = 'utf8'
  if (start < 0) start = 0
  if (end > this.length) end = this.length
  if (end <= start) return ''

  // loop so an unknown mixed-case encoding gets lowercased once and retried
  while (true) {
    switch (encoding) {
      case 'hex':
        return hexSlice(this, start, end)
      case 'utf8':
      case 'utf-8':
        return utf8Slice(this, start, end)
      case 'ascii':
        return asciiSlice(this, start, end)
      case 'binary':
        return binarySlice(this, start, end)
      case 'base64':
        return base64Slice(this, start, end)
      case 'ucs2':
      case 'ucs-2':
      case 'utf16le':
      case 'utf-16le':
        return utf16leSlice(this, start, end)
      default:
        if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
        encoding = (encoding + '').toLowerCase()
        loweredCase = true
    }
  }
}

Buffer.prototype.equals = function equals (b) {
  if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
  if (this === b) return true
  return Buffer.compare(this, b) === 0
}

// Hex preview of up to INSPECT_MAX_BYTES bytes, for console display.
Buffer.prototype.inspect = function inspect () {
  var str = ''
  var max = exports.INSPECT_MAX_BYTES
  if (this.length > 0) {
    str = this.toString('hex', 0, max).match(/.{2}/g).join(' ')
    if (this.length > max) str += ' ... 
' } return '<Buffer ' + str + '>' } Buffer.prototype.compare = function compare (b) { if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') if (this === b) return 0 return Buffer.compare(this, b) } Buffer.prototype.indexOf = function indexOf (val, byteOffset) { if (byteOffset > 0x7fffffff) byteOffset = 0x7fffffff else if (byteOffset < -0x80000000) byteOffset = -0x80000000 byteOffset >>= 0 if (this.length === 0) return -1 if (byteOffset >= this.length) return -1 // Negative offsets start from the end of the buffer if (byteOffset < 0) byteOffset = Math.max(this.length + byteOffset, 0) if (typeof val === 'string') { if (val.length === 0) return -1 // special case: looking for empty string always fails return String.prototype.indexOf.call(this, val, byteOffset) } if (Buffer.isBuffer(val)) { return arrayIndexOf(this, val, byteOffset) } if (typeof val === 'number') { if (Buffer.TYPED_ARRAY_SUPPORT && Uint8Array.prototype.indexOf === 'function') { return Uint8Array.prototype.indexOf.call(this, val, byteOffset) } return arrayIndexOf(this, [ val ], byteOffset) } function arrayIndexOf (arr, val, byteOffset) { var foundIndex = -1 for (var i = 0; byteOffset + i < arr.length; i++) { if (arr[byteOffset + i] === val[foundIndex === -1 ? 0 : i - foundIndex]) { if (foundIndex === -1) foundIndex = i if (i - foundIndex + 1 === val.length) return byteOffset + foundIndex } else { foundIndex = -1 } } return -1 } throw new TypeError('val must be string, number or Buffer') } // `get` will be removed in Node 0.13+ Buffer.prototype.get = function get (offset) { console.log('.get() is deprecated. Access using array indexes instead.') return this.readUInt8(offset) } // `set` will be removed in Node 0.13+ Buffer.prototype.set = function set (v, offset) { console.log('.set() is deprecated. 
Access using array indexes instead.')
  return this.writeUInt8(v, offset)
}

// Decode hex digit pairs from `string` into `buf` starting at `offset`;
// returns the number of bytes written.
function hexWrite (buf, string, offset, length) {
  offset = Number(offset) || 0
  var remaining = buf.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }

  // must be an even number of digits
  var strLen = string.length
  if (strLen % 2 !== 0) throw new Error('Invalid hex string')

  if (length > strLen / 2) {
    length = strLen / 2
  }
  for (var i = 0; i < length; i++) {
    var parsed = parseInt(string.substr(i * 2, 2), 16)
    if (isNaN(parsed)) throw new Error('Invalid hex string')
    buf[offset + i] = parsed
  }
  return i
}

function utf8Write (buf, string, offset, length) {
  var charsWritten = blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
  return charsWritten
}

function asciiWrite (buf, string, offset, length) {
  var charsWritten = blitBuffer(asciiToBytes(string), buf, offset, length)
  return charsWritten
}

function binaryWrite (buf, string, offset, length) {
  return asciiWrite(buf, string, offset, length)
}

function base64Write (buf, string, offset, length) {
  var charsWritten = blitBuffer(base64ToBytes(string), buf, offset, length)
  return charsWritten
}

function utf16leWrite (buf, string, offset, length) {
  var charsWritten = blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
  return charsWritten
}

// Write `string` into the buffer; returns the number of bytes written.
Buffer.prototype.write = function write (string, offset, length, encoding) {
  // Support both (string, offset, length, encoding)
  // and the legacy (string, encoding, offset, length)
  if (isFinite(offset)) {
    if (!isFinite(length)) {
      encoding = length
      length = undefined
    }
  } else { // legacy
    var swap = encoding
    encoding = offset
    offset = length
    length = swap
  }

  offset = Number(offset) || 0

  if (length < 0 || offset < 0 || offset > this.length) {
    throw new RangeError('attempt to write outside buffer bounds')
  }

  var remaining = this.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  encoding = String(encoding || 'utf8').toLowerCase()

  var ret
  switch (encoding) {
    case 'hex':
      ret = hexWrite(this, string, offset, length)
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8Write(this, string, offset, length)
      break
    case 'ascii':
      ret = asciiWrite(this, string, offset, length)
      break
    case 'binary':
      ret = binaryWrite(this, string, offset, length)
      break
    case 'base64':
      ret = base64Write(this, string, offset, length)
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      ret = utf16leWrite(this, string, offset, length)
      break
    default:
      throw new TypeError('Unknown encoding: ' + encoding)
  }
  return ret
}

Buffer.prototype.toJSON = function toJSON () {
  return {
    type: 'Buffer',
    data: Array.prototype.slice.call(this._arr || this, 0)
  }
}

function base64Slice (buf, start, end) {
  if (start === 0 && end === buf.length) {
    return base64.fromByteArray(buf)
  } else {
    return base64.fromByteArray(buf.slice(start, end))
  }
}

// Decode bytes as UTF-8: multi-byte sequences are accumulated as
// percent-encoded text in `tmp` and decoded via decodeURIComponent.
function utf8Slice (buf, start, end) {
  var res = ''
  var tmp = ''
  end = Math.min(buf.length, end)

  for (var i = start; i < end; i++) {
    if (buf[i] <= 0x7F) {
      res += decodeUtf8Char(tmp) + String.fromCharCode(buf[i])
      tmp = ''
    } else {
      tmp += '%' + buf[i].toString(16)
    }
  }

  return res + decodeUtf8Char(tmp)
}

function asciiSlice (buf, start, end) {
  var ret = ''
  end = Math.min(buf.length, end)

  for (var i = start; i < end; i++) {
    // ascii masks to 7 bits
    ret += String.fromCharCode(buf[i] & 0x7F)
  }
  return ret
}

function binarySlice (buf, start, end) {
  var ret = ''
  end = Math.min(buf.length, end)

  for (var i = start; i < end; i++) {
    ret += String.fromCharCode(buf[i])
  }
  return ret
}

function hexSlice (buf, start, end) {
  var len = buf.length

  if (!start || start < 0) start = 0
  if (!end || end < 0 || end > len) end = len

  var out = ''
  for (var i = start; i < end; i++) {
    out += toHex(buf[i])
  }
  return out
}

// little-endian UTF-16: combine byte pairs into code units
function utf16leSlice (buf, start, end) {
  var bytes = buf.slice(start, end)
  var res = ''
  for (var i = 0; i < bytes.length; i += 2)
  {
    res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256)
  }
  return res
}

// Node-style slice: negative indices count from the end; the slice shares
// memory with the parent when typed arrays are available.
Buffer.prototype.slice = function slice (start, end) {
  var len = this.length
  start = ~~start
  end = end === undefined ? len : ~~end

  if (start < 0) {
    start += len
    if (start < 0) start = 0
  } else if (start > len) {
    start = len
  }

  if (end < 0) {
    end += len
    if (end < 0) end = 0
  } else if (end > len) {
    end = len
  }

  if (end < start) end = start

  var newBuf
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    newBuf = Buffer._augment(this.subarray(start, end))
  } else {
    var sliceLen = end - start
    newBuf = new Buffer(sliceLen, undefined)
    for (var i = 0; i < sliceLen; i++) {
      newBuf[i] = this[i + start]
    }
  }

  if (newBuf.length) newBuf.parent = this.parent || this

  return newBuf
}

/*
 * Need to make sure that buffer isn't trying to write out of bounds.
 */
function checkOffset (offset, ext, length) {
  if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
  if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
}

// Read `byteLength` bytes as an unsigned little-endian integer.
// `noAssert` skips the bounds check in all read/write methods below.
Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) checkOffset(offset, byteLength, this.length)

  var val = this[offset]
  var mul = 1
  var i = 0
  while (++i < byteLength && (mul *= 0x100)) {
    val += this[offset + i] * mul
  }
  return val
}

Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) {
    checkOffset(offset, byteLength, this.length)
  }

  var val = this[offset + --byteLength]
  var mul = 1
  while (byteLength > 0 && (mul *= 0x100)) {
    val += this[offset + --byteLength] * mul
  }
  return val
}

Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 1, this.length)
  return this[offset]
}

Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 2, this.length)
  return this[offset] | (this[offset
 + 1] << 8)
}

Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 2, this.length)
  return (this[offset] << 8) | this[offset + 1]
}

Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 4, this.length)

  // multiply (not shift) the top byte so the result stays unsigned
  return ((this[offset]) |
      (this[offset + 1] << 8) |
      (this[offset + 2] << 16)) +
      (this[offset + 3] * 0x1000000)
}

Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 4, this.length)

  return (this[offset] * 0x1000000) +
    ((this[offset + 1] << 16) |
    (this[offset + 2] << 8) |
    this[offset + 3])
}

// Read `byteLength` bytes as a signed little-endian integer
// (two's complement correction applied after the unsigned accumulation).
Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) checkOffset(offset, byteLength, this.length)

  var val = this[offset]
  var mul = 1
  var i = 0
  while (++i < byteLength && (mul *= 0x100)) {
    val += this[offset + i] * mul
  }
  mul *= 0x80

  if (val >= mul) val -= Math.pow(2, 8 * byteLength)

  return val
}

Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) checkOffset(offset, byteLength, this.length)

  var i = byteLength
  var mul = 1
  var val = this[offset + --i]
  while (i > 0 && (mul *= 0x100)) {
    val += this[offset + --i] * mul
  }
  mul *= 0x80

  if (val >= mul) val -= Math.pow(2, 8 * byteLength)

  return val
}

Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 1, this.length)
  if (!(this[offset] & 0x80)) return (this[offset])
  return ((0xff - this[offset] + 1) * -1)
}

Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 2, this.length)
  var val = this[offset] | (this[offset + 1] << 8)
  // sign-extend 16-bit values
  return (val & 0x8000) ?
 val | 0xFFFF0000 : val
}

Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 2, this.length)
  var val = this[offset + 1] | (this[offset] << 8)
  return (val & 0x8000) ? val | 0xFFFF0000 : val
}

Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 4, this.length)

  return (this[offset]) |
    (this[offset + 1] << 8) |
    (this[offset + 2] << 16) |
    (this[offset + 3] << 24)
}

Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 4, this.length)

  return (this[offset] << 24) |
    (this[offset + 1] << 16) |
    (this[offset + 2] << 8) |
    (this[offset + 3])
}

Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 4, this.length)
  return ieee754.read(this, offset, true, 23, 4)
}

Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 4, this.length)
  return ieee754.read(this, offset, false, 23, 4)
}

Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 8, this.length)
  return ieee754.read(this, offset, true, 52, 8)
}

Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
  if (!noAssert) checkOffset(offset, 8, this.length)
  return ieee754.read(this, offset, false, 52, 8)
}

// Range/bounds validation shared by the integer write methods.
function checkInt (buf, value, offset, ext, max, min) {
  if (!Buffer.isBuffer(buf)) throw new TypeError('buffer must be a Buffer instance')
  if (value > max || value < min) throw new RangeError('value is out of bounds')
  if (offset + ext > buf.length) throw new RangeError('index out of range')
}

Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
  value = +value
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) checkInt(this, value, offset, byteLength, Math.pow(2, 8 * byteLength), 0)

  var mul = 1
  var i = 0
  this[offset] = value & 0xFF
  while (++i < byteLength && (mul *= 0x100)) {
    this[offset + i] = (value / mul) >>> 0 & 0xFF
  }

  return offset + byteLength
}

Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
  value = +value
  offset = offset >>> 0
  byteLength = byteLength >>> 0
  if (!noAssert) checkInt(this, value, offset, byteLength, Math.pow(2, 8 * byteLength), 0)

  var i = byteLength - 1
  var mul = 1
  this[offset + i] = value & 0xFF
  while (--i >= 0 && (mul *= 0x100)) {
    this[offset + i] = (value / mul) >>> 0 & 0xFF
  }

  return offset + byteLength
}

Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
  if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
  this[offset] = value
  return offset + 1
}

// Byte-by-byte 16-bit write for the object-backed (no typed array) fallback.
function objectWriteUInt16 (buf, value, offset, littleEndian) {
  if (value < 0) value = 0xffff + value + 1
  for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; i++) {
    buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
      (littleEndian ?
 i : 1 - i) * 8
  }
}

Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = value
    this[offset + 1] = (value >>> 8)
  } else {
    objectWriteUInt16(this, value, offset, true)
  }
  return offset + 2
}

Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 8)
    this[offset + 1] = value
  } else {
    objectWriteUInt16(this, value, offset, false)
  }
  return offset + 2
}

// Byte-by-byte 32-bit write for the object-backed (no typed array) fallback.
function objectWriteUInt32 (buf, value, offset, littleEndian) {
  if (value < 0) value = 0xffffffff + value + 1
  for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; i++) {
    buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff
  }
}

Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset + 3] = (value >>> 24)
    this[offset + 2] = (value >>> 16)
    this[offset + 1] = (value >>> 8)
    this[offset] = value
  } else {
    objectWriteUInt32(this, value, offset, true)
  }
  return offset + 4
}

Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 24)
    this[offset + 1] = (value >>> 16)
    this[offset + 2] = (value >>> 8)
    this[offset + 3] = value
  } else {
    objectWriteUInt32(this, value, offset, false)
  }
  return offset + 4
}

// Variable-width signed little-endian write; `sub` carries the borrow for
// negative values so each emitted byte is two's-complement correct.
Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) {
    checkInt(
      this, value, offset, byteLength,
      Math.pow(2, 8 * byteLength - 1) - 1,
      -Math.pow(2, 8 * byteLength - 1)
    )
  }

  var i = 0
  var mul = 1
  var sub = value < 0 ? 1 : 0
  this[offset] = value & 0xFF
  while (++i < byteLength && (mul *= 0x100)) {
    this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
  }

  return offset + byteLength
}

Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) {
    checkInt(
      this, value, offset, byteLength,
      Math.pow(2, 8 * byteLength - 1) - 1,
      -Math.pow(2, 8 * byteLength - 1)
    )
  }

  var i = byteLength - 1
  var mul = 1
  var sub = value < 0 ? 1 : 0
  this[offset + i] = value & 0xFF
  while (--i >= 0 && (mul *= 0x100)) {
    this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
  }

  return offset + byteLength
}

Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
  if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
  if (value < 0) value = 0xff + value + 1
  this[offset] = value
  return offset + 1
}

Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = value
    this[offset + 1] = (value >>> 8)
  } else {
    objectWriteUInt16(this, value, offset, true)
  }
  return offset + 2
}

Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 8)
    this[offset + 1] = value
  } else {
    objectWriteUInt16(this, value, offset, false)
  }
  return offset + 2
}

Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = value
    this[offset + 1] = (value >>> 8)
    this[offset + 2] = (value >>> 16)
    this[offset + 3] = (value >>> 24)
  } else {
    objectWriteUInt32(this, value, offset, true)
  }
  return offset + 4
}

Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
  if (value < 0) value = 0xffffffff + value + 1
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 24)
    this[offset + 1] = (value >>> 16)
    this[offset + 2] = (value >>> 8)
    this[offset + 3] = value
  } else {
    objectWriteUInt32(this, value, offset, false)
  }
  return offset + 4
}

// Like checkInt, but for IEEE754 floats (no Buffer-type check; adds a
// negative-offset check instead).
function checkIEEE754 (buf, value, offset, ext, max, min) {
  if (value > max || value < min) throw new RangeError('value is out of bounds')
  if (offset + ext > buf.length) throw new RangeError('index out of range')
  if (offset < 0) throw new RangeError('index out of range')
}

function writeFloat (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
  }
  ieee754.write(buf, value, offset, littleEndian, 23, 4)
  return offset + 4
}

Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
  return writeFloat(this, value, offset, true, noAssert)
}

Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
  return writeFloat(this, value, offset, false, noAssert)
}

function writeDouble (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
  }
  ieee754.write(buf, value, offset, littleEndian, 52, 8)
  return offset + 8
}

Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
  return writeDouble(this, value, offset, true, noAssert)
}

Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
  return writeDouble(this, value, offset, false, noAssert)
}

//
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
Buffer.prototype.copy = function copy (target, target_start, start, end) {
  if (!start) start = 0
  if (!end && end !== 0) end = this.length
  if (target_start >= target.length) target_start = target.length
  if (!target_start) target_start = 0
  if (end > 0 && end < start) end = start

  // Copy 0 bytes; we're done
  if (end === start) return 0
  if (target.length === 0 || this.length === 0) return 0

  // Fatal error conditions
  if (target_start < 0) {
    throw new RangeError('targetStart out of bounds')
  }
  if (start < 0 || start >= this.length) throw new RangeError('sourceStart out of bounds')
  if (end < 0) throw new RangeError('sourceEnd out of bounds')

  // Are we oob?
  if (end > this.length) end = this.length
  if (target.length - target_start < end - start) {
    end = target.length - target_start + start
  }

  var len = end - start

  // short copies go byte-by-byte; large ones use the native typed-array set
  if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) {
    for (var i = 0; i < len; i++) {
      target[i + target_start] = this[i + start]
    }
  } else {
    target._set(this.subarray(start, start + len), target_start)
  }

  return len
}

// fill(value, start=0, end=buffer.length)
Buffer.prototype.fill = function fill (value, start, end) {
  if (!value) value = 0
  if (!start) start = 0
  if (!end) end = this.length

  if (end < start) throw new RangeError('end < start')

  // Fill 0 bytes; we're done
  if (end === start) return
  if (this.length === 0) return

  if (start < 0 || start >= this.length) throw new RangeError('start out of bounds')
  if (end < 0 || end > this.length) throw new RangeError('end out of bounds')

  var i
  if (typeof value === 'number') {
    for (i = start; i < end; i++) {
      this[i] = value
    }
  } else {
    // non-number fill values are stringified and tiled as UTF-8 bytes
    var bytes = utf8ToBytes(value.toString())
    var len = bytes.length
    for (i = start; i < end; i++) {
      this[i] = bytes[i % len]
    }
  }

  return this
}

/**
 * Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance.
 * Added in Node 0.12. Only available in browsers that support ArrayBuffer.
 */
Buffer.prototype.toArrayBuffer = function toArrayBuffer () {
  if (typeof Uint8Array !== 'undefined') {
    if (Buffer.TYPED_ARRAY_SUPPORT) {
      return (new Buffer(this)).buffer
    } else {
      var buf = new Uint8Array(this.length)
      for (var i = 0, len = buf.length; i < len; i += 1) {
        buf[i] = this[i]
      }
      return buf.buffer
    }
  } else {
    throw new TypeError('Buffer.toArrayBuffer not supported in this browser')
  }
}

// HELPER FUNCTIONS
// ================

var BP = Buffer.prototype

/**
 * Augment a Uint8Array *instance* (not the Uint8Array class!) with Buffer methods
 */
Buffer._augment = function _augment (arr) {
  arr.constructor = Buffer
  arr._isBuffer = true

  // save reference to original Uint8Array set method before overwriting
  arr._set = arr.set

  // deprecated, will be removed in node 0.13+
  arr.get = BP.get
  arr.set = BP.set

  arr.write = BP.write
  arr.toString = BP.toString
  arr.toLocaleString = BP.toString
  arr.toJSON = BP.toJSON
  arr.equals = BP.equals
  arr.compare = BP.compare
  arr.indexOf = BP.indexOf
  arr.copy = BP.copy
  arr.slice = BP.slice
  arr.readUIntLE = BP.readUIntLE
  arr.readUIntBE = BP.readUIntBE
  arr.readUInt8 = BP.readUInt8
  arr.readUInt16LE = BP.readUInt16LE
  arr.readUInt16BE = BP.readUInt16BE
  arr.readUInt32LE = BP.readUInt32LE
  arr.readUInt32BE = BP.readUInt32BE
  arr.readIntLE = BP.readIntLE
  arr.readIntBE = BP.readIntBE
  arr.readInt8 = BP.readInt8
  arr.readInt16LE = BP.readInt16LE
  arr.readInt16BE = BP.readInt16BE
  arr.readInt32LE = BP.readInt32LE
  arr.readInt32BE = BP.readInt32BE
  arr.readFloatLE = BP.readFloatLE
  arr.readFloatBE = BP.readFloatBE
  arr.readDoubleLE = BP.readDoubleLE
  arr.readDoubleBE = BP.readDoubleBE
  arr.writeUInt8 = BP.writeUInt8
  arr.writeUIntLE = BP.writeUIntLE
  arr.writeUIntBE = BP.writeUIntBE
  arr.writeUInt16LE = BP.writeUInt16LE
  arr.writeUInt16BE = BP.writeUInt16BE
  arr.writeUInt32LE = BP.writeUInt32LE
  arr.writeUInt32BE = BP.writeUInt32BE
  arr.writeIntLE = BP.writeIntLE
  arr.writeIntBE = BP.writeIntBE
  arr.writeInt8 = BP.writeInt8
  arr.writeInt16LE = BP.writeInt16LE
arr.writeInt16BE = BP.writeInt16BE arr.writeInt32LE = BP.writeInt32LE arr.writeInt32BE = BP.writeInt32BE arr.writeFloatLE = BP.writeFloatLE arr.writeFloatBE = BP.writeFloatBE arr.writeDoubleLE = BP.writeDoubleLE arr.writeDoubleBE = BP.writeDoubleBE arr.fill = BP.fill arr.inspect = BP.inspect arr.toArrayBuffer = BP.toArrayBuffer return arr } var INVALID_BASE64_RE = /[^+\/0-9A-z\-]/g function base64clean (str) { // Node strips out invalid characters like \n and \t from the string, base64-js does not str = stringtrim(str).replace(INVALID_BASE64_RE, '') // Node converts strings with length < 2 to '' if (str.length < 2) return '' // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not while (str.length % 4 !== 0) { str = str + '=' } return str } function stringtrim (str) { if (str.trim) return str.trim() return str.replace(/^\s+|\s+$/g, '') } function isArrayish (subject) { return isArray(subject) || Buffer.isBuffer(subject) || subject && typeof subject === 'object' && typeof subject.length === 'number' } function toHex (n) { if (n < 16) return '0' + n.toString(16) return n.toString(16) } function utf8ToBytes (string, units) { units = units || Infinity var codePoint var length = string.length var leadSurrogate = null var bytes = [] var i = 0 for (; i < length; i++) { codePoint = string.charCodeAt(i) // is surrogate component if (codePoint > 0xD7FF && codePoint < 0xE000) { // last char was a lead if (leadSurrogate) { // 2 leads in a row if (codePoint < 0xDC00) { if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) leadSurrogate = codePoint continue } else { // valid surrogate pair codePoint = leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00 | 0x10000 leadSurrogate = null } } else { // no lead yet if (codePoint > 0xDBFF) { // unexpected trail if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) continue } else if (i + 1 === length) { // unpaired lead if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) continue } else { // valid lead 
leadSurrogate = codePoint continue } } } else if (leadSurrogate) { // valid bmp char, but last char was a lead if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) leadSurrogate = null } // encode utf8 if (codePoint < 0x80) { if ((units -= 1) < 0) break bytes.push(codePoint) } else if (codePoint < 0x800) { if ((units -= 2) < 0) break bytes.push( codePoint >> 0x6 | 0xC0, codePoint & 0x3F | 0x80 ) } else if (codePoint < 0x10000) { if ((units -= 3) < 0) break bytes.push( codePoint >> 0xC | 0xE0, codePoint >> 0x6 & 0x3F | 0x80, codePoint & 0x3F | 0x80 ) } else if (codePoint < 0x200000) { if ((units -= 4) < 0) break bytes.push( codePoint >> 0x12 | 0xF0, codePoint >> 0xC & 0x3F | 0x80, codePoint >> 0x6 & 0x3F | 0x80, codePoint & 0x3F | 0x80 ) } else { throw new Error('Invalid code point') } } return bytes } function asciiToBytes (str) { var byteArray = [] for (var i = 0; i < str.length; i++) { // Node's code seems to be doing this and not & 0x7F.. byteArray.push(str.charCodeAt(i) & 0xFF) } return byteArray } function utf16leToBytes (str, units) { var c, hi, lo var byteArray = [] for (var i = 0; i < str.length; i++) { if ((units -= 2) < 0) break c = str.charCodeAt(i) hi = c >> 8 lo = c % 256 byteArray.push(lo) byteArray.push(hi) } return byteArray } function base64ToBytes (str) { return base64.toByteArray(base64clean(str)) } function blitBuffer (src, dst, offset, length) { for (var i = 0; i < length; i++) { if ((i + offset >= dst.length) || (i >= src.length)) break dst[i + offset] = src[i] } return i } function decodeUtf8Char (str) { try { return decodeURIComponent(str) } catch (err) { return String.fromCharCode(0xFFFD) // UTF 8 invalid char } } },{"base64-js":18,"ieee754":19,"is-array":20}],18:[function(require,module,exports){ var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; ;(function (exports) { 'use strict'; var Arr = (typeof Uint8Array !== 'undefined') ? 
Uint8Array : Array var PLUS = '+'.charCodeAt(0) var SLASH = '/'.charCodeAt(0) var NUMBER = '0'.charCodeAt(0) var LOWER = 'a'.charCodeAt(0) var UPPER = 'A'.charCodeAt(0) var PLUS_URL_SAFE = '-'.charCodeAt(0) var SLASH_URL_SAFE = '_'.charCodeAt(0) function decode (elt) { var code = elt.charCodeAt(0) if (code === PLUS || code === PLUS_URL_SAFE) return 62 // '+' if (code === SLASH || code === SLASH_URL_SAFE) return 63 // '/' if (code < NUMBER) return -1 //no match if (code < NUMBER + 10) return code - NUMBER + 26 + 26 if (code < UPPER + 26) return code - UPPER if (code < LOWER + 26) return code - LOWER + 26 } function b64ToByteArray (b64) { var i, j, l, tmp, placeHolders, arr if (b64.length % 4 > 0) { throw new Error('Invalid string. Length must be a multiple of 4') } // the number of equal signs (place holders) // if there are two placeholders, than the two characters before it // represent one byte // if there is only one, then the three characters before it represent 2 bytes // this is just a cheap hack to not do indexOf twice var len = b64.length placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0 // base64 is 4/3 + up to two characters of the original data arr = new Arr(b64.length * 3 / 4 - placeHolders) // if there are placeholders, only get up to the last complete 4 chars l = placeHolders > 0 ? 
b64.length - 4 : b64.length var L = 0 function push (v) { arr[L++] = v } for (i = 0, j = 0; i < l; i += 4, j += 3) { tmp = (decode(b64.charAt(i)) << 18) | (decode(b64.charAt(i + 1)) << 12) | (decode(b64.charAt(i + 2)) << 6) | decode(b64.charAt(i + 3)) push((tmp & 0xFF0000) >> 16) push((tmp & 0xFF00) >> 8) push(tmp & 0xFF) } if (placeHolders === 2) { tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4) push(tmp & 0xFF) } else if (placeHolders === 1) { tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2) push((tmp >> 8) & 0xFF) push(tmp & 0xFF) } return arr } function uint8ToBase64 (uint8) { var i, extraBytes = uint8.length % 3, // if we have 1 byte left, pad 2 bytes output = "", temp, length function encode (num) { return lookup.charAt(num) } function tripletToBase64 (num) { return encode(num >> 18 & 0x3F) + encode(num >> 12 & 0x3F) + encode(num >> 6 & 0x3F) + encode(num & 0x3F) } // go through the array every three bytes, we'll deal with trailing stuff later for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) { temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]) output += tripletToBase64(temp) } // pad the end with zeros, but make sure to not forget the extra bytes switch (extraBytes) { case 1: temp = uint8[uint8.length - 1] output += encode(temp >> 2) output += encode((temp << 4) & 0x3F) output += '==' break case 2: temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1]) output += encode(temp >> 10) output += encode((temp >> 4) & 0x3F) output += encode((temp << 2) & 0x3F) output += '=' break } return output } exports.toByteArray = b64ToByteArray exports.fromByteArray = uint8ToBase64 }(typeof exports === 'undefined' ? (this.base64js = {}) : exports)) },{}],19:[function(require,module,exports){ exports.read = function(buffer, offset, isLE, mLen, nBytes) { var e, m, eLen = nBytes * 8 - mLen - 1, eMax = (1 << eLen) - 1, eBias = eMax >> 1, nBits = -7, i = isLE ? 
(nBytes - 1) : 0, d = isLE ? -1 : 1, s = buffer[offset + i]; i += d; e = s & ((1 << (-nBits)) - 1); s >>= (-nBits); nBits += eLen; for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8); m = e & ((1 << (-nBits)) - 1); e >>= (-nBits); nBits += mLen; for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8); if (e === 0) { e = 1 - eBias; } else if (e === eMax) { return m ? NaN : ((s ? -1 : 1) * Infinity); } else { m = m + Math.pow(2, mLen); e = e - eBias; } return (s ? -1 : 1) * m * Math.pow(2, e - mLen); }; exports.write = function(buffer, value, offset, isLE, mLen, nBytes) { var e, m, c, eLen = nBytes * 8 - mLen - 1, eMax = (1 << eLen) - 1, eBias = eMax >> 1, rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0), i = isLE ? 0 : (nBytes - 1), d = isLE ? 1 : -1, s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0; value = Math.abs(value); if (isNaN(value) || value === Infinity) { m = isNaN(value) ? 1 : 0; e = eMax; } else { e = Math.floor(Math.log(value) / Math.LN2); if (value * (c = Math.pow(2, -e)) < 1) { e--; c *= 2; } if (e + eBias >= 1) { value += rt / c; } else { value += rt * Math.pow(2, 1 - eBias); } if (value * c >= 2) { e++; c /= 2; } if (e + eBias >= eMax) { m = 0; e = eMax; } else if (e + eBias >= 1) { m = (value * c - 1) * Math.pow(2, mLen); e = e + eBias; } else { m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen); e = 0; } } for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8); e = (e << mLen) | m; eLen += mLen; for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8); buffer[offset + i - d] |= s * 128; }; },{}],20:[function(require,module,exports){ /** * isArray */ var isArray = Array.isArray; /** * toString */ var str = Object.prototype.toString; /** * Whether or not the given `val` * is an array. 
* * example: * * isArray([]); * // > true * isArray(arguments); * // > false * isArray(''); * // > false * * @param {mixed} val * @return {bool} */ module.exports = isArray || function (val) { return !! val && '[object Array]' == str.call(val); }; },{}],21:[function(require,module,exports){ (function (process){ // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // resolves . and .. 
elements in a path array with directory names there // must be no slashes, empty elements, or device names (c:\) in the array // (so also no leading and trailing slashes - it does not distinguish // relative and absolute paths) function normalizeArray(parts, allowAboveRoot) { // if the path tries to go above the root, `up` ends up > 0 var up = 0; for (var i = parts.length - 1; i >= 0; i--) { var last = parts[i]; if (last === '.') { parts.splice(i, 1); } else if (last === '..') { parts.splice(i, 1); up++; } else if (up) { parts.splice(i, 1); up--; } } // if the path is allowed to go above the root, restore leading ..s if (allowAboveRoot) { for (; up--; up) { parts.unshift('..'); } } return parts; } // Split a filename into [root, dir, basename, ext], unix version // 'root' is just a slash, or nothing. var splitPathRe = /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; var splitPath = function(filename) { return splitPathRe.exec(filename).slice(1); }; // path.resolve([from ...], to) // posix version exports.resolve = function() { var resolvedPath = '', resolvedAbsolute = false; for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { var path = (i >= 0) ? arguments[i] : process.cwd(); // Skip empty and invalid entries if (typeof path !== 'string') { throw new TypeError('Arguments to path.resolve must be strings'); } else if (!path) { continue; } resolvedPath = path + '/' + resolvedPath; resolvedAbsolute = path.charAt(0) === '/'; } // At this point the path should be resolved to a full absolute path, but // handle relative paths to be safe (might happen when process.cwd() fails) // Normalize the path resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) { return !!p; }), !resolvedAbsolute).join('/'); return ((resolvedAbsolute ? 
'/' : '') + resolvedPath) || '.'; }; // path.normalize(path) // posix version exports.normalize = function(path) { var isAbsolute = exports.isAbsolute(path), trailingSlash = substr(path, -1) === '/'; // Normalize the path path = normalizeArray(filter(path.split('/'), function(p) { return !!p; }), !isAbsolute).join('/'); if (!path && !isAbsolute) { path = '.'; } if (path && trailingSlash) { path += '/'; } return (isAbsolute ? '/' : '') + path; }; // posix version exports.isAbsolute = function(path) { return path.charAt(0) === '/'; }; // posix version exports.join = function() { var paths = Array.prototype.slice.call(arguments, 0); return exports.normalize(filter(paths, function(p, index) { if (typeof p !== 'string') { throw new TypeError('Arguments to path.join must be strings'); } return p; }).join('/')); }; // path.relative(from, to) // posix version exports.relative = function(from, to) { from = exports.resolve(from).substr(1); to = exports.resolve(to).substr(1); function trim(arr) { var start = 0; for (; start < arr.length; start++) { if (arr[start] !== '') break; } var end = arr.length - 1; for (; end >= 0; end--) { if (arr[end] !== '') break; } if (start > end) return []; return arr.slice(start, end - start + 1); } var fromParts = trim(from.split('/')); var toParts = trim(to.split('/')); var length = Math.min(fromParts.length, toParts.length); var samePartsLength = length; for (var i = 0; i < length; i++) { if (fromParts[i] !== toParts[i]) { samePartsLength = i; break; } } var outputParts = []; for (var i = samePartsLength; i < fromParts.length; i++) { outputParts.push('..'); } outputParts = outputParts.concat(toParts.slice(samePartsLength)); return outputParts.join('/'); }; exports.sep = '/'; exports.delimiter = ':'; exports.dirname = function(path) { var result = splitPath(path), root = result[0], dir = result[1]; if (!root && !dir) { // No dirname whatsoever return '.'; } if (dir) { // It has a dirname, strip trailing slash dir = dir.substr(0, dir.length - 
1); } return root + dir; }; exports.basename = function(path, ext) { var f = splitPath(path)[2]; // TODO: make this comparison case-insensitive on windows? if (ext && f.substr(-1 * ext.length) === ext) { f = f.substr(0, f.length - ext.length); } return f; }; exports.extname = function(path) { return splitPath(path)[3]; }; function filter (xs, f) { if (xs.filter) return xs.filter(f); var res = []; for (var i = 0; i < xs.length; i++) { if (f(xs[i], i, xs)) res.push(xs[i]); } return res; } // String.prototype.substr - negative index don't work in IE8 var substr = 'ab'.substr(-1) === 'b' ? function (str, start, len) { return str.substr(start, len) } : function (str, start, len) { if (start < 0) start = str.length + start; return str.substr(start, len); } ; }).call(this,require('_process')) },{"_process":22}],22:[function(require,module,exports){ // shim for using process in browser var process = module.exports = {}; var queue = []; var draining = false; function drainQueue() { if (draining) { return; } draining = true; var currentQueue; var len = queue.length; while(len) { currentQueue = queue; queue = []; var i = -1; while (++i < len) { currentQueue[i](); } len = queue.length; } draining = false; } process.nextTick = function (fun) { queue.push(fun); if (!draining) { setTimeout(drainQueue, 0); } }; process.title = 'browser'; process.browser = true; process.env = {}; process.argv = []; process.version = ''; // empty string to avoid regexp issues process.versions = {}; function noop() {} process.on = noop; process.addListener = noop; process.once = noop; process.off = noop; process.removeListener = noop; process.removeAllListeners = noop; process.emit = noop; process.binding = function (name) { throw new Error('process.binding is not supported'); }; // TODO(shtylman) process.cwd = function () { return '/' }; process.chdir = function (dir) { throw new Error('process.chdir is not supported'); }; process.umask = function() { return 0; }; 
},{}],23:[function(require,module,exports){ /** * This is the web browser implementation of `debug()`. * * Expose `debug()` as the module. */ exports = module.exports = require('./debug'); exports.log = log; exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; /** * Use chrome.storage.local if we are in an app */ var storage; if (typeof chrome !== 'undefined' && typeof chrome.storage !== 'undefined') storage = chrome.storage.local; else storage = localstorage(); /** * Colors. */ exports.colors = [ 'lightseagreen', 'forestgreen', 'goldenrod', 'dodgerblue', 'darkorchid', 'crimson' ]; /** * Currently only WebKit-based Web Inspectors, Firefox >= v31, * and the Firebug extension (any Firefox version) are known * to support "%c" CSS customizations. * * TODO: add a `localStorage` variable to explicitly enable/disable colors */ function useColors() { // is webkit? http://stackoverflow.com/a/16459606/376773 return ('WebkitAppearance' in document.documentElement.style) || // is firebug? http://stackoverflow.com/a/398120/376773 (window.console && (console.firebug || (console.exception && console.table))) || // is firefox >= v31? // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages (navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31); } /** * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. */ exports.formatters.j = function(v) { return JSON.stringify(v); }; /** * Colorize log arguments if enabled. * * @api public */ function formatArgs() { var args = arguments; var useColors = this.useColors; args[0] = (useColors ? '%c' : '') + this.namespace + (useColors ? ' %c' : ' ') + args[0] + (useColors ? 
'%c ' : ' ') + '+' + exports.humanize(this.diff); if (!useColors) return args; var c = 'color: ' + this.color; args = [args[0], c, 'color: inherit'].concat(Array.prototype.slice.call(args, 1)); // the final "%c" is somewhat tricky, because there could be other // arguments passed either before or after the %c, so we need to // figure out the correct index to insert the CSS into var index = 0; var lastC = 0; args[0].replace(/%[a-z%]/g, function(match) { if ('%%' === match) return; index++; if ('%c' === match) { // we only are interested in the *last* %c // (the user may have provided their own) lastC = index; } }); args.splice(lastC, 0, c); return args; } /** * Invokes `console.log()` when available. * No-op when `console.log` is not a "function". * * @api public */ function log() { // this hackery is required for IE8/9, where // the `console.log` function doesn't have 'apply' return 'object' === typeof console && console.log && Function.prototype.apply.call(console.log, console, arguments); } /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { try { if (null == namespaces) { storage.removeItem('debug'); } else { storage.debug = namespaces; } } catch(e) {} } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { var r; try { r = storage.debug; } catch(e) {} return r; } /** * Enable namespaces listed in `localStorage.debug` initially. */ exports.enable(load()); /** * Localstorage attempts to return the localstorage. * * This is necessary because safari throws * when a user disables cookies/localstorage * and you attempt to access it. * * @return {LocalStorage} * @api private */ function localstorage(){ try { return window.localStorage; } catch (e) {} } },{"./debug":24}],24:[function(require,module,exports){ /** * This is the common logic for both the Node.js and web browser * implementations of `debug()`. * * Expose `debug()` as the module. 
*/ exports = module.exports = debug; exports.coerce = coerce; exports.disable = disable; exports.enable = enable; exports.enabled = enabled; exports.humanize = require('ms'); /** * The currently active debug mode names, and names to skip. */ exports.names = []; exports.skips = []; /** * Map of special "%n" handling functions, for the debug "format" argument. * * Valid key names are a single, lowercased letter, i.e. "n". */ exports.formatters = {}; /** * Previously assigned color. */ var prevColor = 0; /** * Previous log timestamp. */ var prevTime; /** * Select a color. * * @return {Number} * @api private */ function selectColor() { return exports.colors[prevColor++ % exports.colors.length]; } /** * Create a debugger with the given `namespace`. * * @param {String} namespace * @return {Function} * @api public */ function debug(namespace) { // define the `disabled` version function disabled() { } disabled.enabled = false; // define the `enabled` version function enabled() { var self = enabled; // set `diff` timestamp var curr = +new Date(); var ms = curr - (prevTime || curr); self.diff = ms; self.prev = prevTime; self.curr = curr; prevTime = curr; // add the `color` if not set if (null == self.useColors) self.useColors = exports.useColors(); if (null == self.color && self.useColors) self.color = selectColor(); var args = Array.prototype.slice.call(arguments); args[0] = exports.coerce(args[0]); if ('string' !== typeof args[0]) { // anything else let's inspect with %o args = ['%o'].concat(args); } // apply any `formatters` transformations var index = 0; args[0] = args[0].replace(/%([a-z%])/g, function(match, format) { // if we encounter an escaped % then don't increase the array index if (match === '%%') return match; index++; var formatter = exports.formatters[format]; if ('function' === typeof formatter) { var val = args[index]; match = formatter.call(self, val); // now we need to remove `args[index]` since it's inlined in the `format` args.splice(index, 1); index--; 
} return match; }); if ('function' === typeof exports.formatArgs) { args = exports.formatArgs.apply(self, args); } var logFn = enabled.log || exports.log || console.log.bind(console); logFn.apply(self, args); } enabled.enabled = true; var fn = exports.enabled(namespace) ? enabled : disabled; fn.namespace = namespace; return fn; } /** * Enables a debug mode by namespaces. This can include modes * separated by a colon and wildcards. * * @param {String} namespaces * @api public */ function enable(namespaces) { exports.save(namespaces); var split = (namespaces || '').split(/[\s,]+/); var len = split.length; for (var i = 0; i < len; i++) { if (!split[i]) continue; // ignore empty strings namespaces = split[i].replace(/\*/g, '.*?'); if (namespaces[0] === '-') { exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); } else { exports.names.push(new RegExp('^' + namespaces + '$')); } } } /** * Disable debug output. * * @api public */ function disable() { exports.enable(''); } /** * Returns true if the given mode name is enabled, false otherwise. * * @param {String} name * @return {Boolean} * @api public */ function enabled(name) { var i, len; for (i = 0, len = exports.skips.length; i < len; i++) { if (exports.skips[i].test(name)) { return false; } } for (i = 0, len = exports.names.length; i < len; i++) { if (exports.names[i].test(name)) { return true; } } return false; } /** * Coerce `val`. * * @param {Mixed} val * @return {Mixed} * @api private */ function coerce(val) { if (val instanceof Error) return val.stack || val.message; return val; } },{"ms":25}],25:[function(require,module,exports){ /** * Helpers. */ var s = 1000; var m = s * 60; var h = m * 60; var d = h * 24; var y = d * 365.25; /** * Parse or format the given `val`. 
* * Options: * * - `long` verbose formatting [false] * * @param {String|Number} val * @param {Object} options * @return {String|Number} * @api public */ module.exports = function(val, options){ options = options || {}; if ('string' == typeof val) return parse(val); return options.long ? long(val) : short(val); }; /** * Parse the given `str` and return milliseconds. * * @param {String} str * @return {Number} * @api private */ function parse(str) { var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(str); if (!match) return; var n = parseFloat(match[1]); var type = (match[2] || 'ms').toLowerCase(); switch (type) { case 'years': case 'year': case 'yrs': case 'yr': case 'y': return n * y; case 'days': case 'day': case 'd': return n * d; case 'hours': case 'hour': case 'hrs': case 'hr': case 'h': return n * h; case 'minutes': case 'minute': case 'mins': case 'min': case 'm': return n * m; case 'seconds': case 'second': case 'secs': case 'sec': case 's': return n * s; case 'milliseconds': case 'millisecond': case 'msecs': case 'msec': case 'ms': return n; } } /** * Short format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function short(ms) { if (ms >= d) return Math.round(ms / d) + 'd'; if (ms >= h) return Math.round(ms / h) + 'h'; if (ms >= m) return Math.round(ms / m) + 'm'; if (ms >= s) return Math.round(ms / s) + 's'; return ms + 'ms'; } /** * Long format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function long(ms) { return plural(ms, d, 'day') || plural(ms, h, 'hour') || plural(ms, m, 'minute') || plural(ms, s, 'second') || ms + ' ms'; } /** * Pluralization helper. */ function plural(ms, n, name) { if (ms < n) return; if (ms < n * 1.5) return Math.floor(ms / n) + ' ' + name; return Math.ceil(ms / n) + ' ' + name + 's'; } },{}],26:[function(require,module,exports){ /*! 
* expand-brackets <https://github.com/jonschlinkert/expand-brackets> * * Copyright (c) 2015 Jon Schlinkert. * Licensed under the MIT license. */ 'use strict'; /** * POSIX character classes */ var POSIX = { alnum: 'a-zA-Z0-9', alpha: 'a-zA-Z', blank: ' \\s\\t', cntrl: '\\x00-\\x1F\\x7F', digit: '0-9', graph: '\\x21-\\x7E', lower: 'a-z', print: '\\x20-\\x7E', punct: '!"#$%&\'()\\*+,-./:;<=>?@[\\]^_`{|}~', space: ' \\s\\t\\r\\n\\v\\f', upper: 'A-Z', word: 'A-Za-z0-9_', xdigit: 'A-Fa-f0-9', }; /** * Expose `brackets` */ module.exports = brackets; function brackets(str) { var negated = false; if (str.indexOf('[^') !== -1) { negated = true; str = str.split('[^').join('['); } if (str.indexOf('[!') !== -1) { negated = true; str = str.split('[!').join('['); } var a = str.split('['); var b = str.split(']'); var imbalanced = a.length !== b.length; var parts = str.split(/(?::\]\[:|\[?\[:|:\]\]?)/); var len = parts.length, i = 0; var end = '', beg = ''; var res = []; while (len--) { var inner = parts[i++]; if (inner === '^[!' || inner === '[!') { inner = ''; negated = true; } var prefix = negated ? 
'^' : ''; var ch = POSIX[inner]; if (ch) { res.push('[' + prefix + ch + ']'); } else if (inner) { if (/^\[?\w-\w\]?$/.test(inner)) { if (i === parts.length) { res.push('[' + prefix + inner); } else if (i === 1) { res.push(prefix + inner + ']'); } else { res.push(prefix + inner); } } else { if (i === 1) { beg += inner; } else if (i === parts.length) { end += inner; } else { res.push('[' + prefix + inner + ']'); } } } } var result = res.join('|'); var len = res.length || 1; if (len > 1) { result = '(?:' + result + ')'; len = 1; } if (beg) { len++; if (beg.charAt(0) === '[') { if (imbalanced) { beg = '\\[' + beg.slice(1); } else { beg += ']'; } } result = beg + result; } if (end) { len++; if (end.slice(-1) === ']') { if (imbalanced) { end = end.slice(0, end.length - 1) + '\\]'; } else { end = '[' + end; } } result += end; } if (len > 1) { result = result.split('][').join(']|['); if (result.indexOf('|') !== -1 && !/\(\?/.test(result)) { result = '(?:' + result + ')'; } } result = result.replace(/\[+=|=\]+/g, '\\b'); return result; } brackets.makeRe = function (pattern) { try { return new RegExp(brackets(pattern)); } catch (err) {} }; brackets.isMatch = function (str, pattern) { try { return brackets.makeRe(pattern).test(str); } catch (err) { return false; } }; brackets.match = function (arr, pattern) { var len = arr.length, i = 0; var res = arr.slice(); var re = brackets.makeRe(pattern); while (i < len) { var ele = arr[i++]; if (!re.test(ele)) { continue; } res.splice(i, 1); } return res; }; },{}],27:[function(require,module,exports){ /*! * filename-regex <https://github.com/regexps/filename-regex> * * Copyright (c) 2014-2015, Jon Schlinkert * Licensed under the MIT license. */ module.exports = function filenameRegex() { return /([^\\\/]+)$/; }; },{}],28:[function(require,module,exports){ /*! * is-glob <https://github.com/jonschlinkert/is-glob> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT License. 
*/ module.exports = function isGlob(str) { return typeof str === 'string' && /[!*{}?(|)[\]]/.test(str); }; },{}],29:[function(require,module,exports){ (function (Buffer){ var toString = Object.prototype.toString; /** * Get the native `typeof` a value. * * @param {*} `val` * @return {*} Native javascript type */ module.exports = function kindOf(val) { if (val === undefined) { return 'undefined'; } if (val === null) { return 'null'; } if (val === true || val === false || val instanceof Boolean) { return 'boolean'; } if (typeof val !== 'object') { return typeof val; } if (Array.isArray(val)) { return 'array'; } var type = toString.call(val); if (val instanceof RegExp || type === '[object RegExp]') { return 'regexp'; } if (val instanceof Date || type === '[object Date]') { return 'date'; } if (type === '[object Function]') { return 'function'; } if (type === '[object Arguments]') { return 'arguments'; } if (typeof Buffer !== 'undefined' && Buffer.isBuffer(val)) { return 'buffer'; } return type.slice(8, -1).toLowerCase(); }; }).call(this,require("buffer").Buffer) },{"buffer":17}],30:[function(require,module,exports){ /*! * object.omit <https://github.com/jonschlinkert/object.omit> * * Copyright (c) 2014 Jon Schlinkert, contributors. * Licensed under the MIT License */ 'use strict'; var isObject = require('isobject'); var forOwn = require('for-own'); module.exports = function omit(obj, props) { if (obj == null || !isObject(obj)) { return {}; } // Exit as early as possible if (props == null || (Array.isArray(props) && props.length === 0)) { return obj; } if (typeof props === 'string') { props = [].slice.call(arguments, 1); } var o = {}; if (!Object.keys(obj).length) { return o; } forOwn(obj, function (value, key) { if (props.indexOf(key) === -1) { o[key] = value; } }); return o; }; },{"for-own":31,"isobject":33}],31:[function(require,module,exports){ /*! * for-own <https://github.com/jonschlinkert/for-own> * * Copyright (c) 2014-2015, Jon Schlinkert. 
* Licensed under the MIT License. */ 'use strict'; var forIn = require('for-in'); var hasOwn = Object.prototype.hasOwnProperty; module.exports = function forOwn(o, fn, thisArg) { forIn(o, function (val, key) { if (hasOwn.call(o, key)) { return fn.call(thisArg, o[key], key, o); } }); }; },{"for-in":32}],32:[function(require,module,exports){ /*! * for-in <https://github.com/jonschlinkert/for-in> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT License. */ 'use strict'; module.exports = function forIn(o, fn, thisArg) { for (var key in o) { if (fn.call(thisArg, o[key], key, o) === false) { break; } } }; },{}],33:[function(require,module,exports){ arguments[4][12][0].apply(exports,arguments) },{"dup":12}],34:[function(require,module,exports){ /*! * parse-glob <https://github.com/jonschlinkert/parse-glob> * * Copyright (c) 2015, Jon Schlinkert. * Licensed under the MIT License. */ 'use strict'; var isGlob = require('is-glob'); var findBase = require('glob-base'); var extglob = require('is-extglob'); var dotfile = require('is-dotfile'); /** * Expose `cache` */ var cache = module.exports.cache = {}; /** * Parse a glob pattern into tokens. * * When no paths or '**' are in the glob, we use a * different strategy for parsing the filename, since * file names can contain braces and other difficult * patterns. 
such as: * * - `*.{a,b}` * - `(**|*.js)` */ module.exports = function parseGlob(glob) { if (cache.hasOwnProperty(glob)) { return cache[glob]; } var tok = {}; tok.orig = glob; tok.is = {}; // unescape dots and slashes in braces/brackets glob = escape(glob); var parsed = findBase(glob); tok.is.glob = parsed.isGlob; tok.glob = parsed.glob; tok.base = parsed.base; var segs = /([^\/]*)$/.exec(glob); tok.path = {}; tok.path.dirname = ''; tok.path.basename = segs[1] || ''; tok.path.dirname = glob.split(tok.path.basename).join('') || ''; var basename = (tok.path.basename || '').split('.') || ''; tok.path.filename = basename[0] || ''; tok.path.extname = basename.slice(1).join('.') || ''; tok.path.ext = ''; if (isGlob(tok.path.dirname) && !tok.path.basename) { if (!/\/$/.test(tok.glob)) { tok.path.basename = tok.glob; } tok.path.dirname = tok.base; } if (glob.indexOf('/') === -1 && !tok.is.globstar) { tok.path.dirname = ''; tok.path.basename = tok.orig; } var dot = tok.path.basename.indexOf('.'); if (dot !== -1) { tok.path.filename = tok.path.basename.slice(0, dot); tok.path.extname = tok.path.basename.slice(dot); } if (tok.path.extname.charAt(0) === '.') { var exts = tok.path.extname.split('.'); tok.path.ext = exts[exts.length - 1]; } // unescape dots and slashes in braces/brackets tok.glob = unescape(tok.glob); tok.path.dirname = unescape(tok.path.dirname); tok.path.basename = unescape(tok.path.basename); tok.path.filename = unescape(tok.path.filename); tok.path.extname = unescape(tok.path.extname); // Booleans var is = (glob && tok.is.glob); tok.is.negated = glob && glob.charAt(0) === '!'; tok.is.extglob = glob && extglob(glob); tok.is.braces = has(is, glob, '{'); tok.is.brackets = has(is, glob, '[:'); tok.is.globstar = has(is, glob, '**'); tok.is.dotfile = dotfile(tok.path.basename); tok.is.dotdir = dotdir(tok.path.dirname); return (cache[glob] = tok); } /** * Returns true if the glob matches dot-directories. 
* * @param {Object} `tok` The tokens object * @param {Object} `path` The path object * @return {Object} */ function dotdir(base) { if (base.indexOf('/.') !== -1) { return true; } if (base.charAt(0) === '.' && base.charAt(1) !== '/') { return true; } return false; } /** * Returns true if the pattern has the given `ch`aracter(s) * * @param {Object} `glob` The glob pattern. * @param {Object} `ch` The character to test for * @return {Object} */ function has(is, glob, ch) { return is && glob.indexOf(ch) !== -1; } /** * Escape/unescape utils */ function escape(str) { var re = /\{([^{}]*?)}|\(([^()]*?)\)|\[([^\[\]]*?)\]/g; return str.replace(re, function (outter, inner) { if (!inner) { return outter; } return outter.split(inner).join(esc(inner)); }); } function esc(str) { str = str.split('/').join('__SLASH__'); str = str.split('.').join('__DOT__'); return str; } function unescape(str) { str = str.split('__SLASH__').join('/'); str = str.split('__DOT__').join('.'); return str; } },{"glob-base":35,"is-dotfile":37,"is-extglob":38,"is-glob":28}],35:[function(require,module,exports){ /*! * glob-base <https://github.com/jonschlinkert/glob-base> * * Copyright (c) 2015, Jon Schlinkert. * Licensed under the MIT License. */ 'use strict'; var path = require('path'); var parent = require('glob-parent'); module.exports = function globBase(pattern) { if (typeof pattern !== 'string') { throw new TypeError('glob-base expects a string.'); } var res = {}; res.base = parent(pattern); res.isGlob = res.base !== pattern; if (res.base !== '.') { res.glob = pattern.substr(res.base.length); if (res.glob.charAt(0) === '/') { res.glob = res.glob.substr(1); } } else { res.glob = pattern; } if (!res.isGlob) { res.base = dirname(pattern); res.glob = res.base !== '.' ? 
pattern.substr(res.base.length) : pattern; } if (res.glob.substr(0, 2) === './') { res.glob = res.glob.substr(2); } if (res.glob.charAt(0) === '/') { res.glob = res.glob.substr(1); } return res; }; function dirname(glob) { if (glob.slice(-1) === '/') return glob; return path.dirname(glob); } },{"glob-parent":36,"path":21}],36:[function(require,module,exports){ 'use strict'; var path = require('path'); var isglob = require('is-glob'); module.exports = function globParent(str) { while (isglob(str)) str = path.dirname(str); return str; }; },{"is-glob":28,"path":21}],37:[function(require,module,exports){ /*! * is-dotfile <https://github.com/regexps/is-dotfile> * * Copyright (c) 2015 Jon Schlinkert, contributors. * Licensed under the MIT license. */ module.exports = function(str) { if (str.charCodeAt(0) === 46 /* . */ && str.indexOf('/', 1) === -1) { return true; } var last = str.lastIndexOf('/'); return last !== -1 ? str.charCodeAt(last + 1) === 46 /* . */ : false; }; },{}],38:[function(require,module,exports){ /*! * is-extglob <https://github.com/jonschlinkert/is-extglob> * * Copyright (c) 2014-2015, Jon Schlinkert. * Licensed under the MIT License. */ module.exports = function isExtglob(str) { return typeof str === 'string' && /[@?!+*]\(/.test(str); }; },{}],39:[function(require,module,exports){ /*! * regex-cache <https://github.com/jonschlinkert/regex-cache> * * Copyright (c) 2015 Jon Schlinkert. * Licensed under the MIT license. */ 'use strict'; var toKey = require('to-key'); /** * Expose `regexCache` */ module.exports = regexCache; /** * Memoize the results of a call to the new RegExp constructor. 
* * @param {Function} fn [description] * @param {String} str [description] * @param {Options} options [description] * @param {Boolean} nocompare [description] * @return {RegExp} */ function regexCache(fn, str, options) { var key = '_default_'; if (!str) { return cache[key] || (cache[key] = fn()); } if (!options) { if (typeof str === 'string') { return cache[str] || (cache[str] = fn(str)); } else { key = toKey(str); return cache[key] || (cache[key] = fn(str)); } } key = str + toKey(options); return cache[key] || (cache[key] = fn(str, options)); } /** * Expose `cache` */ var cache = module.exports.cache = {}; },{"to-key":40}],40:[function(require,module,exports){ (function (Buffer){ /*! * to-key <https://github.com/jonschlinkert/to-key> * * Copyright (c) 2015 Jon Schlinkert. * Licensed under the MIT license. */ 'use strict'; var forIn = require('for-in'); var map = require('arr-map'); module.exports = toKey; function toKey(val) { if (val === undefined || val === null) { return ''; } if (typeof val !== 'object') { return '' + val; } if (Array.isArray(val)) { return map(val, toKey).join(''); } var type = toString.call(val); if (type === '[object Function]') { return ''; } if (val instanceof RegExp || type === '[object RegExp]') { return val.source; } if (val instanceof Date || type === '[object Date]') { return Date.parse(val); } if (Buffer.isBuffer(val)) { return val.toString(); } return toString(val); } function toString(obj) { if (typeof obj !== 'object') { return obj + ''; } var str = ''; if (Array.isArray(obj)) { str += map(obj, toString); } else { forIn(obj, function (val, key) { if (typeof val === 'object') { str += key + toString(val); } else { str += key + val; } }); str = str.split(/[\W\s]/).join(''); } return str; } }).call(this,require("buffer").Buffer) },{"arr-map":41,"buffer":17,"for-in":42}],41:[function(require,module,exports){ /*! * arr-map <https://github.com/jonschlinkert/arr-map> * * Copyright (c) 2015 Jon Schlinkert, contributors. 
* Licensed under the MIT license. */ 'use strict'; module.exports = function map(arr, fn) { if (arr == null) { return []; } var len = arr.length; var res = new Array(len); var i = -1; while (++i < len) { res[i] = fn(arr[i], i, arr); } return res; }; },{}],42:[function(require,module,exports){ arguments[4][32][0].apply(exports,arguments) },{"dup":32}]},{},[1]);
{ "content_hash": "6d67f4c852b8d6e2a0a10e0e9bcc75bd", "timestamp": "", "source": "github", "line_count": 5483, "max_line_length": 480, "avg_line_length": 23.22159401787343, "alnum_prop": 0.5845637900160221, "repo_name": "mattdesl/micromatch", "id": "5f9ba2ab3a8a4710bf299d380875f497f742404e", "size": "127473", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "browser.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1093911" } ], "symlink_target": "" }
/**
 * Provides utility and helper classes to easily prepare unit tests in a fluent manner.
 *
 * <p>Whereas {@link io.tourniquet.junit.Tourniquet} and {@link io.tourniquet.junit.ShrinkwrapHelper}
 * are used for the preparation of a test, {@link io.tourniquet.junit.jcr.JCRAssert} enhances the
 * JCR-related assertions of unit tests.
 */
package io.tourniquet.junit;
{ "content_hash": "adaa50486ccf509998a110cd4c0fb657", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 111, "avg_line_length": 45.375, "alnum_prop": 0.7658402203856749, "repo_name": "tourniquet-io/tourniquet-junit", "id": "95bbc52edbdbdf9e8fc728a04122cb8efafcc1d9", "size": "980", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tourniquet-all/src/main/java/io/tourniquet/junit/package-info.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "123" }, { "name": "Java", "bytes": "1043662" } ], "symlink_target": "" }
<?php
/*
 * Spanish (Mexico, es_mx) translation strings for the Settings > Picklist module.
 *
 * $languageStrings holds labels rendered server-side; $jsLanguageStrings holds
 * messages consumed by client-side JavaScript. Keys are looked up by name from
 * other parts of the application, so key spellings must not be changed here —
 * including the historical "MESSGAE" misspellings, which are part of the
 * lookup contract.
 */
$languageStrings = array(
	'LBL_SELECT_PICKLIST_IN' => 'Seleccionar lista desplegable en',
	'LBL_ADD_VALUE' => 'Agregar Elemento',
	'LBL_RENAME_VALUE' => 'Renombrar Elemento',
	'LBL_DELETE_VALUE' => 'Eliminar Elemento',
	'LBL_ITEMS' => 'Elementos',
	'LBL_DRAG_ITEMS_TO_RESPOSITION' => 'Arrastra los elementos para reposicionarlos',
	'LBL_SELECT_AN_ITEM_TO_RENAME_OR_DELETE' => 'Selecciona un elemento para renombrarlo o eliminarlo',
	'LBL_TO_DELETE_MULTIPLE_HOLD_CONTROL_KEY' => 'Para eliminar múltiples elementos, oprime la tecla Ctrl mientras seleccionas los elementos a borrar',
	'LBL_ADD_ITEM_TO' => 'Agregar elemento a',
	'LBL_ITEM_VALUE' => 'Valor del elemento',
	'LBL_ITEM_TO_RENAME' => 'Elemento a renombrar',
	'LBL_ENTER_NEW_NAME' => 'Agrega el nuevo nombre',
	'LBL_RENAME_PICKLIST_ITEM' => 'Renombrar elemento de lista desplegable',
	'LBL_DELETE_PICKLIST_ITEMS' => 'Eliminar elementos de lista desplegable',
	'LBL_ITEMS_TO_DELETE' => 'Elementos a eliminar',
	'LBL_REPLACE_IT_WITH' => 'Reemplazar con',
	'LBL_ASSIGN_TO_ROLE' => 'Asignar a Rol',
	'LBL_ALL_ROLES' => 'Todos los roles',
	'LBL_CHOOSE_ROLES' => 'Seleccionar roles',
	'LBL_ALL_VALUES' => 'Todos los Elementos',
	'LBL_VALUES_ASSIGNED_TO_A_ROLE' => 'Elementos asignados al rol',
	'LBL_ASSIGN_VALUE' => 'Asignar Elemento',
	'LBL_SAVE_ORDER' => 'Guardar Orden',
	'LBL_ROLE_NAME' => 'Nombre del Rol',
	'LBL_SELECTED_VALUES_MESSGAE' => 'Se mostrará para el usuario con este rol',
	// NOTE: this value intentionally contains a literal line break.
	'LBL_ENABLE/DISABLE_MESSGAE' => 'Clic en el elemento para habilitar/deshabilitar.
Despues clic en Guardar',
	'LBL_ASSIGN_VALUES_TO_ROLES' => 'Asignar Elementos a roles',
	'LBL_SELECTED_VALUES' => 'Elementos seleccionados',
	'NO_PICKLIST_FIELDS' => 'No existen campos de lista desplegable',
	//Translation for module
	'Calendar' => 'Por hacer',
);
$jsLanguageStrings = array(
	'JS_ITEM_RENAMED_SUCCESSFULLY' => 'Elemento renombrado exitosamente',
	'JS_ITEM_ADDED_SUCCESSFULLY' => 'Elemento agregado exitosamente',
	'JS_NO_ITEM_SELECTED' => 'Elemento no seleccionado',
	'JS_MORE_THAN_ONE_ITEM_SELECTED' => 'Más de un elemento seleccionado',
	'JS_ITEMS_DELETED_SUCCESSFULLY' => 'Elementos eliminados exitosamente',
	'JS_YOU_CANNOT_DELETE_ALL_THE_VALUES' => 'No puedes eliminar todos los elementos',
	'JS_ALL_ROLES_SELECTED' => 'Todos los roles seleccionados',
	'JS_LIST_UPDATED_SUCCESSFULLY' => 'Lista actualizada exitosamente',
	'JS_SEQUENCE_UPDATED_SUCCESSFULLY' => 'Secuencia actualizada exitosamente',
	'JS_VALUE_ASSIGNED_SUCCESSFULLY' => 'Elemento asignado exitosamente',
	'JS_PLEASE_SELECT_MODULE' => 'Selecciona el módulo',
);
{ "content_hash": "37f8f271d8eb1ec53d3387f322d8ae05", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 149, "avg_line_length": 66.91836734693878, "alnum_prop": 0.5678560536749009, "repo_name": "basiljose1/byjcrm", "id": "0d37223542e75ae2d29fd4424200d5fe7a9dff76", "size": "3972", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "languages/es_mx/Settings/Picklist.php", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "30817" }, { "name": "ApacheConf", "bytes": "1321" }, { "name": "Batchfile", "bytes": "20387" }, { "name": "C", "bytes": "492107" }, { "name": "C++", "bytes": "18023" }, { "name": "CSS", "bytes": "1199491" }, { "name": "CoffeeScript", "bytes": "1232" }, { "name": "Groff", "bytes": "60690" }, { "name": "HTML", "bytes": "1498811" }, { "name": "JavaScript", "bytes": "4770826" }, { "name": "Makefile", "bytes": "8221" }, { "name": "PHP", "bytes": "39287363" }, { "name": "Perl", "bytes": "50950" }, { "name": "Ruby", "bytes": "1074" }, { "name": "Shell", "bytes": "53700" }, { "name": "Smarty", "bytes": "1908263" }, { "name": "XSLT", "bytes": "27654" }, { "name": "Yacc", "bytes": "14820" } ], "symlink_target": "" }
layout: page
title: Community Versioning for Rust Content
---

Web content never expires. A blog post written in 2011 will live on in search engine results for years. [Rust](http://www.rust-lang.org/) has undergone a _lot_ of change over the last few years, often in completely incompatible ways. This means the Internet is littered with content and posts that were accurate at the time, but have since been obsoleted.

To ensure old content gets properly flagged, with no effort needed by the author, we've created a GitHub-driven badge service. See [the user-friendly website](http://steveklabnik.github.io/rust-community-versions/) for more details on the general idea.

## Updating a badge

Badges are stored as Jekyll posts. For example, the badge for this blog post:

* "Pointers in Rust: a Guide" [http://words.steveklabnik.com/pointers-in-rust-a-guide](http://words.steveklabnik.com/pointers-in-rust-a-guide)

Is stored at:

* [\_posts/2013-10-18-pointers-in-rust-a-guide.md](http://steveklabnik.github.io/rust-community-versions/_posts/2013-10-18-pointers-in-rust-a-guide.md)

Versioning information is stored in the YAML front-matter of that post:

{% highlight yaml %}
---
layout: post
url: http://words.steveklabnik.com/pointers-in-rust-a-guide
title: "Pointers in Rust: a Guide"
date: 2013-10-18
start_version: 0.8
---
{% endhighlight %}

To update the badge (as seen in the blog post at words.steveklabnik.com), open a PR changing the YAML front-matter.
{ "content_hash": "f3ce9765087c6892b22ef6a237c9c8e5", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 151, "avg_line_length": 35.75609756097561, "alnum_prop": 0.762619372442019, "repo_name": "steveklabnik/rust-community-versions", "id": "3233a76568983156eafd893903fed57b0b80a67b", "size": "1470", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "index.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "11086" }, { "name": "HTML", "bytes": "3983" } ], "symlink_target": "" }
import argparse
import fileinput
import hashlib
import sys

from multiprocessing import Pool


def get_hashes_and_lines(raw_line):
    """Return ``(md5_hex_digest, raw_line)`` for one raw byte line.

    The MD5 digest is used purely as a deduplication fingerprint of the
    line's bytes, not for any security purpose.
    """
    hash = hashlib.md5(raw_line).hexdigest()
    return hash, raw_line


def main():
    """Stream lines from the given files (or stdin) and write only the first
    occurrence of each distinct line to stdout.

    Hashing is fanned out to a worker pool via ``imap_unordered``, so the
    order of emitted lines follows hash-completion order, not input order.
    Progress is reported on stderr (a counter every 1M lines, a dot every
    100K lines).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--workers", type=int, default=10)
    parser.add_argument("files", nargs="*", help="input files")
    args = parser.parse_args()

    seen = set()
    with fileinput.input(args.files, mode="rb") as h:
        # Use the pool as a context manager so it is terminated and its
        # worker processes reaped deterministically; the original leaked the
        # pool (no close()/join()/terminate()).
        with Pool(args.workers) as pool:
            # chunksize=1000 amortizes IPC overhead per dispatched batch.
            results = pool.imap_unordered(get_hashes_and_lines, h, 1000)
            for i, (hash, raw_line) in enumerate(results):
                if hash not in seen:
                    seen.add(hash)
                    sys.stdout.buffer.write(raw_line)
                if i % 1000000 == 0:
                    print(i, file=sys.stderr, end="", flush=True)
                elif i % 100000 == 0:
                    print(".", file=sys.stderr, end="", flush=True)
    print(file=sys.stderr, flush=True)


if __name__ == "__main__":
    main()
{ "content_hash": "d42449cce57d4c0bf555a57487886bd2", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 68, "avg_line_length": 29.257142857142856, "alnum_prop": 0.59375, "repo_name": "pytorch/fairseq", "id": "50e458328c80b71c42a66d473381ca7e98d294da", "size": "1221", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "examples/backtranslation/deduplicate_lines.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "21106" }, { "name": "Cuda", "bytes": "38166" }, { "name": "Cython", "bytes": "13294" }, { "name": "Lua", "bytes": "4210" }, { "name": "Python", "bytes": "3699357" }, { "name": "Shell", "bytes": "2182" } ], "symlink_target": "" }
Title: Hello Author: Andrew Tag: tags Date: 8 January 2015 @ 10:30pm Body: Hello Web! this is a test page Lorem ipsum, lorem ipsum, etc. This is a sentence in the second paragraph. Now the second paragraph is longer than it was. Three blind mice, three blind mice. See how they run, see how they run! They all flew over the farmer's wife.
{ "content_hash": "fca7939f64f25e5567c39e9d20f80aa3", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 194, "avg_line_length": 35, "alnum_prop": 0.7342857142857143, "repo_name": "frenata/bleedy", "id": "66ae58e55fba96f2b68c2ed07ecd9164a950eab5", "size": "350", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/posts/test.md", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "13247" } ], "symlink_target": "" }
/**
 * This class is generated by jOOQ
 */
package net.feminaexlux.player.model.table;

import net.feminaexlux.player.model.Key;
import net.feminaexlux.player.model.Media;
import net.feminaexlux.player.model.table.record.UserPlayedRecord;
import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.SQLDataType;
import org.jooq.impl.TableImpl;

import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;

/**
 * jOOQ meta-model for the <code>media.user_played</code> table, recording per
 * user/resource the last time it was played and an optional rating.
 *
 * <p>This class is generated by jOOQ. Do not edit by hand: changes will be
 * overwritten the next time the code generator runs against the schema.
 */
@javax.annotation.Generated(
	value = {
		"http://www.jooq.org",
		"jOOQ version:3.5.0"
	},
	comments = "This class is generated by jOOQ"
)
@SuppressWarnings({"all", "unchecked", "rawtypes"})
public class UserPlayed extends TableImpl<UserPlayedRecord> {

	private static final long serialVersionUID = -1456437082;

	/**
	 * The reference instance of <code>media.user_played</code>
	 */
	public static final UserPlayed USER_PLAYED = new UserPlayed();

	/**
	 * The class holding records for this type
	 */
	@Override
	public Class<UserPlayedRecord> getRecordType() {
		return UserPlayedRecord.class;
	}

	/**
	 * The column <code>media.user_played.username</code>.
	 */
	public final TableField<UserPlayedRecord, String> USERNAME = createField("username", SQLDataType.VARCHAR.length(255).nullable(false), this, "");

	/**
	 * The column <code>media.user_played.resource</code>.
	 */
	public final TableField<UserPlayedRecord, String> RESOURCE = createField("resource", SQLDataType.VARCHAR.length(255).nullable(false), this, "");

	/**
	 * The column <code>media.user_played.last_played</code>.
	 */
	public final TableField<UserPlayedRecord, Timestamp> LAST_PLAYED = createField("last_played", SQLDataType.TIMESTAMP.nullable(false), this, "");

	/**
	 * The column <code>media.user_played.rating</code>.
	 */
	public final TableField<UserPlayedRecord, Byte> RATING = createField("rating", SQLDataType.TINYINT, this, "");

	/**
	 * Create a <code>media.user_played</code> table reference
	 */
	public UserPlayed() {
		this("user_played", null);
	}

	/**
	 * Create an aliased <code>media.user_played</code> table reference
	 */
	public UserPlayed(String alias) {
		this(alias, UserPlayed.USER_PLAYED);
	}

	private UserPlayed(String alias, Table<UserPlayedRecord> aliased) {
		this(alias, aliased, null);
	}

	private UserPlayed(String alias, Table<UserPlayedRecord> aliased, Field<?>[] parameters) {
		super(alias, Media.MEDIA, aliased, parameters, "");
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public UniqueKey<UserPlayedRecord> getPrimaryKey() {
		return Key.KEY_USER_PLAYED_PRIMARY;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public List<UniqueKey<UserPlayedRecord>> getKeys() {
		return Arrays.<UniqueKey<UserPlayedRecord>>asList(Key.KEY_USER_PLAYED_PRIMARY);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public List<ForeignKey<UserPlayedRecord, ?>> getReferences() {
		return Arrays.<ForeignKey<UserPlayedRecord, ?>>asList(Key.FK1_USER_PLAYED_USER, Key.FK2_USER_PLAYED_RESOURCE);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public UserPlayed as(String alias) {
		return new UserPlayed(alias, this);
	}

	/**
	 * Rename this table
	 */
	public UserPlayed rename(String name) {
		return new UserPlayed(name, null);
	}
}
{ "content_hash": "40d9160beccd57483396a2e77a01c1c3", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 145, "avg_line_length": 25.48062015503876, "alnum_prop": 0.7134164891998783, "repo_name": "feminaexlux/spring-mvc-player", "id": "c2f11f7756988283486bc17776f7e17e55028a9f", "size": "3287", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/net/feminaexlux/player/model/table/UserPlayed.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "9809" }, { "name": "Java", "bytes": "132119" }, { "name": "JavaScript", "bytes": "449" } ], "symlink_target": "" }
// Skia-backed implementation of Blink's FontCache for this platform.
// Covers typeface creation, per-character fallback, and last-resort fonts.
#include "sky/engine/config.h"

#if !OS(WIN) && !OS(ANDROID)
#include "third_party/skia/include/ports/SkFontConfigInterface.h"
#endif

#include <unicode/locid.h>
#include "sky/engine/platform/NotImplemented.h"
#include "sky/engine/platform/fonts/AlternateFontFamily.h"
#include "sky/engine/platform/fonts/FontCache.h"
#include "sky/engine/platform/fonts/FontDescription.h"
#include "sky/engine/platform/fonts/FontFaceCreationParams.h"
#include "sky/engine/platform/fonts/SimpleFontData.h"
#include "sky/engine/public/platform/Platform.h"
#include "sky/engine/public/platform/linux/WebSandboxSupport.h"
#include "sky/engine/wtf/Assertions.h"
#include "sky/engine/wtf/text/AtomicString.h"
#include "sky/engine/wtf/text/CString.h"
#include "third_party/skia/include/core/SkStream.h"
#include "third_party/skia/include/core/SkTypeface.h"
#include "third_party/skia/include/ports/SkFontMgr.h"

#if !OS(WIN) && !OS(ANDROID)
// TODO(bungeman) remove this temporary code ASAP.
// This namespace exists to ease transition of SkTypeface from using SkStream to SkStreamAsset.
namespace tmp {
// Like std::declval but only returns lvalue references, ok since it isn't used on rvalue references.
template<typename T> T& declvall();
// The return type of SkFontConfigInterface::openStream(const SkFontConfigInterface::FontIdentity&).
using StreamType = decltype(tmp::declvall<SkFontConfigInterface>().openStream(tmp::declvall<const SkFontConfigInterface::FontIdentity&>()));
}

// Opens a font data stream for the fontconfig-interface font identified by
// |fontconfigInterfaceId|, via the process-global SkFontConfigInterface.
static tmp::StreamType streamForFontconfigInterfaceId(int fontconfigInterfaceId)
{
    SkAutoTUnref<SkFontConfigInterface> fci(SkFontConfigInterface::RefGlobal());
    SkFontConfigInterface::FontIdentity fontIdentity;
    fontIdentity.fID = fontconfigInterfaceId;
    return fci->openStream(fontIdentity);
}
#endif

namespace blink {

// Maps Blink's discrete FontWeight enum onto the matching SkFontStyle weight
// constant (100..900 scale).
static int toSkiaWeight(FontWeight weight)
{
    switch (weight) {
    case FontWeight100:
        return SkFontStyle::kThin_Weight;
    case FontWeight200:
        return SkFontStyle::kExtraLight_Weight;
    case FontWeight300:
        return SkFontStyle::kLight_Weight;
    case FontWeight400:
        return SkFontStyle::kNormal_Weight;
    case FontWeight500:
        return SkFontStyle::kMedium_Weight;
    case FontWeight600:
        return SkFontStyle::kSemiBold_Weight;
    case FontWeight700:
        return SkFontStyle::kBold_Weight;
    case FontWeight800:
        return SkFontStyle::kExtraBold_Weight;
    case FontWeight900:
        return SkFontStyle::kBlack_Weight;
    }
    ASSERT_NOT_REACHED();
    return SkFontStyle::kNormal_Weight;
}

// Maps Blink's FontStyle (normal/italic) onto Skia's slant enum.
static SkFontStyle::Slant toSkiaSlant(FontStyle style)
{
    switch (style) {
    case FontStyleNormal:
        return SkFontStyle::kUpright_Slant;
    case FontStyleItalic:
        return SkFontStyle::kItalic_Slant;
    }
    ASSERT_NOT_REACHED();
    return SkFontStyle::kUpright_Slant;
}

static int toSkiaWidth(FontStretch stretch)
{
    // Numeric values matching OS/2 & Windows Metrics usWidthClass table.
    // https://www.microsoft.com/typography/otspec/os2.htm
    return static_cast<int>(stretch);
}

// Combines weight, width and slant from a FontDescription into one SkFontStyle.
static SkFontStyle toSkiaFontStyle(const FontDescription& fontDescription)
{
    return SkFontStyle(toSkiaWeight(fontDescription.weight()),
        toSkiaWidth(fontDescription.stretch()),
        toSkiaSlant(fontDescription.style()));
}

// No platform-specific initialization is needed here (empty on purpose).
void FontCache::platformInit()
{
}

// Retries the lookup with normal style and weight; if the resulting face
// contains |character|, returns it with synthetic bold/italic flags set to
// approximate the originally requested style. Returns nullptr otherwise.
PassRefPtr<SimpleFontData> FontCache::fallbackOnStandardFontStyle(
    const FontDescription& fontDescription, UChar32 character)
{
    FontDescription substituteDescription(fontDescription);
    substituteDescription.setStyle(FontStyleNormal);
    substituteDescription.setWeight(FontWeightNormal);

    FontFaceCreationParams creationParams(substituteDescription.family().family());
    FontPlatformData* substitutePlatformData = getFontPlatformData(substituteDescription, creationParams);
    if (substitutePlatformData && substitutePlatformData->fontContainsCharacter(character)) {
        FontPlatformData platformData = FontPlatformData(*substitutePlatformData);
        // Synthesize the requested bold/italic since the real face was looked
        // up with normal style and weight.
        platformData.setSyntheticBold(fontDescription.weight() >= FontWeight600);
        platformData.setSyntheticItalic(fontDescription.style() == FontStyleItalic);
        return fontDataFromFontPlatformData(&platformData, DoNotRetain);
    }
    return nullptr;
}

#if !OS(WIN) && !OS(ANDROID)
// Finds a font able to render |c| by querying the platform fallback list
// (fontconfig-backed on this platform — TODO confirm), adjusting the requested
// weight/italic to match what the fallback actually provides.
PassRefPtr<SimpleFontData> FontCache::fallbackFontForCharacter(const FontDescription& fontDescription, UChar32 c, const SimpleFontData*)
{
    // First try the specified font with standard style & weight.
    if (fontDescription.style() == FontStyleItalic
        || fontDescription.weight() >= FontWeight600) {
        RefPtr<SimpleFontData> fontData = fallbackOnStandardFontStyle(fontDescription, c);
        if (fontData)
            return fontData;
    }

    FontCache::PlatformFallbackFont fallbackFont;
    FontCache::getFontForCharacter(c, "", &fallbackFont);
    if (fallbackFont.name.isEmpty())
        return nullptr;

    FontFaceCreationParams creationParams;
    creationParams = FontFaceCreationParams(fallbackFont.filename, fallbackFont.fontconfigInterfaceId, fallbackFont.ttcIndex);

    // Changes weight and/or italic of given FontDescription depends on
    // the result of fontconfig so that keeping the correct font mapping
    // of the given character. See http://crbug.com/32109 for details.
    bool shouldSetSyntheticBold = false;
    bool shouldSetSyntheticItalic = false;
    FontDescription description(fontDescription);
    if (fallbackFont.isBold && description.weight() < FontWeightBold)
        description.setWeight(FontWeightBold);
    if (!fallbackFont.isBold && description.weight() >= FontWeightBold) {
        // Fallback face is not bold but bold was requested: synthesize it.
        shouldSetSyntheticBold = true;
        description.setWeight(FontWeightNormal);
    }
    if (fallbackFont.isItalic && description.style() == FontStyleNormal)
        description.setStyle(FontStyleItalic);
    if (!fallbackFont.isItalic && description.style() == FontStyleItalic) {
        // Fallback face is not italic but italic was requested: synthesize it.
        shouldSetSyntheticItalic = true;
        description.setStyle(FontStyleNormal);
    }

    FontPlatformData* substitutePlatformData = getFontPlatformData(description, creationParams);
    if (!substitutePlatformData)
        return nullptr;
    FontPlatformData platformData = FontPlatformData(*substitutePlatformData);
    platformData.setSyntheticBold(shouldSetSyntheticBold);
    platformData.setSyntheticItalic(shouldSetSyntheticItalic);
    return fontDataFromFontPlatformData(&platformData, DoNotRetain);
}
#endif // !OS(WIN) && !OS(ANDROID)

// Returns a usable font no matter what: tries the description's fallback
// family, then "Sans", then "Arial". ASSERTs if even those are missing.
PassRefPtr<SimpleFontData> FontCache::getLastResortFallbackFont(const FontDescription& description, ShouldRetain shouldRetain)
{
    const FontFaceCreationParams fallbackCreationParams(getFallbackFontFamily(description));
    const FontPlatformData* fontPlatformData = getFontPlatformData(description, fallbackCreationParams);

    // We should at least have Sans or Arial which is the last resort fallback of SkFontHost ports.
    if (!fontPlatformData) {
        DEFINE_STATIC_LOCAL(const FontFaceCreationParams, sansCreationParams,
            (AtomicString("Sans", AtomicString::ConstructFromLiteral)));
        fontPlatformData = getFontPlatformData(description, sansCreationParams);
    }
    if (!fontPlatformData) {
        DEFINE_STATIC_LOCAL(const FontFaceCreationParams, arialCreationParams,
            (AtomicString("Arial", AtomicString::ConstructFromLiteral)));
        fontPlatformData = getFontPlatformData(description, arialCreationParams);
    }

    ASSERT(fontPlatformData);
    return fontDataFromFontPlatformData(fontPlatformData, shouldRetain);
}

// Creates an SkTypeface for the given description/creation params and writes
// the resolved family name into |name| (out-param, utf8). Tries, in order:
// a fontconfig-id stream (non-Win/Android only), SkFontMgr::matchFamilyStyle,
// and finally the legacy SkTypeface::CreateFromName API.
PassRefPtr<SkTypeface> FontCache::createTypeface(const FontDescription& fontDescription, const FontFaceCreationParams& creationParams, CString& name)
{
#if !OS(WIN) && !OS(ANDROID)
    if (creationParams.creationType() == CreateFontByFciIdAndTtcIndex) {
        // TODO(dro): crbug.com/381620 Use creationParams.ttcIndex() after
        // https://code.google.com/p/skia/issues/detail?id=1186 gets fixed.
        SkTypeface* typeface = nullptr;
        if (Platform::current()->sandboxSupport())
            typeface = SkTypeface::CreateFromStream(streamForFontconfigInterfaceId(creationParams.fontconfigInterfaceId()));
        else
            typeface = SkTypeface::CreateFromFile(creationParams.filename().data());

        if (typeface)
            return adoptRef(typeface);
        else
            return nullptr;
    }
#endif

    AtomicString family = creationParams.family();
    // If we're creating a fallback font (e.g. "-webkit-monospace"), convert the name into
    // the fallback name (like "monospace") that fontconfig understands.
    if (!family.length() || family.startsWith("-webkit-")) {
        name = getFallbackFontFamily(fontDescription).string().utf8();
    } else {
        // convert the name to utf8
        name = family.utf8();
    }

    SkFontStyle style = toSkiaFontStyle(fontDescription);
    RefPtr<SkFontMgr> fm = adoptRef(SkFontMgr::RefDefault());
    RefPtr<SkTypeface> typeface = adoptRef(fm->matchFamilyStyle(name.data(), style));
    if (typeface)
        return typeface.release();

    // Fall back to the legacy two-bit bold/italic style flags.
    int legacyStyle = SkTypeface::kNormal;
    if (fontDescription.weight() >= FontWeight600)
        legacyStyle |= SkTypeface::kBold;
    if (fontDescription.style())
        legacyStyle |= SkTypeface::kItalic;

    // FIXME: Use fm, SkFontStyle and matchFamilyStyle instead of this legacy
    // API. To make this work, we need to understand the extra fallback behavior
    // in CreateFromName.
    return adoptRef(SkTypeface::CreateFromName(name.data(), static_cast<SkTypeface::Style>(legacyStyle)));
}

#if !OS(WIN)
// Builds a heap-allocated FontPlatformData (caller takes ownership — TODO
// confirm ownership contract against FontCache callers) wrapping a typeface
// from createTypeface(). Returns 0 when no typeface could be created.
// Synthetic bold/italic are applied when the request asks for a style the
// real face does not provide.
FontPlatformData* FontCache::createFontPlatformData(const FontDescription& fontDescription, const FontFaceCreationParams& creationParams, float fontSize)
{
    CString name;
    RefPtr<SkTypeface> tf(createTypeface(fontDescription, creationParams, name));
    if (!tf)
        return 0;

    FontPlatformData* result = new FontPlatformData(tf,
        name.data(),
        fontSize,
        (fontDescription.weight() >= FontWeight600 && !tf->isBold()) || fontDescription.isSyntheticBold(),
        (fontDescription.style() && !tf->isItalic()) || fontDescription.isSyntheticItalic(),
        fontDescription.orientation(),
        fontDescription.useSubpixelPositioning());
    return result;
}
#endif // !OS(WIN)

} // namespace blink
{ "content_hash": "51d4f30b8430857e1f7ae79c17b18d19", "timestamp": "", "source": "github", "line_count": 254, "max_line_length": 153, "avg_line_length": 40.85826771653543, "alnum_prop": 0.7371362497591059, "repo_name": "collinjackson/mojo", "id": "1663dfa2433dd01d58ff66026c27b11f432cd689", "size": "11958", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sky/engine/platform/fonts/skia/FontCacheSkia.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Bison", "bytes": "31162" }, { "name": "C", "bytes": "1870198" }, { "name": "C++", "bytes": "36473977" }, { "name": "CSS", "bytes": "1897" }, { "name": "Dart", "bytes": "508640" }, { "name": "Go", "bytes": "181090" }, { "name": "Groff", "bytes": "29030" }, { "name": "HTML", "bytes": "6258864" }, { "name": "Java", "bytes": "1187123" }, { "name": "JavaScript", "bytes": "204155" }, { "name": "Makefile", "bytes": "402" }, { "name": "Objective-C", "bytes": "74603" }, { "name": "Objective-C++", "bytes": "370763" }, { "name": "Protocol Buffer", "bytes": "1048" }, { "name": "Python", "bytes": "5515876" }, { "name": "Shell", "bytes": "143302" }, { "name": "nesC", "bytes": "18347" } ], "symlink_target": "" }
/* Getting the new tags to behave */ article, aside, audio, canvas, command, datalist, details, embed, figcaption, figure, footer, header, hgroup, keygen, meter, output, progress, section, source, video {display:block} mark, rp, rt, ruby, summary, time {display:inline} /* Left & Right alignment */ .left {float:left} .right {float:right} .wrapper {width:100%;overflow:hidden} .relative {position:relative} * +html .relative {position:static} /* Global properties */ body {background:url(../../bundles/frontend/images/bg_img.jpg) top center no-repeat #000;border:0;font:13px Arial, Helvetica, sans-serif;color:#3a3a3a;line-height:20px;min-width:980px;padding:10px 0} .ic, .ic a {border:0;float:right;background:#fff;color:#f00;width:50%;line-height:10px;font-size:10px;margin:-220% 0 0 0;overflow:hidden;padding:0} .css3 {border-radius:8px;-moz-border-radius:8px;-webkit-border-radius:8px;box-shadow:0 0 4px rgba(0, 0, 0, .4);-moz-box-shadow:0 0 4px rgba(0, 0, 0, .4);-webkit-box-shadow:0 0 4px rgba(0, 0, 0, .4);position:relative} /* Global Structure */ .main {margin:0 auto;width:960px;padding:10px;background:#fff;box-shadow:0 0 7px rgba(0, 0, 0, .2);-moz-box-shadow:0 0 7px rgba(0, 0, 0, .2);-webkit-box-shadow:0 0 7px rgba(0, 0, 0, .2);position:relative} /* main layout */ /*a {color:#1d77e9;text-decoration:underline;outline:none}*/ a:hover {text-decoration:none} h1 {padding:27px 0 0 41px;float:left} h2 {font-size:14px;line-height:1.2em;padding:12px 32px 16px 32px;margin-bottom:14px;background:#f7f7f7;position:relative;letter-spacing:-1px} h2.top {border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;-webkit-border-radius:6px 6px 0 0} h3 {font-size:19px;color:#000;line-height:1.2em;margin-top:-4px;letter-spacing:-1px;padding:0 0 21px 0} h3 span {font-size:20px;color:#767676;line-height:1.2em;display:block;margin-top:-5px} h4 {font-size:20px; color:#000; /* line-height:1.2em; padding:0 0 14px 0;*/ letter-spacing:-1px } p {padding-bottom:18px} /* header */ header 
{height:163px;width:100%;overflow:hidden;background:url(../../bundles/frontend/images/bg_top.jpg) 0 0 no-repeat} #logo {display:block;background:url(../../bundles/frontend/images/logo.png) 0 0 no-repeat;width:120px;height:104px;text-indent:-9999px} #slogan {font-size:20px;line-height:1.2em;color:#fff;padding:95px 0 0 18px;letter-spacing:-1px} /*#slogan {font-size:20px;line-height:1.2em;color:#fff;float:left;padding:95px 0 0 18px;letter-spacing:-1px}*/ #top_nav {float:right;padding:38px 56px 0 0} #top_nav li {float:left;padding-left:25px;width:50px;text-align:center} #top_nav li a {display:inline-block;padding-top:13px;color:#fff} .nav1 {background:url(../../bundles/frontend/images/img_top1.gif) center 0 no-repeat} .nav2 {background:url(../../bundles/frontend/images/img_top2.gif) center 0 no-repeat} .nav3 {background:url(../../bundles/frontend/images/img_top3.gif) center 0 no-repeat} /*#menu {padding:27px 0 0 42px}*/ /*#menu li {float:left;margin-left:-1px;background:url(../../bundles/frontend/images/menu_line.gif) bottom right no-repeat}*/ /*#menu .end {background:none} #menu li a {display:block;height:71px;font-size:26px;line-height:71px;color:#fff;text-decoration:none;letter-spacing:-1px} #menu li a span {display:block} #menu li a span span {padding:0 32px;margin:0 6px} #menu li a:hover, #menu #menu_active a {background:url(../../bundles/frontend/images/menu_left.png) top left no-repeat;color:#000} #menu li a:hover span, #menu #menu_active a span {background:url(../../bundles/frontend/images/menu_right.png) top right no-repeat} #menu li a:hover span span, #menu #menu_active a span span {background:url(../../bundles/frontend/images/menu_bg.gif) top repeat-x}*/ /* content */ /*#content {width:100%;overflow:hidden;padding-bottom:14px}*/ /*.for_banners {background:url(../../bundles/frontend/images/bg_top_img.gif) 0 0 repeat;width:100%;padding-top:30px;height: 80px}*/ /*.for_banners {background:url(../../bundles/frontend/images/bg_top_img.jpg) 0 0 
no-repeat;width:100%;height:342px;padding-top:30px}*/ .pad1 {padding:30px 0;background:url(../../bundles/frontend/images/bg_top2.jpg) 0 0 no-repeat} #page1 .pad1 {background:none} .pad {padding:0 27px 0 32px} .pad_bot1 {padding-bottom:10px} .pad_bot2 {padding-bottom:15px; padding-top: 15px; } .pad_bot3 {padding-bottom:32px} .pad_left1 {padding-left:31px} .pad_top1 {padding-top:11px} .marg_right1 {margin-right:31px} /* tabs begin */ .tabs {position:relative} .tabs ul.nav {position:absolute;top:0} .tabs ul.nav li {float:left;padding-right:1px;width:95px} .tabs ul.nav .end {padding-right:0} .tabs ul.nav li a {color:#fff;text-align:center;display:block;background:url(../../bundles/frontend/images/tabs.gif) 0 0 repeat-x #30c1fd;line-height:42px;text-decoration:none;border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;-webkit-border-radius:6px 6px 0 0; box-shadow:0 0 3px #d0d0d0;-moz-box-shadow:0 0 3px #d0d0d0;-webkit-box-shadow:0 0 3px #d0d0d0;position:relative;overflow:hidden} .tabs ul.nav li a:hover, .tabs ul.nav .selected a {color:#000;background:url(../../bundles/frontend/images/tabs_active.gif) top repeat-x #e7e6e6} .tabs .content {background:#fff;border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px ;-webkit-border-radius:0 0 6px 6px ; box-shadow:0 0 3px #d0d0d0;-moz-box-shadow:0 0 3px #d0d0d0;-webkit-box-shadow:0 0 3px #d0d0d0;position:relative;padding:1px;top:40px;padding-bottom:20px} .tabs .tab-content {} /* tabs end */ /* tabs begin */ .tabs2 {position:relative} .tabs2 ul.nav {position:absolute;top:0} .tabs2 ul.nav li {float:left;padding-right:1px;width:185px} .tabs2 ul.nav .end {padding-right:0} .tabs2 ul.nav li a {color:#fff;text-align:center;display:block;background:url(../../bundles/frontend/images/tabs.gif) 0 0 repeat-x #30c1fd;line-height:42px;text-decoration:none;border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;-webkit-border-radius:6px 6px 0 0; box-shadow:0 0 3px #d0d0d0;-moz-box-shadow:0 0 3px #d0d0d0;-webkit-box-shadow:0 0 3px 
#d0d0d0;position:relative;overflow:hidden} .tabs2 ul.nav li a:hover, .tabs2 ul.nav .selected a {color:#000;background:url(../../bundles/frontend/images/tabs_active.gif) top repeat-x #e7e6e6} .tabs2 .content {background:#fff;border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px ;-webkit-border-radius:0 0 6px 6px ; box-shadow:0 0 3px #d0d0d0;-moz-box-shadow:0 0 3px #d0d0d0;-webkit-box-shadow:0 0 3px #d0d0d0;position:relative;padding:1px;top:40px;padding-bottom:20px} .tabs2 .tab-content {} /* tabs end */ /* The Nivo Slider styles */ /*#slider {height:277px;overflow:hidden;width:720px !important;margin-top:5px;margin-left:117px;float:left}*/ #slider { height: 163px; overflow: hidden; width: 720px !important; margin-top: -155px; margin-left: 241px; float: left;} .nivoSlider {position:relative} .nivoSlider img {position:absolute;top:0px;left:0px} /* If an image is wrapped in a link */ .nivoSlider a.nivo-imageLink {position:absolute;top:0px;left:0px;width:100%;height:100%;border:0;padding:0;margin:0;z-index:60;display:none} /* The slices in the Slider */ .nivo-slice {display:block;position:absolute;z-index:50;height:100%} /* Tooltips */ .aToolTip {border:1px solid #000;background:url(../../bundles/frontend/images/opacity_50_black.png) repeat;color:#fff;margin:0;padding:2px 10px;font-size:11px} .aToolTip .aToolTipContent {position:relative;margin:0;padding:0} .button1 {display:inline-block;font-size:13px;color:#fff;text-decoration:none;line-height:28px;height:30px;padding:0 19px;background:url(../../bundles/frontend/images/button_1.gif) 0 0px repeat-x #1d77e9;border-radius:5px;-moz-border-radius:5px ;-webkit-border-radius:5px ;position:relative;cursor:pointer} .button1 strong {display:block;padding-right:16px;background:url(../../bundles/frontend/images/marker_2.gif) right 10px no-repeat} .button1:hover {background:url(../../bundles/frontend/images/button_active.gif) 0 0 repeat-x #e7e6e6} .button1:hover strong {background:url(../../bundles/frontend/images/marker_1.gif) 
right 10px no-repeat;color:#1d77e9} .link1 {padding-left:13px;background:url(../../bundles/frontend/images/marker_1.gif) 0 6px no-repeat;display:inline-block} .box1 {background:#fff;border-radius:6px;-moz-border-radius:6px;-webkit-border-radius:6px;box-shadow:0 0 3px #d0d0d0;-moz-box-shadow:0 0 3px #d0d0d0;-webkit-box-shadow:0 0 3px #d0d0d0;position:relative;padding:1px} .color1 {color:#7f7f7f} .list1 li {width:100%;overflow:hidden} .list1 li a {padding-left:11px;background:url(../../bundles/frontend/images/marker_1.gif) 0 4px no-repeat} .calendar {width:100%;overflow:hidden;font-size:12px;color:#000;line-height:18px;text-align:center;padding-bottom:4px} .calendar .thead li {float:left;width:27px;padding-right:1px;padding-bottom:2px} .calendar .tbody li {float:left;padding-right:1px;width:27px;padding-bottom:1px} .calendar .tbody li a {display:block;border:1px solid #e5e5e5;height:18px;color:#000;text-decoration:none} .calendar .tbody li a.active {border:1px solid #1d77e9;background:#1d77e9;color:#fff} .calendar .tbody li a.selected {background:#cccccc;border:1px solid #b7b7b7} .box2 {background:#f7f7f7;padding:14px 32px;color:#000;margin-bottom:14px;font-size:12px} .box2.top {border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;-webkit-border-radius:6px 6px 0 0} .box2 strong {font-size:13px} /* footer */ footer {padding:33px 0 37px 70px;background:url(../../bundles/frontend/images/bg_footer.gif) top repeat-x #d7dce6;font-size:12px;color:#3a3a3a} footer a {color:#1d77e9} footer a:hover {} #icons {float:left;padding-top:7px} #icons li {float:left;padding-right:9px} .links {padding-left:186px;text-align:center;float:left} /* forms */ .jqTransformInputWrapper {float:left;width:auto !important} .jqTransformInputWrapper div {float:left} /* Radios */ .jqTransformRadioWrapper {float:left;display:block;margin-right:7px;margin-top:4px} .jqTransformRadio 
{height:12px;width:12px;display:block;background:url(../../bundles/frontend/images/radio.png);background-position:bottom/*display:-moz-inline-block;*/} .jqTransformRadio.jqTransformChecked {background-position:top} /* Checkbox */ .jqTransformCheckboxWrapper {float:left;display:block;margin-right:7px;margin-top:4px} .jqTransformCheckbox {height:12px;width:12px;display:block;background:url(../../bundles/frontend/images/check.gif);background-position:bottom/*display:-moz-inline-block;*/} .jqTransformCheckbox.jqTransformChecked {background-position:top} /* Selects */ .jqTransformSelectWrapper {position:relative} .jqTransformSelectWrapper div {float:left;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;display:block;position:relative;white-space:nowrap;height:19px;line-height:19px;overflow:hidden;cursor:pointer;border:1px solid #e5e5e5;background:#fff} .jqTransformSelectWrapper div span {padding:0 0 0 7px;display:block} a.jqTransformSelectOpen {display:block;position:absolute;top:0px;right:0px;width:17px;height:18px;background:url(../../bundles/frontend/images/select.gif) 4px 6px no-repeat} .jqTransformSelectWrapper ul {position:absolute;top:20px;left:0px;background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;display:none;z-index:10; padding:5px 0 2px 0;height:50px;overflow:auto;min-height:20px} .jqTransformSelectWrapper ul a {display:block;padding:0 7px;text-decoration:none;color:#7f7f7f} .jqTransformSelectWrapper ul a.selected {color:#7f7f7f} .jqTransformSelectWrapper ul a:hover, .jqTransformSelectWrapper ul a.selected:hover {color:#fff;background:#1d77e9; } /* Hidden - used to hide the original form elements */ .jqTransformHidden {display:none} #form_1 .right.relative {margin-top:9px;margin-right:32px} #form_1 .link1 {margin-left:32px;margin-top:13px} #form_1 .radio {background:#f7f7f7;padding:14px 32px;margin-bottom:14px} #form_1 .radio .left {width:88px;padding-left:0} #form_1 .row {min-height:25px;width:100%;overflow:hidden} #form_1 
.left {width:64px;padding-left:31px} #form_1 .input, #form_1 .input1, #form_1 .input2 {background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;float:left;padding:2px 7px;height:14px;width:142px !important} #form_1 .input1 {width:60px !important} #form_1 .input2 {width:11px !important} #form_1 .col1 {width:177px;padding-left:0} #form_1 .marg_top1 {margin-top:12px} #form_1 .pad_left1 {padding-left:9px} #form_2 .radio {background:#f7f7f7;padding:14px 32px;margin-bottom:14px} #form_2 .left {width:64px;padding-left:31px} #form_2 .row {min-height:25px;width:100%;overflow:hidden} #form_2 .input, #form_2 .input1, #form_2 .input2 {background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;float:left;padding:2px 7px;height:14px;width:142px !important} #form_2 .input1 {width:60px !important} #form_2 .input2 {width:11px !important} #form_2 .right.relative {margin-top:9px;margin-right:32px} #form_2 .link1 {margin-left:32px;margin-top:13px} #form_2 .pad_left1 {padding-left:9px} #form_2 .help {float:left;margin-left:5px;width:20px;height:20px;background:url(../../bundles/frontend/images/help.gif) 0 0 no-repeat} #form_3 .radio {background:#f7f7f7;padding:14px 32px;margin-bottom:14px} #form_3 .radio .left {width:88px;padding-left:0} #form_3 .row {min-height:25px;width:100%;overflow:hidden} #form_3 .row_select {min-height:25px;width:100%} #form_3 .left {width:94px;padding-left:31px} #form_3 .input, #form_3 .input1, #form_3 .input2 {background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;float:left;padding:2px 7px;height:14px;width:112px !important} #form_3 .input1 {width:60px !important} #form_3 .input2 {width:31px !important;margin-left:5px} #form_3 .jqTransformSelectWrapper {float:left;width:128px !important} #form_3 .jqTransformSelectWrapper span {float:left;width:119px !important} #form_3 .pad_left1 {padding-left:32px} #form_3 .select1 {height:30px} #form_3 .select1 
.jqTransformSelectWrapper {float:left;width:221px !important} #form_3 .select1 .jqTransformSelectWrapper span {float:left;width:212px !important} #form_3 .right.relative {margin-top:9px;margin-right:32px} #form_4 {padding-top:1px;padding-bottom:19px} #form_4 .right.relative {margin-top:9px;margin-right:32px} #form_4 .link1 {margin-left:32px;margin-top:13px} #form_4 .row {min-height:25px;width:100%;overflow:hidden} #form_4 .left {width:95px;padding-left:31px} #form_4 .input, #form_4 .input1, #form_4 .input2 {background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;float:left;padding:2px 7px;height:14px;width:111px !important} #form_4 .input1 {width:60px !important} #form_4 .input2 {width:11px !important} #form_4 .col1 {width:177px;padding-left:0} #form_4 .marg_top1 {margin-top:12px} #form_4 .pad_left1 {padding-left:9px} .form_5 .right.relative {margin-top:16px;margin-right:4px} .form_5 .link1 {margin-left:32px;margin-top:13px} .form_5 .radio {background:#f7f7f7;padding:14px 32px;margin-bottom:14px} .form_5 .radio .left {width:108px;padding-left:0} .form_5 .row {min-height:25px;width:100%;overflow:hidden} .form_5 .left {width:74px} .form_5 .input, .form_5 .input1, .form_5 .input2 {background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;float:left;padding:2px 7px;height:14px;width:112px !important} .form_5 .input1 {width:60px !important;margin-right:6px} .form_5 .input2 {width:11px !important} .form_5 .col1 {width:247px;padding-left:0} .form_5 .check_box {padding-left:20px;float:left;padding-top:25px} .form_5 .check_box span {float:left} .form_5 .marg_top1 {margin-top:5px} .form_5 .pad_left1 {padding-left:9px} .form_5 .help {float:left;margin-left:5px;width:20px;height:20px;background:url(../../bundles/frontend/images/help.gif) 0 0 no-repeat} .form_5 .under {border-bottom:1px solid #f2f2f2;padding-bottom:14px;margin-bottom:13px} .form_5 .cols {width:196px} .form_5 .marg_right1 
{margin-right:27px} .form_5 h6 {line-height:23px;padding-left:32px;background:url(../../bundles/frontend/images/marker_3.gif) 0 0 no-repeat;font-weight:normal;padding-bottom:18px} .form_5 h5 {line-height:23px;padding-left:32px;background:url(../../bundles/frontend/images/marker_4.gif) 0 0 no-repeat;font-weight:normal;padding-bottom:18px} .form_5 .marker_left {float:left;background:url(../../bundles/frontend/images/marker_left.gif) 0 0 no-repeat;width:20px;height:20px;margin-right:17px} .form_5 .marker_right {float:left;background:url(../../bundles/frontend/images/marker_right.gif) 0 0 no-repeat;width:20px;height:20px;margin-left:17px} .form_5 .select1 {height:25px} .form_5 .select1 .jqTransformSelectWrapper {float:left;width:121px !important} .form_5 .select1 .jqTransformSelectWrapper span {float:left;width:112px !important} .form_5 .cols .left {width:49px;padding-left:9px} .form_5 .cols .select1 {height:25px} .form_5 .cols .select1 .left {width:45px;padding-left:0px} #form_8 .col2 {width:300px;padding-left:0} #form_8 .col2 .left {width:55px;padding-left:9px} #form_8 .pad_bot2 {padding-bottom:20px} #form_8 .markers {padding:14px 0 5px 0;width:100%;overflow:hidden} #form_8 .markers span {float:left;padding:0 13px 0 9px;font-size:12px} #form_8 .markers span.end {padding-right:0} #form_8 .markers strong {float:left;width:18px;height:18px;border:1px solid #e5e5e5} #form_8 .markers strong.active {background:#1d77e9;border:1px solid #1d77e9} #form_8 .markers strong.selected {background:#ccc;border:1px solid #b7b7b7} #form_8 {padding-bottom:23px} #ContactForm {padding-top:3px} #ContactForm span {width:65px;float:left} #ContactForm .wrapper {min-height:25px} #ContactForm .textarea_box {min-height:470px;padding-bottom:6px;width:100%;overflow:hidden} #ContactForm {} #ContactForm a {margin-left:10px;float:right} #ContactForm .input {float:left;width:219px;background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;float:left;padding:2px 
7px;height:14px} #ContactForm textarea {overflow:auto;width:479px;background:#fff;border:1px solid #e5e5e5;font:12px Arial, Helvetica, sans-serif;color:#7f7f7f;float:left;padding:2px 7px;height:457px;margin:0;float:left} .subtitulo1{ color: #FFF; background: url(../../bundles/frontend/images/back-subtitulo1.png) no-repeat; /* width: 99%; */ height: 27px; font-size: 18px; font-weight: bold; padding: 2px 3px 3px 29px; margin: -4px 0 8px 0; position: relative; left: 4px; } .subtitulo2{ color: #FFF; background: url(../../bundles/frontend/images/back-subtitulo1.png) no-repeat; width: 86%; height: 27px; font-size: 18px; font-weight: bold; padding: 2px 3px 3px 29px; margin: -4px 0 8px 0; position: relative; left: 50px; text-align: center; }
{ "content_hash": "debad5420d600834f14cbcf1e5cd4cd5", "timestamp": "", "source": "github", "line_count": 269, "max_line_length": 397, "avg_line_length": 69.05204460966543, "alnum_prop": 0.7538088829071332, "repo_name": "manuelj555/projectsynfony", "id": "9bd5c888c9e559d5b14b3a04a708994d611693f3", "size": "18575", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web/css/frontend_style_14.css", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "755986" }, { "name": "JavaScript", "bytes": "1203701" }, { "name": "PHP", "bytes": "42650" } ], "symlink_target": "" }
namespace Valve.VR
{

    using System;
    using UnityEngine;

    /// <summary>
    /// Generated accessor class for the SteamVR Input action sets declared in the
    /// project's action manifest. Each public property returns the result of
    /// GetCopy() on its cached prototype instance, so callers receive their own
    /// copy rather than the shared cached set.
    /// </summary>
    public partial class SteamVR_Actions
    {

        // Prototype instances, created once in StartPreInitActionSets().
        private static SteamVR_Input_ActionSet_default p__default;

        private static SteamVR_Input_ActionSet_platformer p_platformer;

        private static SteamVR_Input_ActionSet_buggy p_buggy;

        private static SteamVR_Input_ActionSet_mixedreality p_mixedreality;

        /// <summary>Copy of the "/actions/default" action set.</summary>
        public static SteamVR_Input_ActionSet_default _default
        {
            get { return p__default.GetCopy<SteamVR_Input_ActionSet_default>(); }
        }

        /// <summary>Copy of the "/actions/platformer" action set.</summary>
        public static SteamVR_Input_ActionSet_platformer platformer
        {
            get { return p_platformer.GetCopy<SteamVR_Input_ActionSet_platformer>(); }
        }

        /// <summary>Copy of the "/actions/buggy" action set.</summary>
        public static SteamVR_Input_ActionSet_buggy buggy
        {
            get { return p_buggy.GetCopy<SteamVR_Input_ActionSet_buggy>(); }
        }

        /// <summary>Copy of the "/actions/mixedreality" action set.</summary>
        public static SteamVR_Input_ActionSet_mixedreality mixedreality
        {
            get { return p_mixedreality.GetCopy<SteamVR_Input_ActionSet_mixedreality>(); }
        }

        /// <summary>
        /// Creates the prototype action-set instances from their manifest paths
        /// and registers them with SteamVR_Input.
        /// </summary>
        private static void StartPreInitActionSets()
        {
            p__default = (SteamVR_Input_ActionSet_default)SteamVR_ActionSet.Create<SteamVR_Input_ActionSet_default>("/actions/default");
            p_platformer = (SteamVR_Input_ActionSet_platformer)SteamVR_ActionSet.Create<SteamVR_Input_ActionSet_platformer>("/actions/platformer");
            p_buggy = (SteamVR_Input_ActionSet_buggy)SteamVR_ActionSet.Create<SteamVR_Input_ActionSet_buggy>("/actions/buggy");
            p_mixedreality = (SteamVR_Input_ActionSet_mixedreality)SteamVR_ActionSet.Create<SteamVR_Input_ActionSet_mixedreality>("/actions/mixedreality");

            SteamVR_Input.actionSets = new SteamVR_ActionSet[]
            {
                _default,
                platformer,
                buggy,
                mixedreality
            };
        }
    }
}
{ "content_hash": "39d1a80ac60896ba696362dfe2d7ab89", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 175, "avg_line_length": 37.77777777777778, "alnum_prop": 0.5978991596638655, "repo_name": "davidezordan/MixedRealitySamples", "id": "8c9f80278ebe71b54574538c1ebc163300d5c95d", "size": "2777", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SteamVR Demo/Assets/SteamVR_Input/SteamVR_Input_ActionSets.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "17515940" }, { "name": "GLSL", "bytes": "37630" }, { "name": "HLSL", "bytes": "102589" }, { "name": "JavaScript", "bytes": "43789" }, { "name": "Mathematica", "bytes": "46437" }, { "name": "ShaderLab", "bytes": "586118" }, { "name": "Smalltalk", "bytes": "6" } ], "symlink_target": "" }
package io.gatling.core.check.extractor.css

import java.util.{ List => JList }

import scala.collection._
import scala.collection.JavaConverters._

import io.gatling.core.config.GatlingConfiguration
import io.gatling.core.util.cache.Cache

import com.github.benmanes.caffeine.cache.LoadingCache
import jodd.csselly.{ CSSelly, CssSelector }
import jodd.lagarto.dom.NodeSelector
import jodd.log.LoggerFactory
import jodd.log.impl.Slf4jLogger

/**
 * Wrapper around Jodd's Lagarto DOM builder and CSSelly selector engine.
 * Parsed CSS selector lists are kept in a bounded cache (capacity taken from
 * the Gatling configuration) so repeated queries do not re-parse the same
 * selector expression.
 */
class CssSelectors(implicit configuration: GatlingConfiguration) {

  // Route Jodd's internal logging through SLF4J.
  LoggerFactory.setLoggerProvider(Slf4jLogger.PROVIDER)

  private val domBuilder = Jodd.newLagartoDomBuilder

  // query string -> parsed selector groups, bounded by the configured capacity
  private val selectorCache: LoadingCache[String, JList[JList[CssSelector]]] =
    Cache.newConcurrentLoadingCache(configuration.core.extract.css.cacheMaxCapacity, CSSelly.parse)

  /** Builds a node selector over the DOM parsed from a char array. */
  def parse(chars: Array[Char]) = new NodeSelector(domBuilder.parse(chars))

  /** Builds a node selector over the DOM parsed from a string. */
  def parse(string: String) = new NodeSelector(domBuilder.parse(string))

  /**
   * Runs a (cached) CSS query against the given node selector and converts
   * every matched node with the implicit NodeConverter.
   *
   * @param selector the DOM node selector to query
   * @param criterion the CSS query string plus an optional node attribute to extract
   * @return all converted matches, in document order
   */
  def extractAll[X: NodeConverter](selector: NodeSelector, criterion: (String, Option[String])): Vector[X] = {
    val (query, nodeAttribute) = criterion
    val parsedSelectors = selectorCache.get(query)
    val matchedNodes = selector.select(parsedSelectors).asScala
    matchedNodes.flatMap(node => NodeConverter[X].convert(node, nodeAttribute))(breakOut)
  }
}
{ "content_hash": "b4f318f4caa634e9855be50aa032d282", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 110, "avg_line_length": 32.92307692307692, "alnum_prop": 0.7827102803738317, "repo_name": "MykolaB/gatling", "id": "c38de1893d1d8e615f72e31a6e03a78f2c54222a", "size": "1901", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gatling-core/src/main/scala/io/gatling/core/check/extractor/css/CssSelectors.scala", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5186" }, { "name": "CSS", "bytes": "10422" }, { "name": "HTML", "bytes": "181019" }, { "name": "JavaScript", "bytes": "3690" }, { "name": "Python", "bytes": "7732" }, { "name": "Scala", "bytes": "1937307" }, { "name": "Shell", "bytes": "6257" } ], "symlink_target": "" }
module SpreeGiftWrapping
  # Rails engine that adds gift-wrapping support to a Spree store.
  class Engine < ::Rails::Engine
    isolate_namespace SpreeGiftWrapping

    # Register the calculators that may be used to price a gift wrapping.
    initializer "spree.register.calculators_for_gift_wrappings" do |app|
      app.config.spree.calculators.gift_wrappings = [
        Spree::Calculator::FlatPercentItemTotal,
        Spree::Calculator::FlatRate,
        #Spree::Calculator::FlexiRate,
        Spree::Calculator::PerItem,
        #Spree::Calculator::PriceSack
      ]
    end

    config.to_prepare do
      # Pick up decorators and overrides. When class caching is enabled the
      # files are require'd once; otherwise they are load'ed on each reload.
      # The original read the same setting through two different handles
      # (Rails.configuration and Rails.application.config); unified here.
      # NOTE(review): the overrides glob contains a doubled slash ("..//app");
      # Dir.glob tolerates it, so the string is preserved verbatim.
      [
        "../../../app/**/*_decorator.rb",
        "../../..//app/overrides/**/*.rb"
      ].each do |pattern|
        Dir.glob(File.expand_path(pattern, __FILE__)) do |file|
          Rails.configuration.cache_classes ? require(file) : load(file)
        end
      end
    end
  end
end
{ "content_hash": "fe7322636e60bf692828372289a2803f", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 83, "avg_line_length": 32.32, "alnum_prop": 0.629950495049505, "repo_name": "xmpolaris/spree_gift_wrapping", "id": "831a003f55937fd0790c17c744c5aa920ca3e084", "size": "808", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/spree_gift_wrapping/engine.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "10479" } ], "symlink_target": "" }
import config from 'config';
import RadarrAPI from 'sonarr-api'; // The Radarr HTTP API is compatible with the Sonarr client for now

import serverConfig from '~/api/config.js';
import {PROVIDER_TYPE} from '~/api/getProvider.js';

/**
 * @typedef {Object} MediaResult
 * @property {String} title
 * @property {String} slug
 * @property {Number} year
 * @property {String} tmdbId
 * @property {String} imdbId
 * @property {Array} images
 * @property {String} [status]
 * @property {String} [quality]
 */

// Module-level state: lazily-created API client and cached quality profiles.
let _radarr, _qualityProfiles;

/**
 * Returns the shared Radarr API client, creating it on first use.
 *
 * @returns {RadarrAPI}
 */
export default function radarr() {
  if (!_radarr) {
    _radarr = new RadarrAPI(serverConfig(PROVIDER_TYPE.MOVIES));
  }
  return _radarr;
}

/**
 * Returns the list of movies currently being tracked by radarr.
 *
 * @param {String} [title] -- Optional title to filter the list of movies by.
 * @returns {Array<MediaResult>}
 */
export async function list(title) {
  await loadQualityProfiles();

  const resp = await radarr().get('movie');
  const movies = resp.map(mapToMediaResult);

  if (!title) {
    return movies;
  }

  // Hoist the lowercased needle out of the filter callback.
  const needle = title.toLowerCase();
  return movies.filter((movie) => movie.title.toLowerCase().includes(needle));
}

/**
 * Searches for a new movie not in the library.
 *
 * @param {String} query -- Search query to use when looking for the movie.
 * @returns {Array<MediaResult>}
 */
export async function search(query) {
  await loadQualityProfiles();

  const resp = await radarr().get('movies/lookup', {term: query});
  return resp.map(mapToMediaResult);
}

/**
 * Adds a movie to the library, kicking off an immediate search for it.
 *
 * @param {MediaResult} movie -- movie to add to the library.
 * @returns {Object} -- radarr response object
 */
export async function add(movie) {
  const [rootFolderResp] = await radarr().get('rootfolder');
  const preferredQuality = getPreferredQuality();
  const qualities = await loadQualityProfiles();
  const quality = qualities.find((qt) => {
    return qt.name === preferredQuality;
  });

  // `return await` is redundant in an async function; return the promise directly.
  return radarr().post('movie', {
    tmdbId: movie.tmdbId,
    title: movie.title,
    titleSlug: movie.slug,
    images: movie.images,
    qualityProfileId: quality ? quality.id : 1, // Default to 'Any' if no profile set in config
    rootFolderPath: rootFolderResp.path,
    addOptions: {
      searchForMovie: true
    }
  });
}

/**
 * Loads (and memoizes) the quality profiles defined on the radarr server.
 *
 * @returns {Array<Object>}
 */
async function loadQualityProfiles() {
  if (!_qualityProfiles) {
    _qualityProfiles = await radarr().get('profile');
  }
  return _qualityProfiles;
}

/**
 * Maps a raw radarr movie payload to a MediaResult.
 * Assumes loadQualityProfiles() has already populated the profile cache.
 *
 * @param {Object} movie -- raw movie object from the radarr API
 * @returns {MediaResult}
 */
function mapToMediaResult(movie) {
  const preferredQuality = getPreferredQuality();
  const quality = _qualityProfiles.find((profile) => {
    return profile.id === movie.qualityProfileId || profile.name === preferredQuality;
  });

  return {
    title: movie.title,
    slug: movie.titleSlug,
    year: movie.year,
    tmdbId: movie.tmdbId,
    imdbId: movie.imdbId,
    images: movie.images,
    status: movie.status,
    quality: quality ? quality.name : 'Any'
  };
}

/**
 * Reads the preferred quality profile name from the app config, if set.
 *
 * @returns {?String} the configured profile name, or null when unset
 */
function getPreferredQuality() {
  const path = `alexa-libby.${PROVIDER_TYPE.MOVIES}.quality`;
  if (config.has(path)) {
    return config.get(path);
  }
  return null;
}
{ "content_hash": "1c4904748eeb5db62832e2865d747734", "timestamp": "", "source": "github", "line_count": 124, "max_line_length": 95, "avg_line_length": 24.911290322580644, "alnum_prop": 0.6756231790223374, "repo_name": "josephschmitt/alexa-libby", "id": "d7b751dbe5daec38aca1b7ef04642a5340f5366a", "size": "3089", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/api/radarr.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "108262" } ], "symlink_target": "" }
import sys

#
# Handler
# defines an interface for handling requests and
# optionally implements the successor link
#
class Handler:
    """Base handler: holds an optional successor and forwards requests to it."""

    def __init__(self):
        # Next handler in the chain; None terminates the chain.
        self._successor = None

    def setHandler(self, successor):
        """Attach the next handler in the chain."""
        self._successor = successor

    def handleRequest(self):
        """Forward the request along the chain, if a successor is attached."""
        if self._successor is not None:
            self._successor.handleRequest()

#
# Concrete Handlers
# handle requests they are responsible for
#
class ConcreteHandler1(Handler):
    """Concrete handler that cannot handle the request and must forward it."""

    def __init__(self):
        super().__init__()
        self._can_handle = False

    def handleRequest(self):
        if self._can_handle:
            print("Handled by Concrete Handler 1.")
        else:
            print("Cannot be handled by Handler 1.")
            # Not responsible for the request: delegate to the successor.
            super().handleRequest()

class ConcreteHandler2(Handler):
    """Concrete handler that handles the request itself."""

    def __init__(self):
        super().__init__()
        self._can_handle = True

    def handleRequest(self):
        if self._can_handle:
            print("Handled by Concrete Handler 2.")
        else:
            print("Cannot be handled by Handler 2.")
            super().handleRequest()

if __name__ == "__main__":
    handler1 = ConcreteHandler1()
    handler2 = ConcreteHandler2()

    handler1.setHandler(handler2)
    handler1.handleRequest()
{ "content_hash": "99cd97fa33a198ba28671ee9f9b87503", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 48, "avg_line_length": 21.962962962962962, "alnum_prop": 0.6602023608768971, "repo_name": "JakubVojvoda/design-patterns-python", "id": "5a54c5e1daebeef7a07aa8512209b44cd2e4788d", "size": "1379", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "chain-of-responsibility/ChainOfResponsibility.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "32357" } ], "symlink_target": "" }
# Machine-generated compiler description written by CMake's configure step
# (CMakeCCompiler.cmake). It records the detected C compiler, its features,
# and its implicit link settings so later runs can skip re-detection.
# NOTE(review): regenerated by CMake; hand edits will be overwritten.

# Detected compiler and its identity/feature set.
set(CMAKE_C_COMPILER "/Library/Developer/CommandLineTools/usr/bin/cc")
set(CMAKE_C_COMPILER_ARG1 "")
set(CMAKE_C_COMPILER_ID "AppleClang")
set(CMAKE_C_COMPILER_VERSION "7.3.0.7030031")
set(CMAKE_C_COMPILER_WRAPPER "")
set(CMAKE_C_STANDARD_COMPUTED_DEFAULT "11")
set(CMAKE_C_COMPILE_FEATURES "c_function_prototypes;c_restrict;c_variadic_macros;c_static_assert")
set(CMAKE_C90_COMPILE_FEATURES "c_function_prototypes")
set(CMAKE_C99_COMPILE_FEATURES "c_restrict;c_variadic_macros")
set(CMAKE_C11_COMPILE_FEATURES "c_static_assert")

set(CMAKE_C_PLATFORM_ID "Darwin")
set(CMAKE_C_SIMULATE_ID "")
set(CMAKE_C_SIMULATE_VERSION "")

# Binutils discovered alongside the compiler.
set(CMAKE_AR "/Library/Developer/CommandLineTools/usr/bin/ar")
set(CMAKE_RANLIB "/Library/Developer/CommandLineTools/usr/bin/ranlib")
set(CMAKE_LINKER "/Library/Developer/CommandLineTools/usr/bin/ld")
set(CMAKE_COMPILER_IS_GNUCC )
set(CMAKE_C_COMPILER_LOADED 1)
set(CMAKE_C_COMPILER_WORKS TRUE)
set(CMAKE_C_ABI_COMPILED TRUE)
set(CMAKE_COMPILER_IS_MINGW )
set(CMAKE_COMPILER_IS_CYGWIN )
if(CMAKE_COMPILER_IS_CYGWIN)
  set(CYGWIN 1)
  set(UNIX 1)
endif()

set(CMAKE_C_COMPILER_ENV_VAR "CC")

if(CMAKE_COMPILER_IS_MINGW)
  set(MINGW 1)
endif()
set(CMAKE_C_COMPILER_ID_RUN 1)
set(CMAKE_C_SOURCE_FILE_EXTENSIONS c;m)
set(CMAKE_C_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC)
set(CMAKE_C_LINKER_PREFERENCE 10)

# Save compiler ABI information.
set(CMAKE_C_SIZEOF_DATA_PTR "8")
set(CMAKE_C_COMPILER_ABI "")
set(CMAKE_C_LIBRARY_ARCHITECTURE "")

if(CMAKE_C_SIZEOF_DATA_PTR)
  set(CMAKE_SIZEOF_VOID_P "${CMAKE_C_SIZEOF_DATA_PTR}")
endif()

if(CMAKE_C_COMPILER_ABI)
  set(CMAKE_INTERNAL_PLATFORM_ABI "${CMAKE_C_COMPILER_ABI}")
endif()

if(CMAKE_C_LIBRARY_ARCHITECTURE)
  set(CMAKE_LIBRARY_ARCHITECTURE "")
endif()

set(CMAKE_C_CL_SHOWINCLUDES_PREFIX "")
if(CMAKE_C_CL_SHOWINCLUDES_PREFIX)
  set(CMAKE_CL_SHOWINCLUDES_PREFIX "${CMAKE_C_CL_SHOWINCLUDES_PREFIX}")
endif()

# Libraries/directories the compiler links implicitly; CMake subtracts these
# from explicit link lines.
set(CMAKE_C_IMPLICIT_LINK_LIBRARIES "/Library/Developer/CommandLineTools/usr/lib/clang/7.3.0/lib/darwin/libclang_rt.osx.a")
set(CMAKE_C_IMPLICIT_LINK_DIRECTORIES "/usr/lib;/usr/local/lib")
set(CMAKE_C_IMPLICIT_LINK_FRAMEWORK_DIRECTORIES "/Library/Frameworks;/System/Library/Frameworks")
{ "content_hash": "3cfa61a553d11c8480c43ec3c0e2f664", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 123, "avg_line_length": 32.55223880597015, "alnum_prop": 0.7579092159559835, "repo_name": "dwildmark/CppLab", "id": "406da9846e06c8726ca41582e578bbddbd1875e0", "size": "2181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Lab3/build/CMakeFiles/3.6.2/CMakeCCompiler.cmake", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "17554" }, { "name": "C++", "bytes": "55218" }, { "name": "CMake", "bytes": "14035" }, { "name": "Makefile", "bytes": "17776" } ], "symlink_target": "" }
from __future__ import print_function import datetime import os import unittest import mock import nose import six from airflow import DAG, configuration, operators configuration.load_test_config() DEFAULT_DATE = datetime.datetime(2015, 1, 1) DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat() DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10] if 'AIRFLOW_RUNALL_TESTS' in os.environ: import airflow.hooks.hive_hooks import airflow.operators.presto_to_mysql class HiveServer2Test(unittest.TestCase): def setUp(self): configuration.load_test_config() self.nondefault_schema = "nondefault" def test_select_conn(self): from airflow.hooks.hive_hooks import HiveServer2Hook sql = "select 1" hook = HiveServer2Hook() hook.get_records(sql) def test_multi_statements(self): from airflow.hooks.hive_hooks import HiveServer2Hook sqls = [ "CREATE TABLE IF NOT EXISTS test_multi_statements (i INT)", "DROP TABLE test_multi_statements", ] hook = HiveServer2Hook() hook.get_records(sqls) def test_get_metastore_databases(self): if six.PY2: from airflow.hooks.hive_hooks import HiveMetastoreHook hook = HiveMetastoreHook() hook.get_databases() def test_to_csv(self): from airflow.hooks.hive_hooks import HiveServer2Hook sql = "select 1" hook = HiveServer2Hook() hook.to_csv(hql=sql, csv_filepath="/tmp/test_to_csv") def connect_mock(self, host, port, auth_mechanism, kerberos_service_name, user, database): self.assertEqual(database, self.nondefault_schema) @mock.patch('HiveServer2Hook.connect', return_value="foo") def test_select_conn_with_schema(self, connect_mock): from airflow.hooks.hive_hooks import HiveServer2Hook # Configure hook = HiveServer2Hook() # Run hook.get_conn(self.nondefault_schema) # Verify assert connect_mock.called (args, kwargs) = connect_mock.call_args_list[0] assert kwargs['database'] == self.nondefault_schema def test_get_results_with_schema(self): from airflow.hooks.hive_hooks import HiveServer2Hook from unittest.mock import MagicMock # Configure sql = "select 1" schema = "notdefault" hook = 
HiveServer2Hook() cursor_mock = MagicMock( __enter__=cursor_mock, __exit__=None, execute=None, fetchall=[], ) get_conn_mock = MagicMock( __enter__=get_conn_mock, __exit__=None, cursor=cursor_mock, ) hook.get_conn = get_conn_mock # Run hook.get_results(sql, schema) # Verify get_conn_mock.assert_called_with(self.nondefault_schema) @mock.patch('HiveServer2Hook.get_results', return_value={'data': []}) def test_get_records_with_schema(self, get_results_mock): from airflow.hooks.hive_hooks import HiveServer2Hook # Configure sql = "select 1" hook = HiveServer2Hook() # Run hook.get_records(sql, self.nondefault_schema) # Verify assert self.connect_mock.called (args, kwargs) = self.connect_mock.call_args_list[0] assert args[0] == sql assert kwargs['schema'] == self.nondefault_schema @mock.patch('HiveServer2Hook.get_results', return_value={'data': []}) def test_get_pandas_df_with_schema(self, get_results_mock): from airflow.hooks.hive_hooks import HiveServer2Hook # Configure sql = "select 1" hook = HiveServer2Hook() # Run hook.get_pandas_df(sql, self.nondefault_schema) # Verify assert self.connect_mock.called (args, kwargs) = self.connect_mock.call_args_list[0] assert args[0] == sql assert kwargs['schema'] == self.nondefault_schema class HivePrestoTest(unittest.TestCase): def setUp(self): configuration.load_test_config() args = {'owner': 'airflow', 'start_date': DEFAULT_DATE} dag = DAG('test_dag_id', default_args=args) self.dag = dag self.hql = """ USE airflow; DROP TABLE IF EXISTS static_babynames_partitioned; CREATE TABLE IF NOT EXISTS static_babynames_partitioned ( state string, year string, name string, gender string, num int) PARTITIONED BY (ds string); INSERT OVERWRITE TABLE static_babynames_partitioned PARTITION(ds='{{ ds }}') SELECT state, year, name, gender, num FROM static_babynames; """ def test_hive(self): import airflow.operators.hive_operator t = operators.hive_operator.HiveOperator( task_id='basic_hql', hql=self.hql, dag=self.dag) t.run(start_date=DEFAULT_DATE, 
end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hive_queues(self): import airflow.operators.hive_operator t = operators.hive_operator.HiveOperator( task_id='test_hive_queues', hql=self.hql, mapred_queue='default', mapred_queue_priority='HIGH', mapred_job_name='airflow.test_hive_queues', dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hive_dryrun(self): import airflow.operators.hive_operator t = operators.hive_operator.HiveOperator( task_id='dry_run_basic_hql', hql=self.hql, dag=self.dag) t.dry_run() def test_beeline(self): import airflow.operators.hive_operator t = operators.hive_operator.HiveOperator( task_id='beeline_hql', hive_cli_conn_id='beeline_default', hql=self.hql, dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_presto(self): sql = """ SELECT count(1) FROM airflow.static_babynames_partitioned; """ import airflow.operators.presto_check_operator t = operators.presto_check_operator.PrestoCheckOperator( task_id='presto_check', sql=sql, dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_presto_to_mysql(self): import airflow.operators.presto_to_mysql t = operators.presto_to_mysql.PrestoToMySqlTransfer( task_id='presto_to_mysql_check', sql=""" SELECT name, count(*) as ccount FROM airflow.static_babynames GROUP BY name """, mysql_table='test_static_babynames', mysql_preoperator='TRUNCATE TABLE test_static_babynames;', dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hdfs_sensor(self): t = operators.sensors.HdfsSensor( task_id='hdfs_sensor_check', filepath='hdfs://user/hive/warehouse/airflow.db/static_babynames', dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_webhdfs_sensor(self): t = operators.sensors.WebHdfsSensor( task_id='webhdfs_sensor_check', 
filepath='hdfs://user/hive/warehouse/airflow.db/static_babynames', timeout=120, dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_sql_sensor(self): t = operators.sensors.SqlSensor( task_id='hdfs_sensor_check', conn_id='presto_default', sql="SELECT 'x' FROM airflow.static_babynames LIMIT 1;", dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hive_stats(self): import airflow.operators.hive_stats_operator t = operators.hive_stats_operator.HiveStatsCollectionOperator( task_id='hive_stats_check', table="airflow.static_babynames_partitioned", partition={'ds': DEFAULT_DATE_DS}, dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_named_hive_partition_sensor(self): t = operators.sensors.NamedHivePartitionSensor( task_id='hive_partition_check', partition_names=[ "airflow.static_babynames_partitioned/ds={{ds}}" ], dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_named_hive_partition_sensor_succeeds_on_multiple_partitions(self): t = operators.sensors.NamedHivePartitionSensor( task_id='hive_partition_check', partition_names=[ "airflow.static_babynames_partitioned/ds={{ds}}", "airflow.static_babynames_partitioned/ds={{ds}}" ], dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_named_hive_partition_sensor_parses_partitions_with_periods(self): t = operators.sensors.NamedHivePartitionSensor.parse_partition_name( partition="schema.table/part1=this.can.be.an.issue/part2=ok") self.assertEqual(t[0], "schema") self.assertEqual(t[1], "table") self.assertEqual(t[2], "part1=this.can.be.an.issue/part2=this_should_be_ok") @nose.tools.raises(airflow.exceptions.AirflowSensorTimeout) def test_named_hive_partition_sensor_times_out_on_nonexistent_partition(self): t = operators.sensors.NamedHivePartitionSensor( task_id='hive_partition_check', 
partition_names=[ "airflow.static_babynames_partitioned/ds={{ds}}", "airflow.static_babynames_partitioned/ds=nonexistent" ], poke_interval=0.1, timeout=1, dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hive_partition_sensor(self): t = operators.sensors.HivePartitionSensor( task_id='hive_partition_check', table='airflow.static_babynames_partitioned', dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hive_metastore_sql_sensor(self): t = operators.sensors.MetastorePartitionSensor( task_id='hive_partition_check', table='airflow.static_babynames_partitioned', partition_name='ds={}'.format(DEFAULT_DATE_DS), dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hive2samba(self): import airflow.operators.hive_to_samba_operator t = operators.hive_to_samba_operator.Hive2SambaOperator( task_id='hive2samba_check', samba_conn_id='tableau_samba', hql="SELECT * FROM airflow.static_babynames LIMIT 10000", destination_filepath='test_airflow.csv', dag=self.dag) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) def test_hive_to_mysql(self): import airflow.operators.hive_to_mysql t = operators.hive_to_mysql.HiveToMySqlTransfer( mysql_conn_id='airflow_db', task_id='hive_to_mysql_check', create=True, sql=""" SELECT name FROM airflow.static_babynames LIMIT 100 """, mysql_table='test_static_babynames', mysql_preoperator=[ 'DROP TABLE IF EXISTS test_static_babynames;', 'CREATE TABLE test_static_babynames (name VARCHAR(500))', ], dag=self.dag) t.clear(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
{ "content_hash": "c34eae1335378a4333913baeb3d07a5b", "timestamp": "", "source": "github", "line_count": 340, "max_line_length": 88, "avg_line_length": 39.46764705882353, "alnum_prop": 0.5565243311722184, "repo_name": "gritlogic/incubator-airflow", "id": "fec5e69e916a4e5c5ddcb0789a97b0f14b471dd8", "size": "13986", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/operators/hive_operator.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "56963" }, { "name": "HTML", "bytes": "144956" }, { "name": "JavaScript", "bytes": "1370838" }, { "name": "Mako", "bytes": "1037" }, { "name": "Python", "bytes": "1745301" }, { "name": "Shell", "bytes": "19298" } ], "symlink_target": "" }
include_recipe 'masala_base::default' # Apply ulmits (see attributes/default.rb) include_recipe "ulimit" #primary_if = node['network']['interfaces'][node['system']['primary_interface']] #primary_addrs = primary_if['addresses'] #primary_addrs_ipv4 = primary_addrs.select { |_addr, attrs| attrs['family'] == 'inet' } #primary_ip = primary_addrs_ipv4.keys.first #node.default['apache_spark']['standalone']['worker_bind_ip'] = primary_ip include_recipe 'apache_spark::spark-standalone-worker' include_recipe 'masala_spark::cron'
{ "content_hash": "6d8485a35fbedacdcd319777bdd84106", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 87, "avg_line_length": 40.61538461538461, "alnum_prop": 0.7310606060606061, "repo_name": "PaytmLabs/masala_spark", "id": "549bec2c361fadac2fb4fed7c44ffdbea6058ca4", "size": "1157", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "recipes/worker.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Ruby", "bytes": "6954" } ], "symlink_target": "" }
<component name="libraryTable"> <library name="espresso-core-2.2.2"> <CLASSES> <root url="file://$PROJECT_DIR$/app/build/intermediates/exploded-aar/com.android.support.test.espresso/espresso-core/2.2.2/res" /> <root url="jar://$PROJECT_DIR$/app/build/intermediates/exploded-aar/com.android.support.test.espresso/espresso-core/2.2.2/jars/classes.jar!/" /> </CLASSES> <JAVADOC /> <SOURCES> <root url="jar://D:/develop/SDK/extras/android/m2repository/com/android/support/test/espresso/espresso-core/2.2.2/espresso-core-2.2.2-sources.jar!/" /> </SOURCES> </library> </component>
{ "content_hash": "0a9fa251c80fb023aded3783d6ee013f", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 157, "avg_line_length": 51.333333333333336, "alnum_prop": 0.6931818181818182, "repo_name": "chenjf0515/DouBiWeahter", "id": "14f6a08abc07b4075d36a4cf82a1a6e61deb0232", "size": "616", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": ".idea/libraries/espresso_core_2_2_2.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "36161" } ], "symlink_target": "" }
require 'test_helper'

# Functional tests for StatsController#index: headline counts, the top gem,
# and the download-share meter widths.
class StatsControllerTest < ActionController::TestCase
  context "on GET to index" do
    setup do
      @number_of_gems = 1337
      @number_of_users = 101
      @number_of_downloads = 42
      rails_cinco = create(:rubygem, name: 'rails_cinco', number: 1)
      Rubygem.stubs(:total_count).returns @number_of_gems
      User.stubs(:count).returns @number_of_users
      create(:gem_download, count: @number_of_downloads)
      rails_cinco.gem_download.update(count: 1)
      get :index
    end

    should respond_with :success
    should render_template :index

    should "display number of gems" do
      assert page.has_content?("1,337")
    end

    should "display number of users" do
      assert page.has_content?("101")
    end

    should "display number of downloads" do
      assert page.has_content?("42")
    end

    should "display the top gem" do
      assert page.has_content?("rails_cinco")
    end

    should "load up the number of gems, users, and downloads" do
      assert_received(User, :count)
      assert_received(Rubygem, :total_count)
    end
  end

  context "on GET to index with no downloads" do
    setup do
      get :index
    end

    should respond_with :success
  end

  context "on GET to index with multiple gems" do
    setup do
      create(:gem_download, count: 0)
      rg1 = create(:rubygem, downloads: 10, number: "1")
      rg2 = create(:rubygem, downloads: 20, number: "1")
      rg3 = create(:rubygem, downloads: 30, number: "1")
      n = 10
      data = [rg1, rg2, rg3].map { |r| [r.versions.last.full_name, n += 10] }
      GemDownload.bulk_update(data)
      get :index
    end

    should "not have width greater than 100%" do
      assert_select ".stats__graph__gem__meter" do |element|
        element.map { |h| h[:style] }.each do |width|
          # BUG FIX: the original discarded the result of `=~`, so
          # Regexp.last_match(1) could be nil (or stale from an earlier
          # match), making the width assertion vacuous. Match explicitly and
          # require a match before comparing.
          match = width.match(/width\: (\d+[,.]\d+)%/)
          assert match, "no percentage width found in style #{width.inspect}"
          assert match[1].to_f <= 100, "#{match[1]} is greater than 100"
        end
      end
    end
  end
end
{ "content_hash": "eb407e0147ac3ba08e10175c7dfb1a33", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 96, "avg_line_length": 26.866666666666667, "alnum_prop": 0.6158808933002481, "repo_name": "spk/rubygems.org", "id": "d9358dba08a9aa31373d943eb6223e2211563ab3", "size": "2015", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/functional/stats_controller_test.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "59731" }, { "name": "HTML", "bytes": "65066" }, { "name": "JavaScript", "bytes": "25887" }, { "name": "Ruby", "bytes": "478396" }, { "name": "Shell", "bytes": "7211" } ], "symlink_target": "" }
package com.amazonaws.services.elasticloadbalancingv2.model;

import javax.annotation.Generated;

/**
 * A target group with the specified name already exists.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DuplicateTargetGroupNameException extends com.amazonaws.services.elasticloadbalancingv2.model.AmazonElasticLoadBalancingException {
    // Fixed serialization version; generated exception types pin this to 1.
    private static final long serialVersionUID = 1L;

    /**
     * Constructs a new DuplicateTargetGroupNameException with the specified error message.
     *
     * @param message
     *        Describes the error encountered.
     */
    public DuplicateTargetGroupNameException(String message) {
        super(message);
    }
}
{ "content_hash": "23c4c5679ee93c2a952222c4cd215ce5", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 144, "avg_line_length": 28.6, "alnum_prop": 0.737062937062937, "repo_name": "aws/aws-sdk-java", "id": "298971ee6bb213cf67dec6a538308932244d6dd1", "size": "1295", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-java-sdk-elasticloadbalancingv2/src/main/java/com/amazonaws/services/elasticloadbalancingv2/model/DuplicateTargetGroupNameException.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package io.druid.segment.loading;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.NoneShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;

/**
 * Verifies that {@link LocalDataSegmentKiller#kill} deletes a segment's
 * partition directory and also prunes parent directories (version, interval,
 * dataSource) once their last child is removed. The assertions below are
 * order-dependent: each kill checks both what was removed and what survived.
 */
public class LocalDataSegmentKillerTest
{
  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Test
  public void testKill() throws Exception
  {
    LocalDataSegmentKiller killer = new LocalDataSegmentKiller(new LocalDataSegmentPusherConfig());

    // Create following segments and then delete them in this order and assert directory deletions
    // /tmp/dataSource/interval1/v1/0/index.zip
    // /tmp/dataSource/interval1/v1/1/index.zip
    // /tmp/dataSource/interval1/v2/0/index.zip
    // /tmp/dataSource/interval2/v1/0/index.zip

    final File dataSourceDir = temporaryFolder.newFolder();

    File interval1Dir = new File(dataSourceDir, "interval1");
    File version11Dir = new File(interval1Dir, "v1");
    File partition011Dir = new File(version11Dir, "0");
    File partition111Dir = new File(version11Dir, "1");

    makePartitionDirWithIndex(partition011Dir);
    makePartitionDirWithIndex(partition111Dir);

    File version21Dir = new File(interval1Dir, "v2");
    File partition021Dir = new File(version21Dir, "0");

    makePartitionDirWithIndex(partition021Dir);

    File interval2Dir = new File(dataSourceDir, "interval2");
    File version12Dir = new File(interval2Dir, "v1");
    File partition012Dir = new File(version12Dir, "0");

    makePartitionDirWithIndex(partition012Dir);

    // Killing one of two partitions of v1: only that partition dir goes away.
    killer.kill(getSegmentWithPath(new File(partition011Dir, "index.zip").toString()));

    Assert.assertFalse(partition011Dir.exists());
    Assert.assertTrue(partition111Dir.exists());
    Assert.assertTrue(partition021Dir.exists());
    Assert.assertTrue(partition012Dir.exists());

    // Killing the last partition of v1: the now-empty version dir is pruned too.
    killer.kill(getSegmentWithPath(new File(partition111Dir, "index.zip").toString()));

    Assert.assertFalse(version11Dir.exists());
    Assert.assertTrue(partition021Dir.exists());
    Assert.assertTrue(partition012Dir.exists());

    // Killing the last segment under interval1: the interval dir is pruned.
    killer.kill(getSegmentWithPath(new File(partition021Dir, "index.zip").toString()));

    Assert.assertFalse(interval1Dir.exists());
    Assert.assertTrue(partition012Dir.exists());

    // Killing the final segment: the whole dataSource dir is pruned.
    killer.kill(getSegmentWithPath(new File(partition012Dir, "index.zip").toString()));

    Assert.assertFalse(dataSourceDir.exists());
  }

  // Creates the partition directory (and parents) holding an empty index.zip.
  private void makePartitionDirWithIndex(File path) throws IOException
  {
    Assert.assertTrue(path.mkdirs());
    Assert.assertTrue(new File(path, "index.zip").createNewFile());
  }

  // Builds a DataSegment whose "local" loadSpec points at the given index.zip.
  private DataSegment getSegmentWithPath(String path)
  {
    return new DataSegment(
        "dataSource",
        Interval.parse("2000/3000"),
        "ver",
        ImmutableMap.<String, Object>of(
            "type", "local",
            "path", path
        ),
        ImmutableList.of("product"),
        ImmutableList.of("visited_sum", "unique_hosts"),
        NoneShardSpec.instance(),
        9,
        12334
    );
  }
}
{ "content_hash": "4bdf594d86522af62b2201d94ca2e83f", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 99, "avg_line_length": 31.519607843137255, "alnum_prop": 0.7216174183514774, "repo_name": "solimant/druid", "id": "8240adf2e291aec2ed11f4bc09b75ad9c8c47e8f", "size": "4020", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "1406" }, { "name": "CSS", "bytes": "11623" }, { "name": "HTML", "bytes": "26739" }, { "name": "Java", "bytes": "16545418" }, { "name": "JavaScript", "bytes": "295150" }, { "name": "Makefile", "bytes": "659" }, { "name": "PostScript", "bytes": "5" }, { "name": "Protocol Buffer", "bytes": "729" }, { "name": "R", "bytes": "17002" }, { "name": "Roff", "bytes": "3617" }, { "name": "Shell", "bytes": "4892" }, { "name": "TeX", "bytes": "399444" }, { "name": "Thrift", "bytes": "199" } ], "symlink_target": "" }
<!doctype html>
<html>
<head>
  <title>JavaScript</title>
  <!-- SyntaxHighlighter: core + JavaScript brush for the code samples. -->
  <script src="../js/shCore.js"></script>
  <script src="../js/shBrushJScript.js"></script>
  <link href="../css/styles.css" rel="stylesheet" />
  <link href="../css/shCore.css" rel="stylesheet" />
  <link href="../css/shThemeDefault.css" rel="stylesheet" />
</head>
<body>
  <!-- h1/nav are left empty here; presumably filled in by js/nav.js using
       data-section — confirm against nav.js. -->
  <header data-section="exercise-07"><h1></h1><nav></nav></header>
  <main class="scroll-wrapper">
    <div class="scrollable">
      <p>This exercise will reinforce the following concepts:</p>
      <ul>
        <li>Creating elements within the page (p. 542-547)</li>
        <li>Copying elements within HTML <code>script</code> tags</li>
        <li>Using window properties</li>
        <li>Modifying element classes</li>
      </ul>
    </div>
  </main>
  <script src="../js/nav.js"></script>
  <script type="text/javascript">
    SyntaxHighlighter.all();
  </script>
</body>
</html>
{ "content_hash": "dcfff3fff934109bed8221dd4c500caa", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 74, "avg_line_length": 25, "alnum_prop": 0.6021052631578947, "repo_name": "nstublen/jccc-javascript-tutorial", "id": "10588cbd8e989afa5f17ba9ecc69ea4dadca6aed", "size": "950", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "17-exercise-07/intro.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "102908" }, { "name": "HTML", "bytes": "183901" }, { "name": "JavaScript", "bytes": "24109" } ], "symlink_target": "" }
package com.viewpagerindicator;

import android.support.v4.view.ViewPager;

/**
 * A PageIndicator is responsible for showing a visual indicator of the total
 * number of views and the currently visible view.
 */
public interface PageIndicator extends ViewPager.OnPageChangeListener {
    /**
     * Bind the indicator to a ViewPager.
     *
     * @param view the pager this indicator should reflect
     */
    void setViewPager(ViewPager view);

    /**
     * Bind the indicator to a ViewPager and jump to an initial page.
     *
     * @param view the pager this indicator should reflect
     * @param initialPosition page index to select after binding
     */
    void setViewPager(ViewPager view, int initialPosition);

    /**
     * <p>Set the current page of both the ViewPager and indicator.</p>
     *
     * <p>This <strong>must</strong> be used if you need to set the page before
     * the views are drawn on screen (e.g., default start page).</p>
     *
     * @param item page index to select
     */
    void setCurrentItem(int item);

    /**
     * Get the current page of both the ViewPager and indicator.
     *
     * @return the currently selected page index
     */
    int getCurrentItem();

    /**
     * Set a page change listener which will receive forwarded events.
     *
     * @param listener receiver of the forwarded ViewPager page events
     */
    void setOnPageChangeListener(ViewPager.OnPageChangeListener listener);

    /**
     * Notify the indicator that the fragment list has changed.
     */
    void notifyDataSetChanged();
}
{ "content_hash": "a8b6a534a55ce52e40306523e156aa0a", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 80, "avg_line_length": 24.333333333333332, "alnum_prop": 0.6385083713850838, "repo_name": "ivanovpv/darksms", "id": "5b6f1d505dcaa22e7ef3fd310d87a354cc9ae61f", "size": "1643", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "psm/src/main/java/com/viewpagerindicator/PageIndicator.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2172268" }, { "name": "C++", "bytes": "213333" }, { "name": "HTML", "bytes": "22428" }, { "name": "Java", "bytes": "2671718" }, { "name": "Makefile", "bytes": "7209" }, { "name": "Perl", "bytes": "16352" } ], "symlink_target": "" }
package approvers import ( "bytes" "encoding/json" "fmt" "math/rand" "path/filepath" "sort" "strings" "text/template" "github.com/golang/glog" "k8s.io/kubernetes/pkg/util/sets" "k8s.io/test-infra/mungegithub/features" c "k8s.io/test-infra/mungegithub/mungers/matchers/comment" ) const ( ownersFileName = "OWNERS" ApprovalNotificationName = "ApprovalNotifier" ) type RepoInterface interface { Approvers(path string) sets.String LeafApprovers(path string) sets.String FindApproverOwnersForPath(path string) string } type RepoAlias struct { repo RepoInterface alias features.Aliases } func NewRepoAlias(repo RepoInterface, alias features.Aliases) *RepoAlias { return &RepoAlias{ repo: repo, alias: alias, } } func (r *RepoAlias) Approvers(path string) sets.String { return r.alias.Expand(r.repo.Approvers(path)) } func (r *RepoAlias) LeafApprovers(path string) sets.String { return r.alias.Expand(r.repo.LeafApprovers(path)) } func (r *RepoAlias) FindApproverOwnersForPath(path string) string { return r.repo.FindApproverOwnersForPath(path) } type Owners struct { filenames []string repo RepoInterface seed int64 } func NewOwners(filenames []string, r RepoInterface, s int64) Owners { return Owners{filenames: filenames, repo: r, seed: s} } // GetApprovers returns a map from ownersFiles -> people that are approvers in them func (o Owners) GetApprovers() map[string]sets.String { ownersToApprovers := map[string]sets.String{} for fn := range o.GetOwnersSet() { ownersToApprovers[fn] = o.repo.Approvers(fn) } return ownersToApprovers } // GetLeafApprovers returns a map from ownersFiles -> people that are approvers in them (only the leaf) func (o Owners) GetLeafApprovers() map[string]sets.String { ownersToApprovers := map[string]sets.String{} for fn := range o.GetOwnersSet() { ownersToApprovers[fn] = o.repo.LeafApprovers(fn) } return ownersToApprovers } // GetAllPotentialApprovers returns the people from relevant owners files needed to get the PR approved func (o Owners) 
GetAllPotentialApprovers() []string { approversOnly := []string{} for _, approverList := range o.GetLeafApprovers() { for approver := range approverList { approversOnly = append(approversOnly, approver) } } sort.Strings(approversOnly) return approversOnly } // GetReverseMap returns a map from people -> OWNERS files for which they are an approver func (o Owners) GetReverseMap(approvers map[string]sets.String) map[string]sets.String { approverOwnersfiles := map[string]sets.String{} for ownersFile, approvers := range approvers { for approver := range approvers { if _, ok := approverOwnersfiles[approver]; ok { approverOwnersfiles[approver].Insert(ownersFile) } else { approverOwnersfiles[approver] = sets.NewString(ownersFile) } } } return approverOwnersfiles } func findMostCoveringApprover(allApprovers []string, reverseMap map[string]sets.String, unapproved sets.String) string { maxCovered := 0 var bestPerson string for _, approver := range allApprovers { filesCanApprove := reverseMap[approver] if filesCanApprove.Intersection(unapproved).Len() > maxCovered { maxCovered = len(filesCanApprove) bestPerson = approver } } return bestPerson } // temporaryUnapprovedFiles returns the list of files that wouldn't be // approved by the given set of approvers. func (o Owners) temporaryUnapprovedFiles(approvers sets.String) sets.String { ap := NewApprovers(o) for approver := range approvers { ap.AddApprover(approver, "", false) } return ap.UnapprovedFiles() } // KeepCoveringApprovers finds who we should keep as suggested approvers given a pre-selection // knownApprovers must be a subset of potentialApprovers. 
func (o Owners) KeepCoveringApprovers(reverseMap map[string]sets.String, knownApprovers sets.String, potentialApprovers []string) sets.String {
	keptApprovers := sets.NewString()

	// Files still unapproved after the already-known approvers are applied.
	unapproved := o.temporaryUnapprovedFiles(knownApprovers)

	for _, suggestedApprover := range o.GetSuggestedApprovers(reverseMap, potentialApprovers).List() {
		// Keep a suggestion only if it covers at least one unapproved file.
		if reverseMap[suggestedApprover].Intersection(unapproved).Len() != 0 {
			keptApprovers.Insert(suggestedApprover)
		}
	}

	return keptApprovers
}

// GetSuggestedApprovers solves the exact cover problem, finding an approver capable of
// approving every OWNERS file in the PR
func (o Owners) GetSuggestedApprovers(reverseMap map[string]sets.String, potentialApprovers []string) sets.String {
	ap := NewApprovers(o)
	for !ap.IsApproved() {
		// Greedily add the approver covering the most unapproved files; bail
		// out with the partial set if nobody covers anything (avoids looping).
		newApprover := findMostCoveringApprover(potentialApprovers, reverseMap, ap.UnapprovedFiles())
		if newApprover == "" {
			glog.Errorf("Couldn't find/suggest approvers for each files. Unapproved: %s", ap.UnapprovedFiles())
			return ap.GetCurrentApproversSet()
		}
		ap.AddApprover(newApprover, "", false)
	}

	return ap.GetCurrentApproversSet()
}

// GetOwnersSet returns a set containing all the Owners files necessary to get the PR approved
func (o Owners) GetOwnersSet() sets.String {
	owners := sets.NewString()
	for _, fn := range o.filenames {
		owners.Insert(o.repo.FindApproverOwnersForPath(fn))
	}

	return removeSubdirs(owners.List())
}

// GetShuffledApprovers shuffles the potential approvers so that we don't
// always suggest the same people (deterministic per-PR via the stored seed).
func (o Owners) GetShuffledApprovers() []string {
	approversList := o.GetAllPotentialApprovers()
	order := rand.New(rand.NewSource(o.seed)).Perm(len(approversList))
	people := make([]string, 0, len(approversList))
	for _, i := range order {
		people = append(people, approversList[i])
	}
	return people
}

// removeSubdirs takes a list of directories as an input and returns a set of directories with all
// subdirectories removed. E.g. [/a,/a/b/c,/d/e,/d/e/f] -> [/a, /d/e]
// NOTE(review): this uses plain string HasPrefix, so "/a/b" would also be
// treated as a parent of "/a/bc"; presumably the paths produced by
// FindApproverOwnersForPath never collide this way — confirm.
func removeSubdirs(dirList []string) sets.String {
	toDel := sets.String{}
	for i := 0; i < len(dirList)-1; i++ {
		for j := i + 1; j < len(dirList); j++ {
			// ex /a/b has prefix /a so if remove /a/b since its already covered
			if strings.HasPrefix(dirList[i], dirList[j]) {
				toDel.Insert(dirList[i])
			} else if strings.HasPrefix(dirList[j], dirList[i]) {
				toDel.Insert(dirList[j])
			}
		}
	}
	finalSet := sets.NewString(dirList...)
	finalSet.Delete(toDel.List()...)
	return finalSet
}

// Approval has the information about each approval on a PR
type Approval struct {
	Login     string // Login of the approver
	How       string // How did the approver approved
	Reference string // Where did the approver approved
	NoIssue   bool   // Approval also accepts missing associated issue
}

// String creates a link for the approval. Use `Login` if you just want the name.
func (a Approval) String() string {
	return fmt.Sprintf(
		`*<a href="%s" title="%s">%s</a>*`,
		a.Reference,
		a.How,
		a.Login,
	)
}

// Approvers tracks the approvals collected so far for one PR.
type Approvers struct {
	owners          Owners
	approvers       map[string]Approval
	assignees       sets.String
	AssociatedIssue int
}

// IntersectSetsCase runs the intersection between to sets.String in a
// case-insensitive way. It returns the name with the case of "one".
func IntersectSetsCase(one, other sets.String) sets.String {
	lower := sets.NewString()
	for item := range other {
		lower.Insert(strings.ToLower(item))
	}

	intersection := sets.NewString()
	for item := range one {
		if lower.Has(strings.ToLower(item)) {
			intersection.Insert(item)
		}
	}
	return intersection
}

// NewApprovers creates a new "Approvers" with no approval.
func NewApprovers(owners Owners) Approvers {
	return Approvers{
		owners:    owners,
		approvers: map[string]Approval{},
		assignees: sets.NewString(),
	}
}

// shouldNotOverrideApproval decides whether or not we should keep the
// original approval:
// If someone approves a PR multiple times, we only want to keep the
// latest approval, unless a previous approval was "no-issue", and the
// most recent isn't.
func (ap *Approvers) shouldNotOverrideApproval(login string, noIssue bool) bool {
	approval, alreadyApproved := ap.approvers[login]

	return alreadyApproved && approval.NoIssue && !noIssue
}

// AddLGTMer adds a new LGTM Approver
func (ap *Approvers) AddLGTMer(login, reference string, noIssue bool) {
	if ap.shouldNotOverrideApproval(login, noIssue) {
		return
	}
	ap.approvers[login] = Approval{
		Login:     login,
		How:       "LGTM",
		Reference: reference,
		NoIssue:   noIssue,
	}
}

// AddApprover adds a new Approver
func (ap *Approvers) AddApprover(login, reference string, noIssue bool) {
	if ap.shouldNotOverrideApproval(login, noIssue) {
		return
	}
	ap.approvers[login] = Approval{
		Login:     login,
		How:       "Approved",
		Reference: reference,
		NoIssue:   noIssue,
	}
}

// AddAuthorSelfApprover adds the author self approval
func (ap *Approvers) AddAuthorSelfApprover(login, reference string) {
	if ap.shouldNotOverrideApproval(login, false) {
		return
	}
	ap.approvers[login] = Approval{
		Login:     login,
		How:       "Author self-approved",
		Reference: reference,
		NoIssue:   false,
	}
}

// RemoveApprover removes an approver from the list.
func (ap *Approvers) RemoveApprover(login string) {
	delete(ap.approvers, login)
}

// AddAssignees adds assignees to the list
func (ap *Approvers) AddAssignees(logins ...string) {
	ap.assignees.Insert(logins...)
}

// GetCurrentApproversSet returns the set of approvers (login only)
func (ap Approvers) GetCurrentApproversSet() sets.String {
	currentApprovers := sets.NewString()
	for approver := range ap.approvers {
		currentApprovers.Insert(approver)
	}
	return currentApprovers
}

// GetNoIssueApproversSet returns the set of "no-issue" approvers (login
// only)
func (ap Approvers) GetNoIssueApproversSet() sets.String {
	approvers := sets.NewString()
	for approver := range ap.NoIssueApprovers() {
		approvers.Insert(approver)
	}
	return approvers
}

// GetFilesApprovers returns a map from files -> list of current approvers.
func (ap Approvers) GetFilesApprovers() map[string]sets.String {
	filesApprovers := map[string]sets.String{}
	currentApprovers := ap.GetCurrentApproversSet()
	for fn, potentialApprovers := range ap.owners.GetApprovers() {
		// The order of parameters matters here:
		// - currentApprovers is the list of github handles that have approved
		// - potentialApprovers is the list of handles in OWNERS
		// files that can approve each file.
		//
		// We want to keep the syntax of the github handle
		// rather than the potential mis-cased username found in
		// the OWNERS file, that's why it's the first parameter.
		filesApprovers[fn] = IntersectSetsCase(currentApprovers, potentialApprovers)
	}

	return filesApprovers
}

// NoIssueApprovers returns the list of people who have "no-issue"
// approved the pull-request. They are included in the list iff they can
// approve one of the files.
func (ap Approvers) NoIssueApprovers() map[string]Approval {
	nia := map[string]Approval{}
	reverseMap := ap.owners.GetReverseMap(ap.owners.GetApprovers())

	for _, approver := range ap.approvers {
		if !approver.NoIssue {
			continue
		}

		if len(reverseMap[approver.Login]) == 0 {
			// This approver cannot approve any file of the PR; not counted.
			continue
		}

		nia[approver.Login] = approver
	}

	return nia
}

// UnapprovedFiles returns owners files that still need approval
func (ap Approvers) UnapprovedFiles() sets.String {
	unapproved := sets.NewString()
	for fn, approvers := range ap.GetFilesApprovers() {
		if len(approvers) == 0 {
			unapproved.Insert(fn)
		}
	}
	return unapproved
}

// GetFiles returns one display entry per OWNERS file needed for this PR,
// either ApprovedFile or UnapprovedFile depending on its current state.
func (ap Approvers) GetFiles(org, project string) []File {
	allOwnersFiles := []File{}
	filesApprovers := ap.GetFilesApprovers()
	for _, fn := range ap.owners.GetOwnersSet().List() {
		if len(filesApprovers[fn]) == 0 {
			allOwnersFiles = append(allOwnersFiles, UnapprovedFile{fn, org, project})
		} else {
			allOwnersFiles = append(allOwnersFiles, ApprovedFile{fn, filesApprovers[fn], org, project})
		}
	}

	return allOwnersFiles
}

// GetCCs gets the list of suggested approvers for a pull-request. It
// now considers current assignees as potential approvers. Here is how
// it works:
// - We find suggested approvers from all potential approvers, but
// remove those that are not useful considering current approvers and
// assignees. This only uses leaf approvers to find approvers the
// closest to the changes.
// - We find a subset of suggested approvers from current
// approvers, suggested approvers and assignees, but we remove those
// that are not useful considering suggested approvers and current
// approvers. This uses the full approvers list, and will result in root
// approvers to be suggested when they are assigned.
// We return the union of the two sets: suggested and suggested
// assignees.
// The goal of this second step is to only keep the assignees that are
// the most useful.
func (ap Approvers) GetCCs() []string { randomizedApprovers := ap.owners.GetShuffledApprovers() currentApprovers := ap.GetCurrentApproversSet() approversAndAssignees := currentApprovers.Union(ap.assignees) leafReverseMap := ap.owners.GetReverseMap(ap.owners.GetLeafApprovers()) suggested := ap.owners.KeepCoveringApprovers(leafReverseMap, approversAndAssignees, randomizedApprovers) approversAndSuggested := currentApprovers.Union(suggested) everyone := approversAndSuggested.Union(ap.assignees) fullReverseMap := ap.owners.GetReverseMap(ap.owners.GetApprovers()) keepAssignees := ap.owners.KeepCoveringApprovers(fullReverseMap, approversAndSuggested, everyone.List()) return suggested.Union(keepAssignees).List() } // IsApproved returns a bool indicating whether or not the PR is approved func (ap Approvers) IsApproved() bool { return ap.UnapprovedFiles().Len() == 0 } // IsApprovedWithIssue verifies that the PR is approved, and has an // associated issue or a valid "no-issue" approver. func (ap Approvers) IsApprovedWithIssue() bool { return ap.IsApproved() && (ap.AssociatedIssue != 0 || len(ap.NoIssueApprovers()) != 0) } // ListApprovals returns the list of approvals func (ap Approvers) ListApprovals() []Approval { approvals := []Approval{} for _, approver := range ap.GetCurrentApproversSet().List() { approvals = append(approvals, ap.approvers[approver]) } return approvals } // ListNoIssueApprovals returns the list of "no-issue" approvals func (ap Approvers) ListNoIssueApprovals() []Approval { approvals := []Approval{} for _, approver := range ap.GetNoIssueApproversSet().List() { approvals = append(approvals, ap.approvers[approver]) } return approvals } type File interface { String() string } type ApprovedFile struct { filepath string approvers sets.String org string project string } type UnapprovedFile struct { filepath string org string project string } func (a ApprovedFile) String() string { fullOwnersPath := filepath.Join(a.filepath, ownersFileName) link := 
fmt.Sprintf("https://github.com/%s/%s/blob/master/%v", a.org, a.project, fullOwnersPath) return fmt.Sprintf("- ~~[%s](%s)~~ [%v]\n", fullOwnersPath, link, strings.Join(a.approvers.List(), ",")) } func (ua UnapprovedFile) String() string { fullOwnersPath := filepath.Join(ua.filepath, ownersFileName) link := fmt.Sprintf("https://github.com/%s/%s/blob/master/%v", ua.org, ua.project, fullOwnersPath) return fmt.Sprintf("- **[%s](%s)**\n", fullOwnersPath, link) } // GenerateTemplateOrFail takes a template, name and data, and generates // the corresping string. nil is returned if it fails. An error is // logged. func GenerateTemplateOrFail(templ, name string, data interface{}) *string { buf := bytes.NewBufferString("") if messageTempl, err := template.New(name).Parse(templ); err != nil { glog.Errorf("Failed to generate template for %s: %s", name, err) return nil } else if err := messageTempl.Execute(buf, data); err != nil { glog.Errorf("Failed to execute template for %s: %s", name, err) return nil } message := buf.String() return &message } // getMessage returns the comment body that we want the approval-handler to display on PRs // The comment shows: // - a list of approvers files (and links) needed to get the PR approved // - a list of approvers files with strikethroughs that already have an approver's approval // - a suggested list of people from each OWNERS files that can fully approve the PR // - how an approver can indicate their approval // - how an approver can cancel their approval func GetMessage(ap Approvers, org, project string) *string { message := GenerateTemplateOrFail(`This pull-request has been approved by: {{range $index, $approval := .ap.ListApprovals}}{{if $index}}, {{end}}{{$approval}}{{end}} {{- if not .ap.IsApproved}} We suggest the following additional approver{{if ne 1 (len .ap.GetCCs)}}s{{end}}: {{range $index, $cc := .ap.GetCCs}}{{if $index}}, {{end}}**{{$cc}}**{{end}} Assign the PR to them by writing `+"`/assign {{range $index, $cc := 
.ap.GetCCs}}{{if $index}} {{end}}@{{$cc}}{{end}}`"+` in a comment when ready. {{- end}} {{if .ap.AssociatedIssue -}} Associated issue: *{{.ap.AssociatedIssue}}* {{- else if len .ap.NoIssueApprovers -}} Associated issue requirement bypassed by: {{range $index, $approval := .ap.ListNoIssueApprovals}}{{if $index}}, {{end}}{{$approval}}{{end}} {{- else -}} *No associated issue*. Update pull-request body to add a reference to an issue, or get approval with `+"`/approve no-issue`"+` {{- end}} The full list of commands accepted by this bot can be found [here](https://github.com/kubernetes/test-infra/blob/master/commands.md). <details {{if not .ap.IsApproved}}open{{end}}> Needs approval from an approver in each of these OWNERS Files: {{range .ap.GetFiles .org .project}}{{.}}{{end}} You can indicate your approval by writing `+"`/approve`"+` in a comment You can cancel your approval by writing `+"`/approve cancel`"+` in a comment </details>`, "message", map[string]interface{}{"ap": ap, "org": org, "project": project}) *message += getGubernatorMetadata(ap.GetCCs()) title := GenerateTemplateOrFail("This PR is **{{if not .IsApprovedWithIssue}}NOT {{end}}APPROVED**", "title", ap) if title == nil || message == nil { return nil } notif := (&c.Notification{ApprovalNotificationName, *title, *message}).String() return &notif } // getGubernatorMetadata returns a JSON string with machine-readable information about approvers. // This MUST be kept in sync with gubernator/github/classifier.py, particularly get_approvers. func getGubernatorMetadata(toBeAssigned []string) string { bytes, err := json.Marshal(map[string][]string{"approvers": toBeAssigned}) if err == nil { return fmt.Sprintf("\n<!-- META=%s -->", bytes) } return "" }
{ "content_hash": "5294c3cb4a179aa164a57606d533e2c2", "timestamp": "", "source": "github", "line_count": 572, "max_line_length": 166, "avg_line_length": 32.44580419580419, "alnum_prop": 0.729457406110243, "repo_name": "mikedanese/test-infra", "id": "4db9490b8ba74a93fd12e656737548b65d17d074", "size": "19128", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mungegithub/mungers/approvers/owners.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "9356" }, { "name": "Go", "bytes": "1735510" }, { "name": "HTML", "bytes": "52444" }, { "name": "JavaScript", "bytes": "71009" }, { "name": "Makefile", "bytes": "36654" }, { "name": "Nginx", "bytes": "1533" }, { "name": "Protocol Buffer", "bytes": "6028" }, { "name": "Python", "bytes": "757053" }, { "name": "Roff", "bytes": "13936" }, { "name": "Shell", "bytes": "126742" } ], "symlink_target": "" }
<?php

/**
 * A "safe" script module: inline JavaScript is never allowed, and any
 * pointed-to JS file must match the configured whitelist.
 */
class HTMLPurifier_HTMLModule_SafeScripting extends HTMLPurifier_HTMLModule
{
    public $name = 'SafeScripting';

    public function setup($config)
    {
        // These definitions are not intrinsically safe: the attribute transforms
        // are a vital part of ensuring safety.

        $whitelist = $config->get('HTML.SafeScripting');

        $attributes = array(
            // While technically not required by the spec, we're forcing
            // it to this value.
            'type' => 'Enum#text/javascript',
            'src*' => new HTMLPurifier_AttrDef_Enum(array_keys($whitelist))
        );

        $scriptElement = $this->addElement('script', 'Inline', 'Empty', null, $attributes);

        // The same transform instance is registered both pre and post,
        // exactly as the original chained assignment did.
        $requiredTransform = new HTMLPurifier_AttrTransform_ScriptRequired();
        $scriptElement->attr_transform_pre[]  = $requiredTransform;
        $scriptElement->attr_transform_post[] = $requiredTransform;
    }
}

// vim: et sw=4 sts=4
{ "content_hash": "a2a48b0b2544f6910ab51f8c4238424e", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 89, "avg_line_length": 28.72972972972973, "alnum_prop": 0.5531514581373471, "repo_name": "acurrieclark/clocktower", "id": "68efc2bc2614fa5507a2e6238abd50f24730d0cd", "size": "1063", "binary": false, "copies": "73", "ref": "refs/heads/master", "path": "library/HTMLPurifier/HTMLPurifier/HTMLModule/SafeScripting.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "578" }, { "name": "CSS", "bytes": "6152" }, { "name": "JavaScript", "bytes": "4666" }, { "name": "PHP", "bytes": "1670277" }, { "name": "Shell", "bytes": "591" } ], "symlink_target": "" }
package archive // import "github.com/docker/docker/pkg/archive" import ( "archive/tar" "bufio" "bytes" "compress/bzip2" "compress/gzip" "context" "encoding/binary" "fmt" "io" "os" "path/filepath" "runtime" "strconv" "strings" "syscall" "time" "github.com/containerd/containerd/pkg/userns" "github.com/docker/docker/pkg/fileutils" "github.com/docker/docker/pkg/idtools" "github.com/docker/docker/pkg/ioutils" "github.com/docker/docker/pkg/pools" "github.com/docker/docker/pkg/system" "github.com/klauspost/compress/zstd" "github.com/pkg/errors" "github.com/sirupsen/logrus" exec "golang.org/x/sys/execabs" ) type ( // Compression is the state represents if compressed or not. Compression int // WhiteoutFormat is the format of whiteouts unpacked WhiteoutFormat int // TarOptions wraps the tar options. TarOptions struct { IncludeFiles []string ExcludePatterns []string Compression Compression NoLchown bool IDMap idtools.IdentityMapping ChownOpts *idtools.Identity IncludeSourceDir bool // WhiteoutFormat is the expected on disk format for whiteout files. // This format will be converted to the standard format on pack // and from the standard format on unpack. WhiteoutFormat WhiteoutFormat // When unpacking, specifies whether overwriting a directory with a // non-directory is allowed and vice versa. NoOverwriteDirNonDir bool // For each include when creating an archive, the included name will be // replaced with the matching name from this map. RebaseNames map[string]string InUserNS bool } ) // Archiver implements the Archiver interface and allows the reuse of most utility functions of // this package with a pluggable Untar function. Also, to facilitate the passing of specific id // mappings for untar, an Archiver can be created with maps which will then be passed to Untar operations. 
type Archiver struct {
	Untar     func(io.Reader, string, *TarOptions) error
	IDMapping idtools.IdentityMapping
}

// NewDefaultArchiver returns a new Archiver without any IdentityMapping
func NewDefaultArchiver() *Archiver {
	return &Archiver{Untar: Untar}
}

// breakoutError is used to differentiate errors related to breaking out
// When testing archive breakout in the unit tests, this error is expected
// in order for the test to pass.
type breakoutError error

const (
	// Uncompressed represents the uncompressed.
	Uncompressed Compression = iota
	// Bzip2 is bzip2 compression algorithm.
	Bzip2
	// Gzip is gzip compression algorithm.
	Gzip
	// Xz is xz compression algorithm.
	Xz
	// Zstd is zstd compression algorithm.
	Zstd
)

const (
	// AUFSWhiteoutFormat is the default format for whiteouts
	AUFSWhiteoutFormat WhiteoutFormat = iota
	// OverlayWhiteoutFormat formats whiteout according to the overlay
	// standard.
	OverlayWhiteoutFormat
)

// POSIX tar type bits that were dropped from os.FileMode handling in
// Go 1.9's archive/tar; re-added by fillGo18FileTypeBits.
const (
	modeISDIR  = 040000  // Directory
	modeISFIFO = 010000  // FIFO
	modeISREG  = 0100000 // Regular file
	modeISLNK  = 0120000 // Symbolic link
	modeISBLK  = 060000  // Block special file
	modeISCHR  = 020000  // Character special file
	modeISSOCK = 0140000 // Socket
)

// IsArchivePath checks if the (possibly compressed) file at the given path
// starts with a tar file header.
func IsArchivePath(path string) bool {
	file, err := os.Open(path)
	if err != nil {
		return false
	}
	defer file.Close()
	rdr, err := DecompressStream(file)
	if err != nil {
		return false
	}
	defer rdr.Close()
	r := tar.NewReader(rdr)
	// A single successful Next() means the stream begins with a valid
	// tar header.
	_, err = r.Next()
	return err == nil
}

const (
	zstdMagicSkippableStart = 0x184D2A50
	zstdMagicSkippableMask  = 0xFFFFFFF0
)

// Leading magic bytes for each supported compression format.
var (
	bzip2Magic = []byte{0x42, 0x5A, 0x68}
	gzipMagic  = []byte{0x1F, 0x8B, 0x08}
	xzMagic    = []byte{0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00}
	zstdMagic  = []byte{0x28, 0xb5, 0x2f, 0xfd}
)

// matcher reports whether a byte prefix identifies a given format.
type matcher = func([]byte) bool

// magicNumberMatcher returns a matcher for a fixed magic-byte prefix.
func magicNumberMatcher(m []byte) matcher {
	return func(source []byte) bool {
		return bytes.HasPrefix(source, m)
	}
}

// zstdMatcher detects zstd compression algorithm.
// Zstandard compressed data is made of one or more frames.
// There are two frame formats defined by Zstandard: Zstandard frames and Skippable frames.
// See https://tools.ietf.org/id/draft-kucherawy-dispatch-zstd-00.html#rfc.section.2 for more details.
func zstdMatcher() matcher {
	return func(source []byte) bool {
		if bytes.HasPrefix(source, zstdMagic) {
			// Zstandard frame
			return true
		}
		// skippable frame
		if len(source) < 8 {
			return false
		}
		// magic number from 0x184D2A50 to 0x184D2A5F.
		if binary.LittleEndian.Uint32(source[:4])&zstdMagicSkippableMask == zstdMagicSkippableStart {
			return true
		}
		return false
	}
}

// DetectCompression detects the compression algorithm of the source.
func DetectCompression(source []byte) Compression {
	compressionMap := map[Compression]matcher{
		Bzip2: magicNumberMatcher(bzip2Magic),
		Gzip:  magicNumberMatcher(gzipMagic),
		Xz:    magicNumberMatcher(xzMagic),
		Zstd:  zstdMatcher(),
	}
	for _, compression := range []Compression{Bzip2, Gzip, Xz, Zstd} {
		fn := compressionMap[compression]
		if fn(source) {
			return compression
		}
	}
	return Uncompressed
}

// xzDecompress decompresses via the external xz binary; the stream is piped
// through cmdStream.
func xzDecompress(ctx context.Context, archive io.Reader) (io.ReadCloser, error) {
	args := []string{"xz", "-d", "-c", "-q"}

	return cmdStream(exec.CommandContext(ctx, args[0], args[1:]...), archive)
}

// gzDecompress uses the external unpigz binary when available (and not
// disabled via MOBY_DISABLE_PIGZ), otherwise falls back to Go's gzip.
func gzDecompress(ctx context.Context, buf io.Reader) (io.ReadCloser, error) {
	if noPigzEnv := os.Getenv("MOBY_DISABLE_PIGZ"); noPigzEnv != "" {
		noPigz, err := strconv.ParseBool(noPigzEnv)
		if err != nil {
			logrus.WithError(err).Warn("invalid value in MOBY_DISABLE_PIGZ env var")
		}
		if noPigz {
			logrus.Debugf("Use of pigz is disabled due to MOBY_DISABLE_PIGZ=%s", noPigzEnv)
			return gzip.NewReader(buf)
		}
	}

	unpigzPath, err := exec.LookPath("unpigz")
	if err != nil {
		logrus.Debugf("unpigz binary not found, falling back to go gzip library")
		return gzip.NewReader(buf)
	}

	logrus.Debugf("Using %s to decompress", unpigzPath)

	return cmdStream(exec.CommandContext(ctx, unpigzPath, "-d", "-c"), buf)
}

// wrapReadCloser ties a context cancel function to the reader's Close so
// external decompressor processes are torn down when the caller is done.
func wrapReadCloser(readBuf io.ReadCloser, cancel context.CancelFunc) io.ReadCloser {
	return ioutils.NewReadCloserWrapper(readBuf, func() error {
		cancel()
		return readBuf.Close()
	})
}

// DecompressStream decompresses the archive and returns a ReaderCloser with the decompressed archive.
func DecompressStream(archive io.Reader) (io.ReadCloser, error) {
	p := pools.BufioReader32KPool
	buf := p.Get(archive)
	// Peek just enough bytes for magic-number detection without consuming
	// the stream.
	bs, err := buf.Peek(10)
	if err != nil && err != io.EOF {
		// Note: we'll ignore any io.EOF error because there are some odd
		// cases where the layer.tar file will be empty (zero bytes) and
		// that results in an io.EOF from the Peek() call. So, in those
		// cases we'll just treat it as a non-compressed stream and
		// that means just create an empty layer.
		// See Issue 18170
		return nil, err
	}

	compression := DetectCompression(bs)
	switch compression {
	case Uncompressed:
		readBufWrapper := p.NewReadCloserWrapper(buf, buf)
		return readBufWrapper, nil
	case Gzip:
		// cancel() on error paths below prevents leaking the unpigz child
		// process; on success the cancel is deferred to Close via
		// wrapReadCloser.
		ctx, cancel := context.WithCancel(context.Background())

		gzReader, err := gzDecompress(ctx, buf)
		if err != nil {
			cancel()
			return nil, err
		}
		readBufWrapper := p.NewReadCloserWrapper(buf, gzReader)
		return wrapReadCloser(readBufWrapper, cancel), nil
	case Bzip2:
		bz2Reader := bzip2.NewReader(buf)
		readBufWrapper := p.NewReadCloserWrapper(buf, bz2Reader)
		return readBufWrapper, nil
	case Xz:
		ctx, cancel := context.WithCancel(context.Background())

		xzReader, err := xzDecompress(ctx, buf)
		if err != nil {
			cancel()
			return nil, err
		}
		readBufWrapper := p.NewReadCloserWrapper(buf, xzReader)
		return wrapReadCloser(readBufWrapper, cancel), nil
	case Zstd:
		zstdReader, err := zstd.NewReader(buf)
		if err != nil {
			return nil, err
		}
		readBufWrapper := p.NewReadCloserWrapper(buf, zstdReader)
		return readBufWrapper, nil
	default:
		return nil, fmt.Errorf("Unsupported compression format %s", (&compression).Extension())
	}
}

// CompressStream compresses the dest with specified compression algorithm.
func CompressStream(dest io.Writer, compression Compression) (io.WriteCloser, error) {
	p := pools.BufioWriter32KPool
	buf := p.Get(dest)
	switch compression {
	case Uncompressed:
		writeBufWrapper := p.NewWriteCloserWrapper(buf, buf)
		return writeBufWrapper, nil
	case Gzip:
		// NOTE(review): the gzip writer targets dest directly rather than
		// the pooled buf — presumably intentional since buf is only used
		// for pool bookkeeping here; verify against upstream history.
		gzWriter := gzip.NewWriter(dest)
		writeBufWrapper := p.NewWriteCloserWrapper(buf, gzWriter)
		return writeBufWrapper, nil
	case Bzip2, Xz:
		// archive/bzip2 does not support writing, and there is no xz support at all
		// However, this is not a problem as docker only currently generates gzipped tars
		return nil, fmt.Errorf("Unsupported compression format %s", (&compression).Extension())
	default:
		return nil, fmt.Errorf("Unsupported compression format %s", (&compression).Extension())
	}
}

// TarModifierFunc is a function that can be passed to ReplaceFileTarWrapper to
// modify the contents or header of an entry in the archive. If the file already
// exists in the archive the TarModifierFunc will be called with the Header and
// a reader which will return the files content. If the file does not exist both
// header and content will be nil.
type TarModifierFunc func(path string, header *tar.Header, content io.Reader) (*tar.Header, []byte, error)

// ReplaceFileTarWrapper converts inputTarStream to a new tar stream. Files in the
// tar stream are modified if they match any of the keys in mods.
func ReplaceFileTarWrapper(inputTarStream io.ReadCloser, mods map[string]TarModifierFunc) io.ReadCloser {
	pipeReader, pipeWriter := io.Pipe()

	// Rewriting happens in a goroutine; the caller reads the modified tar
	// from the returned pipe. Errors are propagated via CloseWithError.
	go func() {
		tarReader := tar.NewReader(inputTarStream)
		tarWriter := tar.NewWriter(pipeWriter)
		defer inputTarStream.Close()
		defer tarWriter.Close()

		// modify runs one TarModifierFunc and writes its (possibly replaced)
		// header and data to the output. A nil returned header means "drop
		// this entry".
		modify := func(name string, original *tar.Header, modifier TarModifierFunc, tarReader io.Reader) error {
			header, data, err := modifier(name, original, tarReader)
			switch {
			case err != nil:
				return err
			case header == nil:
				return nil
			}

			if header.Name == "" {
				header.Name = name
			}
			header.Size = int64(len(data))
			if err := tarWriter.WriteHeader(header); err != nil {
				return err
			}
			if len(data) != 0 {
				if _, err := tarWriter.Write(data); err != nil {
					return err
				}
			}
			return nil
		}

		var err error
		var originalHeader *tar.Header
		for {
			originalHeader, err = tarReader.Next()
			if err == io.EOF {
				break
			}
			if err != nil {
				pipeWriter.CloseWithError(err)
				return
			}

			modifier, ok := mods[originalHeader.Name]
			if !ok {
				// No modifiers for this file, copy the header and data
				if err := tarWriter.WriteHeader(originalHeader); err != nil {
					pipeWriter.CloseWithError(err)
					return
				}
				if _, err := pools.Copy(tarWriter, tarReader); err != nil {
					pipeWriter.CloseWithError(err)
					return
				}
				continue
			}
			// NOTE: the caller's mods map is mutated here so that leftover
			// (unmatched) modifiers can be applied after the loop.
			delete(mods, originalHeader.Name)

			if err := modify(originalHeader.Name, originalHeader, modifier, tarReader); err != nil {
				pipeWriter.CloseWithError(err)
				return
			}
		}

		// Apply the modifiers that haven't matched any files in the archive
		for name, modifier := range mods {
			if err := modify(name, nil, modifier, nil); err != nil {
				pipeWriter.CloseWithError(err)
				return
			}
		}

		pipeWriter.Close()
	}()
	return pipeReader
}

// Extension returns the extension of a file that uses the specified compression algorithm.
func (compression *Compression) Extension() string {
	switch *compression {
	case Uncompressed:
		return "tar"
	case Bzip2:
		return "tar.bz2"
	case Gzip:
		return "tar.gz"
	case Xz:
		return "tar.xz"
	case Zstd:
		return "tar.zst"
	}
	// Unknown compression values yield an empty extension.
	return ""
}

// nosysFileInfo hides the system-dependent info of the wrapped FileInfo to
// prevent tar.FileInfoHeader from introspecting it and potentially calling into
// glibc.
type nosysFileInfo struct {
	os.FileInfo
}

// Sys returns the wrapped Sys value only when it is already a
// system-independent *tar.Header; anything else is hidden (nil).
func (fi nosysFileInfo) Sys() interface{} {
	// A Sys value of type *tar.Header is safe as it is system-independent.
	// The tar.FileInfoHeader function copies the fields into the returned
	// header without performing any OS lookups.
	if sys, ok := fi.FileInfo.Sys().(*tar.Header); ok {
		return sys
	}
	return nil
}

// sysStat, if non-nil, populates hdr from system-dependent fields of fi.
var sysStat func(fi os.FileInfo, hdr *tar.Header) error

// FileInfoHeaderNoLookups creates a partially-populated tar.Header from fi.
//
// Compared to the archive/tar.FileInfoHeader function, this function is safe to
// call from a chrooted process as it does not populate fields which would
// require operating system lookups. It behaves identically to
// tar.FileInfoHeader when fi is a FileInfo value returned from
// tar.Header.FileInfo().
//
// When fi is a FileInfo for a native file, such as returned from os.Stat() and
// os.Lstat(), the returned Header value differs from one returned from
// tar.FileInfoHeader in the following ways. The Uname and Gname fields are not
// set as OS lookups would be required to populate them. The AccessTime and
// ChangeTime fields are not currently set (not yet implemented) although that
// is subject to change. Callers which require the AccessTime or ChangeTime
// fields to be zeroed should explicitly zero them out in the returned Header
// value to avoid any compatibility issues in the future.
func FileInfoHeaderNoLookups(fi os.FileInfo, link string) (*tar.Header, error) {
	hdr, err := tar.FileInfoHeader(nosysFileInfo{fi}, link)
	if err != nil {
		return nil, err
	}
	if sysStat != nil {
		// Platform-specific population of device numbers, inode, etc.
		return hdr, sysStat(fi, hdr)
	}
	return hdr, nil
}

// FileInfoHeader creates a populated Header from fi.
//
// Compared to the archive/tar package, this function fills in less information
// but is safe to call from a chrooted process. The AccessTime and ChangeTime
// fields are not set in the returned header, ModTime is truncated to one-second
// precision, and the Uname and Gname fields are only set when fi is a FileInfo
// value returned from tar.Header.FileInfo(). Also, regardless of Go version,
// this function fills file type bits (e.g. hdr.Mode |= modeISDIR), which have
// been deleted since Go 1.9 archive/tar.
func FileInfoHeader(name string, fi os.FileInfo, link string) (*tar.Header, error) {
	hdr, err := FileInfoHeaderNoLookups(fi, link)
	if err != nil {
		return nil, err
	}
	hdr.Format = tar.FormatPAX
	hdr.ModTime = hdr.ModTime.Truncate(time.Second)
	hdr.AccessTime = time.Time{}
	hdr.ChangeTime = time.Time{}
	hdr.Mode = fillGo18FileTypeBits(int64(chmodTarEntry(os.FileMode(hdr.Mode))), fi)
	hdr.Name = canonicalTarName(name, fi.IsDir())
	return hdr, nil
}

// fillGo18FileTypeBits fills type bits which have been removed on Go 1.9 archive/tar
// https://github.com/golang/go/commit/66b5a2f
func fillGo18FileTypeBits(mode int64, fi os.FileInfo) int64 {
	fm := fi.Mode()
	switch {
	case fm.IsRegular():
		mode |= modeISREG
	case fi.IsDir():
		mode |= modeISDIR
	case fm&os.ModeSymlink != 0:
		mode |= modeISLNK
	case fm&os.ModeDevice != 0:
		if fm&os.ModeCharDevice != 0 {
			mode |= modeISCHR
		} else {
			mode |= modeISBLK
		}
	case fm&os.ModeNamedPipe != 0:
		mode |= modeISFIFO
	case fm&os.ModeSocket != 0:
		mode |= modeISSOCK
	}
	return mode
}

// ReadSecurityXattrToTarHeader reads security.capability xattr from filesystem
// to a tar header
func ReadSecurityXattrToTarHeader(path string, hdr *tar.Header) error {
	const (
		// Values based on linux/include/uapi/linux/capability.h
		xattrCapsSz2    = 20
		versionOffset   = 3
		vfsCapRevision2 = 2
		vfsCapRevision3 = 3
	)
	// NOTE(review): the Lgetxattr error is deliberately discarded here and a
	// missing/unreadable xattr is treated as "no capability" — confirm this
	// best-effort behavior is still intended.
	capability, _ := system.Lgetxattr(path, "security.capability")
	if capability != nil {
		length := len(capability)
		if capability[versionOffset] == vfsCapRevision3 {
			// Convert VFS_CAP_REVISION_3 to VFS_CAP_REVISION_2 as root UID makes no
			// sense outside the user namespace the archive is built in.
			capability[versionOffset] = vfsCapRevision2
			length = xattrCapsSz2
		}
		hdr.Xattrs = make(map[string]string)
		hdr.Xattrs["security.capability"] = string(capability[:length])
	}
	return nil
}

// tarWhiteoutConverter translates whiteout entries between the on-disk
// format and the tar (AUFS-style) format during pack/unpack.
type tarWhiteoutConverter interface {
	ConvertWrite(*tar.Header, string, os.FileInfo) (*tar.Header, error)
	ConvertRead(*tar.Header, string) (bool, error)
}

type tarAppender struct {
	TarWriter *tar.Writer
	Buffer    *bufio.Writer

	// for hardlink mapping
	SeenFiles       map[uint64]string
	IdentityMapping idtools.IdentityMapping
	ChownOpts       *idtools.Identity

	// For packing and unpacking whiteout files in the
	// non standard format. The whiteout files defined
	// by the AUFS standard are used as the tar whiteout
	// standard.
	WhiteoutConverter tarWhiteoutConverter
}

// newTarAppender builds a tarAppender writing to writer, with a pooled
// 32K buffer and empty hardlink-tracking state.
func newTarAppender(idMapping idtools.IdentityMapping, writer io.Writer, chownOpts *idtools.Identity) *tarAppender {
	return &tarAppender{
		SeenFiles:       make(map[uint64]string),
		TarWriter:       tar.NewWriter(writer),
		Buffer:          pools.BufioWriter32KPool.Get(nil),
		IdentityMapping: idMapping,
		ChownOpts:       chownOpts,
	}
}

// canonicalTarName provides a platform-independent and consistent posix-style
// path for files and directories to be archived regardless of the platform.
func canonicalTarName(name string, isDir bool) string { name = CanonicalTarNameForPath(name) // suffix with '/' for directories if isDir && !strings.HasSuffix(name, "/") { name += "/" } return name } // addTarFile adds to the tar archive a file from `path` as `name` func (ta *tarAppender) addTarFile(path, name string) error { fi, err := os.Lstat(path) if err != nil { return err } var link string if fi.Mode()&os.ModeSymlink != 0 { var err error link, err = os.Readlink(path) if err != nil { return err } } hdr, err := FileInfoHeader(name, fi, link) if err != nil { return err } if err := ReadSecurityXattrToTarHeader(path, hdr); err != nil { return err } // if it's not a directory and has more than 1 link, // it's hard linked, so set the type flag accordingly if !fi.IsDir() && hasHardlinks(fi) { inode, err := getInodeFromStat(fi.Sys()) if err != nil { return err } // a link should have a name that it links too // and that linked name should be first in the tar archive if oldpath, ok := ta.SeenFiles[inode]; ok { hdr.Typeflag = tar.TypeLink hdr.Linkname = oldpath hdr.Size = 0 // This Must be here for the writer math to add up! } else { ta.SeenFiles[inode] = name } } // check whether the file is overlayfs whiteout // if yes, skip re-mapping container ID mappings. isOverlayWhiteout := fi.Mode()&os.ModeCharDevice != 0 && hdr.Devmajor == 0 && hdr.Devminor == 0 // handle re-mapping container ID mappings back to host ID mappings before // writing tar headers/files. 
We skip whiteout files because they were written // by the kernel and already have proper ownership relative to the host if !isOverlayWhiteout && !strings.HasPrefix(filepath.Base(hdr.Name), WhiteoutPrefix) && !ta.IdentityMapping.Empty() { fileIDPair, err := getFileUIDGID(fi.Sys()) if err != nil { return err } hdr.Uid, hdr.Gid, err = ta.IdentityMapping.ToContainer(fileIDPair) if err != nil { return err } } // explicitly override with ChownOpts if ta.ChownOpts != nil { hdr.Uid = ta.ChownOpts.UID hdr.Gid = ta.ChownOpts.GID } if ta.WhiteoutConverter != nil { wo, err := ta.WhiteoutConverter.ConvertWrite(hdr, path, fi) if err != nil { return err } // If a new whiteout file exists, write original hdr, then // replace hdr with wo to be written after. Whiteouts should // always be written after the original. Note the original // hdr may have been updated to be a whiteout with returning // a whiteout header if wo != nil { if err := ta.TarWriter.WriteHeader(hdr); err != nil { return err } if hdr.Typeflag == tar.TypeReg && hdr.Size > 0 { return fmt.Errorf("tar: cannot use whiteout for non-empty file") } hdr = wo } } if err := ta.TarWriter.WriteHeader(hdr); err != nil { return err } if hdr.Typeflag == tar.TypeReg && hdr.Size > 0 { // We use system.OpenSequential to ensure we use sequential file // access on Windows to avoid depleting the standby list. // On Linux, this equates to a regular os.Open. 
file, err := system.OpenSequential(path) if err != nil { return err } ta.Buffer.Reset(ta.TarWriter) defer ta.Buffer.Reset(nil) _, err = io.Copy(ta.Buffer, file) file.Close() if err != nil { return err } err = ta.Buffer.Flush() if err != nil { return err } } return nil } func createTarFile(path, extractDir string, hdr *tar.Header, reader io.Reader, Lchown bool, chownOpts *idtools.Identity, inUserns bool) error { // hdr.Mode is in linux format, which we can use for sycalls, // but for os.Foo() calls we need the mode converted to os.FileMode, // so use hdrInfo.Mode() (they differ for e.g. setuid bits) hdrInfo := hdr.FileInfo() switch hdr.Typeflag { case tar.TypeDir: // Create directory unless it exists as a directory already. // In that case we just want to merge the two if fi, err := os.Lstat(path); !(err == nil && fi.IsDir()) { if err := os.Mkdir(path, hdrInfo.Mode()); err != nil { return err } } case tar.TypeReg, tar.TypeRegA: // Source is regular file. We use system.OpenFileSequential to use sequential // file access to avoid depleting the standby list on Windows. // On Linux, this equates to a regular os.OpenFile file, err := system.OpenFileSequential(path, os.O_CREATE|os.O_WRONLY, hdrInfo.Mode()) if err != nil { return err } if _, err := io.Copy(file, reader); err != nil { file.Close() return err } file.Close() case tar.TypeBlock, tar.TypeChar: if inUserns { // cannot create devices in a userns return nil } // Handle this is an OS-specific way if err := handleTarTypeBlockCharFifo(hdr, path); err != nil { return err } case tar.TypeFifo: // Handle this is an OS-specific way if err := handleTarTypeBlockCharFifo(hdr, path); err != nil { return err } case tar.TypeLink: // #nosec G305 -- The target path is checked for path traversal. 
targetPath := filepath.Join(extractDir, hdr.Linkname) // check for hardlink breakout if !strings.HasPrefix(targetPath, extractDir) { return breakoutError(fmt.Errorf("invalid hardlink %q -> %q", targetPath, hdr.Linkname)) } if err := os.Link(targetPath, path); err != nil { return err } case tar.TypeSymlink: // path -> hdr.Linkname = targetPath // e.g. /extractDir/path/to/symlink -> ../2/file = /extractDir/path/2/file targetPath := filepath.Join(filepath.Dir(path), hdr.Linkname) // #nosec G305 -- The target path is checked for path traversal. // the reason we don't need to check symlinks in the path (with FollowSymlinkInScope) is because // that symlink would first have to be created, which would be caught earlier, at this very check: if !strings.HasPrefix(targetPath, extractDir) { return breakoutError(fmt.Errorf("invalid symlink %q -> %q", path, hdr.Linkname)) } if err := os.Symlink(hdr.Linkname, path); err != nil { return err } case tar.TypeXGlobalHeader: logrus.Debug("PAX Global Extended Headers found and ignored") return nil default: return fmt.Errorf("unhandled tar header type %d", hdr.Typeflag) } // Lchown is not supported on Windows. if Lchown && runtime.GOOS != "windows" { if chownOpts == nil { chownOpts = &idtools.Identity{UID: hdr.Uid, GID: hdr.Gid} } if err := os.Lchown(path, chownOpts.UID, chownOpts.GID); err != nil { msg := "failed to Lchown %q for UID %d, GID %d" if errors.Is(err, syscall.EINVAL) && userns.RunningInUserNS() { msg += " (try increasing the number of subordinate IDs in /etc/subuid and /etc/subgid)" } return errors.Wrapf(err, msg, path, hdr.Uid, hdr.Gid) } } var errors []string for key, value := range hdr.Xattrs { if err := system.Lsetxattr(path, key, []byte(value), 0); err != nil { if err == syscall.ENOTSUP || err == syscall.EPERM { // We ignore errors here because not all graphdrivers support // xattrs *cough* old versions of AUFS *cough*. However only // ENOTSUP should be emitted in that case, otherwise we still // bail. 
// EPERM occurs if modifying xattrs is not allowed. This can // happen when running in userns with restrictions (ChromeOS). errors = append(errors, err.Error()) continue } return err } } if len(errors) > 0 { logrus.WithFields(logrus.Fields{ "errors": errors, }).Warn("ignored xattrs in archive: underlying filesystem doesn't support them") } // There is no LChmod, so ignore mode for symlink. Also, this // must happen after chown, as that can modify the file mode if err := handleLChmod(hdr, path, hdrInfo); err != nil { return err } aTime := hdr.AccessTime if aTime.Before(hdr.ModTime) { // Last access time should never be before last modified time. aTime = hdr.ModTime } // system.Chtimes doesn't support a NOFOLLOW flag atm if hdr.Typeflag == tar.TypeLink { if fi, err := os.Lstat(hdr.Linkname); err == nil && (fi.Mode()&os.ModeSymlink == 0) { if err := system.Chtimes(path, aTime, hdr.ModTime); err != nil { return err } } } else if hdr.Typeflag != tar.TypeSymlink { if err := system.Chtimes(path, aTime, hdr.ModTime); err != nil { return err } } else { ts := []syscall.Timespec{timeToTimespec(aTime), timeToTimespec(hdr.ModTime)} if err := system.LUtimesNano(path, ts); err != nil && err != system.ErrNotSupportedPlatform { return err } } return nil } // Tar creates an archive from the directory at `path`, and returns it as a // stream of bytes. func Tar(path string, compression Compression) (io.ReadCloser, error) { return TarWithOptions(path, &TarOptions{Compression: compression}) } // TarWithOptions creates an archive from the directory at `path`, only including files whose relative // paths are included in `options.IncludeFiles` (if non-nil) or not in `options.ExcludePatterns`. func TarWithOptions(srcPath string, options *TarOptions) (io.ReadCloser, error) { // Fix the source path to work with long path names. This is a no-op // on platforms other than Windows. 
srcPath = fixVolumePathPrefix(srcPath) pm, err := fileutils.NewPatternMatcher(options.ExcludePatterns) if err != nil { return nil, err } pipeReader, pipeWriter := io.Pipe() compressWriter, err := CompressStream(pipeWriter, options.Compression) if err != nil { return nil, err } whiteoutConverter, err := getWhiteoutConverter(options.WhiteoutFormat, options.InUserNS) if err != nil { return nil, err } go func() { ta := newTarAppender( options.IDMap, compressWriter, options.ChownOpts, ) ta.WhiteoutConverter = whiteoutConverter defer func() { // Make sure to check the error on Close. if err := ta.TarWriter.Close(); err != nil { logrus.Errorf("Can't close tar writer: %s", err) } if err := compressWriter.Close(); err != nil { logrus.Errorf("Can't close compress writer: %s", err) } if err := pipeWriter.Close(); err != nil { logrus.Errorf("Can't close pipe writer: %s", err) } }() // this buffer is needed for the duration of this piped stream defer pools.BufioWriter32KPool.Put(ta.Buffer) // In general we log errors here but ignore them because // during e.g. a diff operation the container can continue // mutating the filesystem and we can see transient errors // from this stat, err := os.Lstat(srcPath) if err != nil { return } if !stat.IsDir() { // We can't later join a non-dir with any includes because the // 'walk' will error if "file/." is stat-ed and "file" is not a // directory. So, we must split the source path and use the // basename as the include. 
if len(options.IncludeFiles) > 0 { logrus.Warn("Tar: Can't archive a file with includes") } dir, base := SplitPathDirEntry(srcPath) srcPath = dir options.IncludeFiles = []string{base} } if len(options.IncludeFiles) == 0 { options.IncludeFiles = []string{"."} } seen := make(map[string]bool) for _, include := range options.IncludeFiles { rebaseName := options.RebaseNames[include] var ( parentMatchInfo []fileutils.MatchInfo parentDirs []string ) walkRoot := getWalkRoot(srcPath, include) filepath.Walk(walkRoot, func(filePath string, f os.FileInfo, err error) error { if err != nil { logrus.Errorf("Tar: Can't stat file %s to tar: %s", srcPath, err) return nil } relFilePath, err := filepath.Rel(srcPath, filePath) if err != nil || (!options.IncludeSourceDir && relFilePath == "." && f.IsDir()) { // Error getting relative path OR we are looking // at the source directory path. Skip in both situations. return nil } if options.IncludeSourceDir && include == "." && relFilePath != "." { relFilePath = strings.Join([]string{".", relFilePath}, string(filepath.Separator)) } skip := false // If "include" is an exact match for the current file // then even if there's an "excludePatterns" pattern that // matches it, don't skip it. 
IOW, assume an explicit 'include' // is asking for that file no matter what - which is true // for some files, like .dockerignore and Dockerfile (sometimes) if include != relFilePath { for len(parentDirs) != 0 { lastParentDir := parentDirs[len(parentDirs)-1] if strings.HasPrefix(relFilePath, lastParentDir+string(os.PathSeparator)) { break } parentDirs = parentDirs[:len(parentDirs)-1] parentMatchInfo = parentMatchInfo[:len(parentMatchInfo)-1] } var matchInfo fileutils.MatchInfo if len(parentMatchInfo) != 0 { skip, matchInfo, err = pm.MatchesUsingParentResults(relFilePath, parentMatchInfo[len(parentMatchInfo)-1]) } else { skip, matchInfo, err = pm.MatchesUsingParentResults(relFilePath, fileutils.MatchInfo{}) } if err != nil { logrus.Errorf("Error matching %s: %v", relFilePath, err) return err } if f.IsDir() { parentDirs = append(parentDirs, relFilePath) parentMatchInfo = append(parentMatchInfo, matchInfo) } } if skip { // If we want to skip this file and its a directory // then we should first check to see if there's an // excludes pattern (e.g. !dir/file) that starts with this // dir. If so then we can't skip this dir. // Its not a dir then so we can just return/skip. if !f.IsDir() { return nil } // No exceptions (!...) in patterns so just skip dir if !pm.Exclusions() { return filepath.SkipDir } dirSlash := relFilePath + string(filepath.Separator) for _, pat := range pm.Patterns() { if !pat.Exclusion() { continue } if strings.HasPrefix(pat.String()+string(filepath.Separator), dirSlash) { // found a match - so can't skip this dir return nil } } // No matching exclusion dir so just skip dir return filepath.SkipDir } if seen[relFilePath] { return nil } seen[relFilePath] = true // Rename the base resource. if rebaseName != "" { var replacement string if rebaseName != string(filepath.Separator) { // Special case the root directory to replace with an // empty string instead so that we don't end up with // double slashes in the paths. 
replacement = rebaseName } relFilePath = strings.Replace(relFilePath, include, replacement, 1) } if err := ta.addTarFile(filePath, relFilePath); err != nil { logrus.Errorf("Can't add file %s to tar: %s", filePath, err) // if pipe is broken, stop writing tar stream to it if err == io.ErrClosedPipe { return err } } return nil }) } }() return pipeReader, nil } // Unpack unpacks the decompressedArchive to dest with options. func Unpack(decompressedArchive io.Reader, dest string, options *TarOptions) error { tr := tar.NewReader(decompressedArchive) trBuf := pools.BufioReader32KPool.Get(nil) defer pools.BufioReader32KPool.Put(trBuf) var dirs []*tar.Header rootIDs := options.IDMap.RootPair() whiteoutConverter, err := getWhiteoutConverter(options.WhiteoutFormat, options.InUserNS) if err != nil { return err } // Iterate through the files in the archive. loop: for { hdr, err := tr.Next() if err == io.EOF { // end of tar archive break } if err != nil { return err } // ignore XGlobalHeader early to avoid creating parent directories for them if hdr.Typeflag == tar.TypeXGlobalHeader { logrus.Debugf("PAX Global Extended Headers found for %s and ignored", hdr.Name) continue } // Normalize name, for safety and for a simple is-root check // This keeps "../" as-is, but normalizes "/../" to "/". Or Windows: // This keeps "..\" as-is, but normalizes "\..\" to "\". hdr.Name = filepath.Clean(hdr.Name) for _, exclude := range options.ExcludePatterns { if strings.HasPrefix(hdr.Name, exclude) { continue loop } } // After calling filepath.Clean(hdr.Name) above, hdr.Name will now be in // the filepath format for the OS on which the daemon is running. Hence // the check for a slash-suffix MUST be done in an OS-agnostic way. 
if !strings.HasSuffix(hdr.Name, string(os.PathSeparator)) { // Not the root directory, ensure that the parent directory exists parent := filepath.Dir(hdr.Name) parentPath := filepath.Join(dest, parent) if _, err := os.Lstat(parentPath); err != nil && os.IsNotExist(err) { err = idtools.MkdirAllAndChownNew(parentPath, 0755, rootIDs) if err != nil { return err } } } // #nosec G305 -- The joined path is checked for path traversal. path := filepath.Join(dest, hdr.Name) rel, err := filepath.Rel(dest, path) if err != nil { return err } if strings.HasPrefix(rel, ".."+string(os.PathSeparator)) { return breakoutError(fmt.Errorf("%q is outside of %q", hdr.Name, dest)) } // If path exits we almost always just want to remove and replace it // The only exception is when it is a directory *and* the file from // the layer is also a directory. Then we want to merge them (i.e. // just apply the metadata from the layer). if fi, err := os.Lstat(path); err == nil { if options.NoOverwriteDirNonDir && fi.IsDir() && hdr.Typeflag != tar.TypeDir { // If NoOverwriteDirNonDir is true then we cannot replace // an existing directory with a non-directory from the archive. return fmt.Errorf("cannot overwrite directory %q with non-directory %q", path, dest) } if options.NoOverwriteDirNonDir && !fi.IsDir() && hdr.Typeflag == tar.TypeDir { // If NoOverwriteDirNonDir is true then we cannot replace // an existing non-directory with a directory from the archive. return fmt.Errorf("cannot overwrite non-directory %q with directory %q", path, dest) } if fi.IsDir() && hdr.Name == "." 
{ continue } if !(fi.IsDir() && hdr.Typeflag == tar.TypeDir) { if err := os.RemoveAll(path); err != nil { return err } } } trBuf.Reset(tr) if err := remapIDs(options.IDMap, hdr); err != nil { return err } if whiteoutConverter != nil { writeFile, err := whiteoutConverter.ConvertRead(hdr, path) if err != nil { return err } if !writeFile { continue } } if err := createTarFile(path, dest, hdr, trBuf, !options.NoLchown, options.ChownOpts, options.InUserNS); err != nil { return err } // Directory mtimes must be handled at the end to avoid further // file creation in them to modify the directory mtime if hdr.Typeflag == tar.TypeDir { dirs = append(dirs, hdr) } } for _, hdr := range dirs { // #nosec G305 -- The header was checked for path traversal before it was appended to the dirs slice. path := filepath.Join(dest, hdr.Name) if err := system.Chtimes(path, hdr.AccessTime, hdr.ModTime); err != nil { return err } } return nil } // Untar reads a stream of bytes from `archive`, parses it as a tar archive, // and unpacks it into the directory at `dest`. // The archive may be compressed with one of the following algorithms: // identity (uncompressed), gzip, bzip2, xz. // // FIXME: specify behavior when target path exists vs. doesn't exist. func Untar(tarArchive io.Reader, dest string, options *TarOptions) error { return untarHandler(tarArchive, dest, options, true) } // UntarUncompressed reads a stream of bytes from `archive`, parses it as a tar archive, // and unpacks it into the directory at `dest`. // The archive must be an uncompressed stream. 
func UntarUncompressed(tarArchive io.Reader, dest string, options *TarOptions) error { return untarHandler(tarArchive, dest, options, false) } // Handler for teasing out the automatic decompression func untarHandler(tarArchive io.Reader, dest string, options *TarOptions, decompress bool) error { if tarArchive == nil { return fmt.Errorf("Empty archive") } dest = filepath.Clean(dest) if options == nil { options = &TarOptions{} } if options.ExcludePatterns == nil { options.ExcludePatterns = []string{} } r := tarArchive if decompress { decompressedArchive, err := DecompressStream(tarArchive) if err != nil { return err } defer decompressedArchive.Close() r = decompressedArchive } return Unpack(r, dest, options) } // TarUntar is a convenience function which calls Tar and Untar, with the output of one piped into the other. // If either Tar or Untar fails, TarUntar aborts and returns the error. func (archiver *Archiver) TarUntar(src, dst string) error { archive, err := TarWithOptions(src, &TarOptions{Compression: Uncompressed}) if err != nil { return err } defer archive.Close() options := &TarOptions{ IDMap: archiver.IDMapping, } return archiver.Untar(archive, dst, options) } // UntarPath untar a file from path to a destination, src is the source tar file path. func (archiver *Archiver) UntarPath(src, dst string) error { archive, err := os.Open(src) if err != nil { return err } defer archive.Close() options := &TarOptions{ IDMap: archiver.IDMapping, } return archiver.Untar(archive, dst, options) } // CopyWithTar creates a tar archive of filesystem path `src`, and // unpacks it at filesystem path `dst`. // The archive is streamed directly with fixed buffering and no // intermediary disk IO. 
func (archiver *Archiver) CopyWithTar(src, dst string) error { srcSt, err := os.Stat(src) if err != nil { return err } if !srcSt.IsDir() { return archiver.CopyFileWithTar(src, dst) } // if this Archiver is set up with ID mapping we need to create // the new destination directory with the remapped root UID/GID pair // as owner rootIDs := archiver.IDMapping.RootPair() // Create dst, copy src's content into it if err := idtools.MkdirAllAndChownNew(dst, 0755, rootIDs); err != nil { return err } return archiver.TarUntar(src, dst) } // CopyFileWithTar emulates the behavior of the 'cp' command-line // for a single file. It copies a regular file from path `src` to // path `dst`, and preserves all its metadata. func (archiver *Archiver) CopyFileWithTar(src, dst string) (err error) { srcSt, err := os.Stat(src) if err != nil { return err } if srcSt.IsDir() { return fmt.Errorf("Can't copy a directory") } // Clean up the trailing slash. This must be done in an operating // system specific manner. if dst[len(dst)-1] == os.PathSeparator { dst = filepath.Join(dst, filepath.Base(src)) } // Create the holding directory if necessary if err := system.MkdirAll(filepath.Dir(dst), 0700); err != nil { return err } r, w := io.Pipe() errC := make(chan error, 1) go func() { defer close(errC) errC <- func() error { defer w.Close() srcF, err := os.Open(src) if err != nil { return err } defer srcF.Close() hdr, err := FileInfoHeaderNoLookups(srcSt, "") if err != nil { return err } hdr.Format = tar.FormatPAX hdr.ModTime = hdr.ModTime.Truncate(time.Second) hdr.AccessTime = time.Time{} hdr.ChangeTime = time.Time{} hdr.Name = filepath.Base(dst) hdr.Mode = int64(chmodTarEntry(os.FileMode(hdr.Mode))) if err := remapIDs(archiver.IDMapping, hdr); err != nil { return err } tw := tar.NewWriter(w) defer tw.Close() if err := tw.WriteHeader(hdr); err != nil { return err } if _, err := io.Copy(tw, srcF); err != nil { return err } return nil }() }() defer func() { if er := <-errC; err == nil && er != nil { err 
= er } }() err = archiver.Untar(r, filepath.Dir(dst), nil) if err != nil { r.CloseWithError(err) } return err } // IdentityMapping returns the IdentityMapping of the archiver. func (archiver *Archiver) IdentityMapping() idtools.IdentityMapping { return archiver.IDMapping } func remapIDs(idMapping idtools.IdentityMapping, hdr *tar.Header) error { ids, err := idMapping.ToHost(idtools.Identity{UID: hdr.Uid, GID: hdr.Gid}) hdr.Uid, hdr.Gid = ids.UID, ids.GID return err } // cmdStream executes a command, and returns its stdout as a stream. // If the command fails to run or doesn't complete successfully, an error // will be returned, including anything written on stderr. func cmdStream(cmd *exec.Cmd, input io.Reader) (io.ReadCloser, error) { cmd.Stdin = input pipeR, pipeW := io.Pipe() cmd.Stdout = pipeW var errBuf bytes.Buffer cmd.Stderr = &errBuf // Run the command and return the pipe if err := cmd.Start(); err != nil { return nil, err } // Ensure the command has exited before we clean anything up done := make(chan struct{}) // Copy stdout to the returned pipe go func() { if err := cmd.Wait(); err != nil { pipeW.CloseWithError(fmt.Errorf("%s: %s", err, errBuf.String())) } else { pipeW.Close() } close(done) }() return ioutils.NewReadCloserWrapper(pipeR, func() error { // Close pipeR, and then wait for the command to complete before returning. We have to close pipeR first, as // cmd.Wait waits for any non-file stdout/stderr/stdin to close. err := pipeR.Close() <-done return err }), nil } // NewTempArchive reads the content of src into a temporary file, and returns the contents // of that file as an archive. The archive can only be read once - as soon as reading completes, // the file will be deleted. 
func NewTempArchive(src io.Reader, dir string) (*TempArchive, error) { f, err := os.CreateTemp(dir, "") if err != nil { return nil, err } if _, err := io.Copy(f, src); err != nil { return nil, err } if _, err := f.Seek(0, 0); err != nil { return nil, err } st, err := f.Stat() if err != nil { return nil, err } size := st.Size() return &TempArchive{File: f, Size: size}, nil } // TempArchive is a temporary archive. The archive can only be read once - as soon as reading completes, // the file will be deleted. type TempArchive struct { *os.File Size int64 // Pre-computed from Stat().Size() as a convenience read int64 closed bool } // Close closes the underlying file if it's still open, or does a no-op // to allow callers to try to close the TempArchive multiple times safely. func (archive *TempArchive) Close() error { if archive.closed { return nil } archive.closed = true return archive.File.Close() } func (archive *TempArchive) Read(data []byte) (int, error) { n, err := archive.File.Read(data) archive.read += int64(n) if err != nil || archive.read == archive.Size { archive.Close() os.Remove(archive.File.Name()) } return n, err }
{ "content_hash": "f14f6cf02c072e2450f375f47b7fb825", "timestamp": "", "source": "github", "line_count": 1447, "max_line_length": 143, "avg_line_length": 30.12854181064271, "alnum_prop": 0.6932975502339664, "repo_name": "nawawi/docker", "id": "fe8709305438e4c39dd474eb2036200b155d9d08", "size": "43596", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pkg/archive/archive.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "81" }, { "name": "C", "bytes": "4815" }, { "name": "Dockerfile", "bytes": "47177" }, { "name": "Go", "bytes": "9187250" }, { "name": "HCL", "bytes": "582" }, { "name": "Makefile", "bytes": "10222" }, { "name": "PowerShell", "bytes": "31031" }, { "name": "Python", "bytes": "7179" }, { "name": "Shell", "bytes": "181741" } ], "symlink_target": "" }
// Copyright 2020 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package p type T[_ any] int func f[_ any]() {} func g[_, _ any]() {} func _() { _ = f[T /* ERROR without instantiation */ ] _ = g[T /* ERROR without instantiation */ , T /* ERROR without instantiation */ ] }
{ "content_hash": "4b009a65d6a89837b6130466ca5b5f83", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 82, "avg_line_length": 24.8, "alnum_prop": 0.6397849462365591, "repo_name": "CAFxX/go", "id": "58d0f69f6503164328feb8990baa1d4bbf201c52", "size": "372", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "src/cmd/compile/internal/types2/testdata/fixedbugs/issue40684.go", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "2544097" }, { "name": "Awk", "bytes": "450" }, { "name": "Batchfile", "bytes": "8202" }, { "name": "C", "bytes": "107970" }, { "name": "C++", "bytes": "917" }, { "name": "Dockerfile", "bytes": "506" }, { "name": "Fortran", "bytes": "394" }, { "name": "Go", "bytes": "44135746" }, { "name": "HTML", "bytes": "2621340" }, { "name": "JavaScript", "bytes": "20486" }, { "name": "Makefile", "bytes": "748" }, { "name": "Perl", "bytes": "31365" }, { "name": "Python", "bytes": "15702" }, { "name": "Shell", "bytes": "54296" } ], "symlink_target": "" }
__author__ = 'yinjun' """ Definition of ListNode class ListNode(object): def __init__(self, val, next=None): self.val = val self.next = next """ class Solution: """ @param head: The first node of linked list. @param n: An integer. @return: The head of linked list. """ def removeNthFromEnd(self, head, n): # write your code here if head == None: return head pre = ListNode(0) pre.next = head p = pre l = self.nodelen(head) for i in range(l-n): if pre.next != None: pre = pre.next else: return head if pre.next!=None and pre.next.next!=None: pre.next = pre.next.next else: pre.next = None return p.next def nodelen(self, head): h = head l = 0 while h != None: l += 1 h = h.next return l
{ "content_hash": "b992920a5d3972e39b7c645d48189681", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 50, "avg_line_length": 19.46, "alnum_prop": 0.47687564234326824, "repo_name": "shootsoft/practice", "id": "1e92be259aaeb9790b5944419f10ffc6e332644e", "size": "973", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lintcode/NineChapters/06/remove-nth-node-from-end-of-list.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "722333" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>mathcomp-analysis: Black list 🏴‍☠️</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.5.1 / mathcomp-analysis - 0.3.6</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> mathcomp-analysis <small> 0.3.6 <span class="label label-default">Black list 🏴‍☠️</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-10-22 13:17:09 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-22 13:17:09 UTC)</em><p> <div class="alert alert-info" role="alert"> <p>This result is black-listed as it is considered as too hard to reproduce / to solve. 
If you find a way to fix this package, please make a pull-request to <a class="alert-link" href="https://github.com/coq/opam-coq-archive">github.com/coq/opam-coq-archive</a>. The list of black-listed packages is in <a class="alert-link" href="https://github.com/coq-bench/make-html/blob/master/black_list.rb">black_list.rb</a>.</p> </div> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-num base Num library distributed with the OCaml compiler base-ocamlbuild base OCamlbuild binary and libraries distributed with the OCaml compiler base-threads base base-unix base camlp5 7.14 Preprocessor-pretty-printer of OCaml conf-findutils 1 Virtual package relying on findutils conf-perl 2 Virtual package relying on perl coq 8.5.1 Formal proof management system num 0 The Num library for arbitrary-precision integer and rational arithmetic ocaml 4.02.3 The OCaml compiler (virtual package) ocaml-base-compiler 4.02.3 Official 4.02.3 release ocaml-config 1 OCaml Switch Configuration # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;Reynald Affeldt &lt;[email protected]&gt;&quot; homepage: &quot;https://github.com/math-comp/analysis&quot; dev-repo: &quot;git+https://github.com/math-comp/analysis.git&quot; bug-reports: &quot;https://github.com/math-comp/analysis/issues&quot; license: &quot;CECILL-C&quot; synopsis: &quot;An analysis library for mathematical components&quot; description: &quot;&quot;&quot; This repository contains an experimental library for real analysis for the Coq proof-assistant and using the Mathematical Components library.&quot;&quot;&quot; build: [make &quot;-j%{jobs}%&quot; ] install: [make &quot;install&quot;] depends: [ &quot;coq&quot; { (&gt;= &quot;8.11&quot; &amp; &lt; &quot;8.14~&quot;) | (= &quot;dev&quot;) } &quot;coq-mathcomp-ssreflect&quot; { (&gt;= &quot;1.12.0&quot; &amp; &lt; &quot;1.13~&quot;) | (= &quot;dev&quot;) } &quot;coq-mathcomp-fingroup&quot; { (&gt;= &quot;1.12.0&quot; &amp; 
&lt; &quot;1.13~&quot;) | (= &quot;dev&quot;) } &quot;coq-mathcomp-algebra&quot; { (&gt;= &quot;1.12.0&quot; &amp; &lt; &quot;1.13~&quot;) | (= &quot;dev&quot;) } &quot;coq-mathcomp-solvable&quot; { (&gt;= &quot;1.12.0&quot; &amp; &lt; &quot;1.13~&quot;) | (= &quot;dev&quot;) } &quot;coq-mathcomp-field&quot; { (&gt;= &quot;1.12.0&quot; &amp; &lt; &quot;1.13~&quot;) | (= &quot;dev&quot;) } &quot;coq-mathcomp-finmap&quot; { (&gt;= &quot;1.5.1&quot; &amp; &lt; &quot;1.6~&quot;) | (= &quot;dev&quot;) } &quot;coq-mathcomp-bigenough&quot; { (&gt;= &quot;1.0.0&quot;) } &quot;coq-hierarchy-builder&quot; { (&gt;= &quot;0.10.0&quot; &amp; &lt; &quot;1.1.0&quot;) | (= &quot;dev&quot;) } ] tags: [ &quot;keyword:analysis&quot; &quot;keyword:topology&quot; &quot;keyword:real numbers&quot; &quot;date:2021-03-04&quot; &quot;logpath:mathcomp.analysis&quot; ] authors: [ &quot;Reynald Affeldt&quot; &quot;Cyril Cohen&quot; &quot;Marie Kerjean&quot; &quot;Assia Mahboubi&quot; &quot;Damien Rouhling&quot; &quot;Kazuhiko Sakaguchi&quot; &quot;Pierre-Yves Strub&quot; ] url { http: &quot;https://github.com/math-comp/analysis/archive/0.3.6.tar.gz&quot; checksum: &quot;sha512=6f19f26bfdfeff6a9e56d27e278141c851cbbd5e584af523ef153b70b9326ab396ab8f582a56762e33a7053a4130b144feb4ba92a8acac62eb5e47193f3450a2&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-mathcomp-analysis.0.3.6 coq.8.5.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.5.1). 
The following dependencies couldn&#39;t be met: - coq-mathcomp-analysis -&gt; coq &gt;= dev -&gt; ocaml &gt;= 4.05.0 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-mathcomp-analysis.0.3.6</code></dd> <dt>Return code</dt> <dd>15360</dd> <dt>Output</dt> <dd><pre>The following actions will be performed: - remove coq 8.5.1 &lt;&gt;&lt;&gt; Processing actions &lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt;&lt;&gt; [WARNING] While removing coq.8.5.1: not removing files that changed since: - share/emacs/site-lisp/gallina.el - share/emacs/site-lisp/gallina-syntax.el - share/emacs/site-lisp/gallina-db.el - share/emacs/site-lisp/coqdoc.sty - share/emacs/site-lisp/coq-inferior.el - share/emacs/site-lisp/coq-font-lock.el - man/man1/gallina.1 - man/man1/coqwc.1 - man/man1/coqtop.opt.1 - man/man1/coqtop.byte.1 - man/man1/coqtop.1 - man/man1/coqmktop.1 - man/man1/coqide.1 - man/man1/coqdoc.1 - man/man1/coqdep.1 - man/man1/coqchk.1 - man/man1/coqc.1 - man/man1/coq_makefile.1 - man/man1/coq-tex.1 - bin/coqtop.byte [NOTE] While removing coq.8.5.1: not removing non-empty directories: - share/emacs/site-lisp -&gt; removed coq.8.5.1 Done. # Run eval $(opam env) to update the current shell environment [ERROR] Sorry, resolution of the request timed out. Try to specify a simpler request, use a different solver, or increase the allowed time by setting OPAMSOLVERTIMEOUT to a bigger value (currently, it is set to 600.0 seconds). 
</pre></dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "2af494aff6f793588e456bb5a530a67d", "timestamp": "", "source": "github", "line_count": 220, "max_line_length": 433, "avg_line_length": 46.877272727272725, "alnum_prop": 0.5661786095219625, "repo_name": "coq-bench/coq-bench.github.io", "id": "f4eb555ca6e5edde0dd4cccffb4490031b67efe4", "size": "10350", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.02.3-2.0.6/released/8.5.1/mathcomp-analysis/0.3.6.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
namespace blink {
class WebAudioSourceProviderClient;
}

namespace media {
class MediaLog;

// WebAudioSourceProviderImpl is either one of two things (but not both):
// - a connection between a RestartableAudioRendererSink (the |sink_|) passed in
//   constructor and an AudioRendererSink::RenderCallback passed on Initialize()
//   by means of an internal AudioRendererSink::RenderCallback.
// - a connection between the said AudioRendererSink::RenderCallback and a
//   blink::WebAudioSourceProviderClient passed via setClient() (the |client_|),
//   again using the internal AudioRendererSink::RenderCallback. Blink calls
//   provideInput() periodically to fetch the appropriate data.
//
// In either case, the internal RenderCallback allows for delivering a copy of
// the data if a listener is configured. WASPImpl is also a
// RestartableAudioRendererSink itself in order to be controlled (Play(),
// Pause() etc).
//
// All calls are protected by a lock.
class MEDIA_BLINK_EXPORT WebAudioSourceProviderImpl
    : NON_EXPORTED_BASE(public blink::WebAudioSourceProvider),
      NON_EXPORTED_BASE(public SwitchableAudioRendererSink) {
 public:
  // Callback type used to hand out copies of rendered audio; receives the
  // copied bus, the number of frames of delay, and the sample rate.
  using CopyAudioCB = base::Callback<void(std::unique_ptr<AudioBus>,
                                          uint32_t frames_delayed,
                                          int sample_rate)>;

  WebAudioSourceProviderImpl(scoped_refptr<SwitchableAudioRendererSink> sink,
                             scoped_refptr<MediaLog> media_log);

  // blink::WebAudioSourceProvider implementation.
  void setClient(blink::WebAudioSourceProviderClient* client) override;
  void provideInput(const blink::WebVector<float*>& audio_data,
                    size_t number_of_frames) override;

  // RestartableAudioRendererSink implementation.
  void Initialize(const AudioParameters& params,
                  RenderCallback* renderer) override;
  void Start() override;
  void Stop() override;
  void Play() override;
  void Pause() override;
  bool SetVolume(double volume) override;
  OutputDeviceInfo GetOutputDeviceInfo() override;
  bool CurrentThreadIsRenderingThread() override;
  void SwitchOutputDevice(const std::string& device_id,
                          const url::Origin& security_origin,
                          const OutputDeviceStatusCB& callback) override;

  // These methods allow a client to get a copy of the rendered audio.
  void SetCopyAudioCallback(const CopyAudioCB& callback);
  void ClearCopyAudioCallback();

  // Drives one render pass into |audio_bus| for tests; returns the frame
  // count produced by the internal callback. Test-only entry point.
  int RenderForTesting(AudioBus* audio_bus);

 protected:
  // NOTE(review): presumably builds a replacement sink when the primary
  // |sink_| cannot be used -- virtual so tests can override; confirm against
  // the .cc implementation.
  virtual scoped_refptr<SwitchableAudioRendererSink> CreateFallbackSink();
  ~WebAudioSourceProviderImpl() override;

 private:
  friend class WebAudioSourceProviderImplTest;

  // Calls setFormat() on |client_| from the Blink renderer thread.
  void OnSetFormat();

  // Used to keep the volume across reconfigurations.
  double volume_;

  // Tracks the current playback state.
  enum PlaybackState { kStopped, kStarted, kPlaying };
  PlaybackState state_;

  // Closure that calls OnSetFormat() on |client_| on the renderer thread.
  base::Closure set_format_cb_;

  // When set via setClient() it overrides |sink_| for consuming audio.
  blink::WebAudioSourceProviderClient* client_;

  // Where audio ends up unless overridden by |client_|.
  base::Lock sink_lock_;
  scoped_refptr<SwitchableAudioRendererSink> sink_;
  // Scratch AudioBus -- assumed to wrap the float channel pointers handed to
  // provideInput(); TODO confirm in the .cc file.
  std::unique_ptr<AudioBus> bus_wrapper_;

  // An inner class acting as a T filter where actual data can be tapped.
  class TeeFilter;
  const std::unique_ptr<TeeFilter> tee_filter_;

  // Shared-ownership handle for emitting media log events.
  const scoped_refptr<MediaLog> media_log_;

  // NOTE: Weak pointers must be invalidated before all other member variables.
  base::WeakPtrFactory<WebAudioSourceProviderImpl> weak_factory_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(WebAudioSourceProviderImpl);
};

}  // namespace media

#endif  // MEDIA_BLINK_WEBAUDIOSOURCEPROVIDER_IMPL_H_
{ "content_hash": "f795c95c6be59410e48b5c35fc2d3882", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 80, "avg_line_length": 38.84, "alnum_prop": 0.73043254376931, "repo_name": "google-ar/WebARonARCore", "id": "aa4359547d86cc900dc0018d13b936b290db97b8", "size": "4536", "binary": false, "copies": "3", "ref": "refs/heads/webarcore_57.0.2987.5", "path": "media/blink/webaudiosourceprovider_impl.h", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<html lang="en"> <head> <title>Extern - Using as</title> <meta http-equiv="Content-Type" content="text/html"> <meta name="description" content="Using as"> <meta name="generator" content="makeinfo 4.11"> <link title="Top" rel="start" href="index.html#Top"> <link rel="up" href="Pseudo-Ops.html#Pseudo-Ops" title="Pseudo Ops"> <link rel="prev" href="Exitm.html#Exitm" title="Exitm"> <link rel="next" href="Fail.html#Fail" title="Fail"> <link href="http://www.gnu.org/software/texinfo/" rel="generator-home" title="Texinfo Homepage"> <!-- This file documents the GNU Assembler "as". Copyright (C) 1991-2013 Free Software Foundation, Inc. Permission is granted to copy, distribute and/or modify this document under the terms of the GNU Free Documentation License, Version 1.3 or any later version published by the Free Software Foundation; with no Invariant Sections, with no Front-Cover Texts, and with no Back-Cover Texts. A copy of the license is included in the section entitled ``GNU Free Documentation License''. 
--> <meta http-equiv="Content-Style-Type" content="text/css"> <style type="text/css"><!-- pre.display { font-family:inherit } pre.format { font-family:inherit } pre.smalldisplay { font-family:inherit; font-size:smaller } pre.smallformat { font-family:inherit; font-size:smaller } pre.smallexample { font-size:smaller } pre.smalllisp { font-size:smaller } span.sc { font-variant:small-caps } span.roman { font-family:serif; font-weight:normal; } span.sansserif { font-family:sans-serif; font-weight:normal; } --></style> </head> <body> <div class="node"> <p> <a name="Extern"></a> Next:&nbsp;<a rel="next" accesskey="n" href="Fail.html#Fail">Fail</a>, Previous:&nbsp;<a rel="previous" accesskey="p" href="Exitm.html#Exitm">Exitm</a>, Up:&nbsp;<a rel="up" accesskey="u" href="Pseudo-Ops.html#Pseudo-Ops">Pseudo Ops</a> <hr> </div> <h3 class="section">7.51 <code>.extern</code></h3> <p><a name="index-g_t_0040code_007bextern_007d-directive-333"></a><code>.extern</code> is accepted in the source program&mdash;for compatibility with other assemblers&mdash;but it is ignored. <samp><span class="command">as</span></samp> treats all undefined symbols as external. </body></html>
{ "content_hash": "ec07eccb256cd7a90f9fc0ce9ed3b1cb", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 144, "avg_line_length": 40.527272727272724, "alnum_prop": 0.7160161507402423, "repo_name": "trfiladelfo/tdk", "id": "302c35d2a72462705e005df0abfbd22509c7017e", "size": "2229", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "gcc-arm-none-eabi/share/doc/gcc-arm-none-eabi/html/as.html/Extern.html", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "614531" }, { "name": "Batchfile", "bytes": "101839" }, { "name": "C", "bytes": "12540389" }, { "name": "C++", "bytes": "13332391" }, { "name": "CSS", "bytes": "140569" }, { "name": "HTML", "bytes": "23954553" }, { "name": "Logos", "bytes": "8877" }, { "name": "Makefile", "bytes": "129672" }, { "name": "Perl", "bytes": "9844" }, { "name": "Python", "bytes": "180880" }, { "name": "Scheme", "bytes": "3970" }, { "name": "Shell", "bytes": "10777" }, { "name": "Tcl", "bytes": "128365" }, { "name": "XC", "bytes": "8384" }, { "name": "XS", "bytes": "8334" }, { "name": "XSLT", "bytes": "221100" } ], "symlink_target": "" }
title: RDF Data Cube Vocabulary slug: rdf-data-cube-vocabulary description: <p>The standard provides a means to publish multi-dimensional data, such as statistics, on the web in such a way that it can be linked to related data sets and concepts using the W3C RDF (Resource Description Framework) standard. The model underpinning the Data Cube vocabulary is compatible with the cube model that underlies SDMX (Statistical Data and Metadata eXchange), an ISO standard for exchanging and sharing statistical data and metadata among organizations.</p> website: http://www.w3.org/TR/vocab-data-cube/ subjects: - general disciplines: - statistics sponsors: - name: http://www.w3.org url: http://www.w3.org - name: Government Linked Data Working Group url: http://www.w3.org/2011/gld/ layout: standard type: standard ---
{ "content_hash": "7100503bbd883a33c476f4f03a21b093", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 88, "avg_line_length": 37.68181818181818, "alnum_prop": 0.7720144752714113, "repo_name": "mtpain/metadata-directory", "id": "0a1a78582431f1dad4a834b82184d35dec1fe502", "size": "833", "binary": false, "copies": "2", "ref": "refs/heads/gh-pages", "path": "standards/rdf-data-cube-vocabulary.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "13804" }, { "name": "HTML", "bytes": "42392" }, { "name": "JavaScript", "bytes": "37269" }, { "name": "Ruby", "bytes": "3087" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "ee25476a0691065cb27184163fe26188", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "f245c0e04562317af3300dbdc6128ad3bc50b2f5", "size": "197", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Commelinales/Commelinaceae/Floscopa/Floscopa peruviana/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
require 'spec_helper'

# Specs for Sprig::ProcessNotifier: verifies the log output produced while
# seeds are planted (per-seed success/error lines plus the final summary).
# Fix: the descriptions previously misspelled "summary" as "summery".
describe Sprig::ProcessNotifier do
  it_behaves_like "a logging entity" do
    subject { described_class.new }
  end

  describe "#success" do
    let(:seed) { double('Seed', success_log_text: 'I am a teapot.') }

    it "logs the seed's success message" do
      log_should_receive(:info, with: 'I am a teapot.')

      subject.success(seed)
    end
  end

  describe "#error" do
    let(:errors) { double('Errors', messages: 'error messages') }
    let(:seed_record) { double('Record', to_s: 'Seed Record', errors: errors) }
    let(:seed) { double('Seed', error_log_text: 'I am a teapot.', record: seed_record) }

    it "logs the seed's error message and error details" do
      # Message, record, and error details must appear in this order.
      log_should_receive(:error, with: 'I am a teapot.').ordered
      log_should_receive(:error, with: 'Seed Record').ordered
      log_should_receive(:error, with: 'error messages').ordered

      subject.error(seed)
    end
  end

  describe "#finished" do
    it "logs a complete message" do
      log_should_receive(:debug, with: 'Seeding complete.')

      subject.finished
    end

    context "when records are saved successfully" do
      let(:seed) { double('Seed', success_log_text: 'I am a teapot.') }

      before do
        subject.success(seed)
      end

      it "logs a summary of successful saves" do
        log_should_receive(:info, with: '1 seed successfully planted.')

        subject.finished
      end
    end

    context "when no records are saved successfully" do
      it "logs a summary of successful saves" do
        log_should_receive(:error, with: '0 seeds successfully planted.')

        subject.finished
      end
    end

    context "when there is an error saving a record" do
      let(:errors) { double('Errors', messages: 'error messages') }
      let(:seed_record) { double('Record', to_s: 'Seed Record', errors: errors) }
      let(:seed) { double('Seed', error_log_text: 'I am a teapot.', record: seed_record) }

      before do
        subject.error(seed)
      end

      it "logs a summary of errors" do
        # Summary count first, then the per-record failure details.
        log_should_receive(:error, with: '0 seeds successfully planted.').ordered
        log_should_receive(:error, with: "1 seed couldn't be planted:").ordered
        log_should_receive(:error, with: 'Seed Record').ordered
        log_should_receive(:error, with: "error messages\n").ordered

        subject.finished
      end
    end
  end

  describe "#in_progress" do
    let(:seed) { Sprig::Seed::Entry.new(Post, { title: "Hello World!", content: "Stuff", sprig_id: 1 }, {}) }

    it "logs an in-progress message" do
      log_should_receive(:debug, with: "Planting Post with sprig_id 1")

      subject.in_progress(seed)
    end
  end
end
{ "content_hash": "1046c4d874741da098ab2c26036eb634", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 109, "avg_line_length": 30.033333333333335, "alnum_prop": 0.633370329263781, "repo_name": "ericbrooke/sprig", "id": "3dc037553e754c3386276cf971e14114a538a558", "size": "2703", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/lib/sprig/process_notifier_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "59472" } ], "symlink_target": "" }
from pages.internal_page import InternalPage
from pages.blocks.film_form import FilmForm
from selenium.webdriver.support.select import Select


class CreateFilmPage(InternalPage):
    """Page object for the film-creation page.

    Exposes the film form block (``film_form``) on top of the behavior
    inherited from :class:`InternalPage`.
    """

    def __init__(self, driver, base_url):
        # Run the base page-object initialization first so the driver and
        # base URL are available on ``self``.
        InternalPage.__init__(self, driver, base_url)
        # Bind the film form block to the same driver/base URL as the page.
        self.film_form = FilmForm(self.driver, self.base_url)
{ "content_hash": "6da6ff6052ddc09ad35513b337fc713c", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 62, "avg_line_length": 34.7, "alnum_prop": 0.7435158501440923, "repo_name": "katyast/Se-Python-17-Stoliarova", "id": "7c1edc400bc973a5d02c89c90407cb345e7caf28", "size": "347", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pages/create_film_page.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "14437" } ], "symlink_target": "" }
package com.greenpepper.phpsud.exceptions;

import java.io.IOException;

/**
 * {@link IOException} subtype signalling a timeout in the PHP
 * system-under-development layer. No extra state is carried; the type itself
 * is the signal, letting callers catch timeouts separately from other I/O
 * failures. (Exact trigger is name-based -- confirm against the code that
 * throws it.)
 *
 * @author Bertrand Paquet
 * @version $Id: $Id
 */
public class PHPTimeoutException extends IOException {

	// Fixed serialization id; must not change, as serialized instances may exist.
	private static final long serialVersionUID = 6952107565934001419L;

}
{ "content_hash": "f3bc260263999c93fc2e92cc1c614be0", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 67, "avg_line_length": 18.5625, "alnum_prop": 0.7508417508417509, "repo_name": "strator-dev/greenpepper-open", "id": "da2469f2625f5508b67b03bf6676ff1aba5f3f93", "size": "1209", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "extensions-external/php/src/main/java/com/greenpepper/phpsud/exceptions/PHPTimeoutException.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "678261" }, { "name": "Java", "bytes": "572376" }, { "name": "JavaScript", "bytes": "63878" }, { "name": "Shell", "bytes": "147" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "0bd285d1030e7212e5739a382f1a470b", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "ac97bd122f70bf50704a9c1420b88fdda10c1fac", "size": "187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Sphaerocephalus/Sphaerocephalus chamaecephalus/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
Contributors to repositories hosted in `openregister` are expected to follow the Contributor Covenant Code of Conduct, and those working within Government are also expected to follow the Civil Service Code ## Civil Service Code The [Civil Service Code](https://www.gov.uk/government/publications/civil-service-code/the-civil-service-code) ## Contributor Covenant Code of Conduct > Note: > * where the code of conduct says "project" we mean GDS, `openregister` and all repositories hosted within it. > * where the code of conduct says "maintainer" we mean `openregister` organisation owners > * where the code of conduct says "leadership" we mean both `openregister` organisation owners, line managers, and other leadership within GDS ### Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
### Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ### Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ### Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ### Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [email protected]. 
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ### Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org
{ "content_hash": "f9d67541ff302e1c0b03dfef5d29a325", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 143, "avg_line_length": 45.77906976744186, "alnum_prop": 0.8138176276352552, "repo_name": "openregister/openregister-java", "id": "b5fd473383d2dd79e36fe0261b1d8ed02e696c92", "size": "3975", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "CODE_OF_CONDUCT.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "28218" }, { "name": "HTML", "bytes": "23329" }, { "name": "Java", "bytes": "920716" }, { "name": "JavaScript", "bytes": "1058" }, { "name": "PLpgSQL", "bytes": "816" }, { "name": "Python", "bytes": "907" }, { "name": "Shell", "bytes": "6476" }, { "name": "TSQL", "bytes": "51" } ], "symlink_target": "" }
/*****************************************************************************/ /* Include files */ /*****************************************************************************/ #include "simplelink.h" #ifndef __NETAPP_H__ #define __NETAPP_H__ #ifdef __cplusplus extern "C" { #endif /*! \addtogroup netapp @{ */ /*****************************************************************************/ /* Macro declarations */ /*****************************************************************************/ /*ERROR code*/ #define SL_ERROR_NETAPP_RX_BUFFER_LENGTH_ERROR (-230) /* Http Server interface */ #define MAX_INPUT_STRING (64) /* because of WPA */ #define MAX_AUTH_NAME_LEN (20) #define MAX_AUTH_PASSWORD_LEN (20) #define MAX_AUTH_REALM_LEN (20) #define MAX_DEVICE_URN_LEN (15+1) #define MAX_DOMAIN_NAME_LEN (24+1) #define MAX_ACTION_LEN (30) /* Important: in case the max len is changed, make sure the struct sl_NetAppHttpServerSendToken_t in protocol.h is padded correctly! */ #define MAX_TOKEN_NAME_LEN (20) #define MAX_TOKEN_VALUE_LEN MAX_INPUT_STRING #define NETAPP_MAX_SERVICE_TEXT_SIZE (256) #define NETAPP_MAX_SERVICE_NAME_SIZE (60) #define NETAPP_MAX_SERVICE_HOST_NAME_SIZE (64) /* Server Responses */ #define SL_NETAPP_RESPONSE_NONE (0) #define SL_NETAPP_HTTPSETTOKENVALUE (1) #define SL_NETAPP_FAMILY_MASK (0x80) /* mDNS types */ #define SL_NET_APP_MASK_IPP_TYPE_OF_SERVICE (0x00000001) #define SL_NET_APP_MASK_DEVICE_INFO_TYPE_OF_SERVICE (0x00000002) #define SL_NET_APP_MASK_HTTP_TYPE_OF_SERVICE (0x00000004) #define SL_NET_APP_MASK_HTTPS_TYPE_OF_SERVICE (0x00000008) #define SL_NET_APP_MASK_WORKSATION_TYPE_OF_SERVICE (0x00000010) #define SL_NET_APP_MASK_GUID_TYPE_OF_SERVICE (0x00000020) #define SL_NET_APP_MASK_H323_TYPE_OF_SERVICE (0x00000040) #define SL_NET_APP_MASK_NTP_TYPE_OF_SERVICE (0x00000080) #define SL_NET_APP_MASK_OBJECITVE_TYPE_OF_SERVICE (0x00000100) #define SL_NET_APP_MASK_RDP_TYPE_OF_SERVICE (0x00000200) #define SL_NET_APP_MASK_REMOTE_TYPE_OF_SERVICE (0x00000400) 
#define SL_NET_APP_MASK_RTSP_TYPE_OF_SERVICE (0x00000800) #define SL_NET_APP_MASK_SIP_TYPE_OF_SERVICE (0x00001000) #define SL_NET_APP_MASK_SMB_TYPE_OF_SERVICE (0x00002000) #define SL_NET_APP_MASK_SOAP_TYPE_OF_SERVICE (0x00004000) #define SL_NET_APP_MASK_SSH_TYPE_OF_SERVICE (0x00008000) #define SL_NET_APP_MASK_TELNET_TYPE_OF_SERVICE (0x00010000) #define SL_NET_APP_MASK_TFTP_TYPE_OF_SERVICE (0x00020000) #define SL_NET_APP_MASK_XMPP_CLIENT_TYPE_OF_SERVICE (0x00040000) #define SL_NET_APP_MASK_RAOP_TYPE_OF_SERVICE (0x00080000) #define SL_NET_APP_MASK_ALL_TYPE_OF_SERVICE (0xFFFFFFFF) /********************************************************************************************************/ /* sl_NetAppDnsGetHostByName error codes */ #define SL_NET_APP_DNS_QUERY_NO_RESPONSE (-159) /* DNS query failed, no response */ #define SL_NET_APP_DNS_ERROR (-160) /* DNS internal error */ #define SL_NET_APP_DNS_NO_SERVER (-161) /* No DNS server was specified */ #define SL_NET_APP_DNS_TIMEOUT (-162) /* DNS timeout occured */ #define SL_NET_APP_DNS_QUERY_FAILED (-163) /* DNS query failed; no DNS server sent an 'answer' */ #define SL_NET_APP_DNS_BAD_ADDRESS_ERROR (-164) /* Improperly formatted IPv4 or IPv6 address */ #define SL_NET_APP_DNS_SIZE_ERROR (-165) /* DNS destination size is too small */ #define SL_NET_APP_DNS_MALFORMED_PACKET (-166) /* Improperly formed or corrupted DNS packet received */ #define SL_NET_APP_DNS_BAD_ID_ERROR (-167) /* DNS packet from server does not match query ID */ #define SL_NET_APP_DNS_PARAM_ERROR (-168) /* Invalid non pointer input to API */ #define SL_NET_APP_DNS_SERVER_NOT_FOUND (-169) /* Server not found in Client list of DNS servers */ #define SL_NET_APP_DNS_PACKET_CREATE_ERROR (-170) /* Error creating DNS packet */ #define SL_NET_APP_DNS_EMPTY_DNS_SERVER_LIST (-171) /* DNS Client's list of DNS servers is empty */ #define SL_NET_APP_DNS_SERVER_AUTH_ERROR (-172) /* Server not able to authenticate answer/authority data*/ #define 
SL_NET_APP_DNS_ZERO_GATEWAY_IP_ADDRESS (-173) /* DNS Client IP instance has a zero gateway IP address */ #define SL_NET_APP_DNS_MISMATCHED_RESPONSE (-174) /* Server response type does not match the query request*/ #define SL_NET_APP_DNS_DUPLICATE_ENTRY (-175) /* Duplicate entry exists in DNS server table */ #define SL_NET_APP_DNS_RETRY_A_QUERY (-176) /* SOA status returned; web site only exists as IPv4 */ #define SL_NET_APP_DNS_NO_ANSWER (-177) /* No response for one-shot query */ #define SL_NET_APP_DNS_NO_KNOWN_ANSWER (-178) /* No known answer for query */ #define SL_NET_APP_DNS_NAME_MISMATCH (-179) /* Illegal service name according to the RFC */ #define SL_NET_APP_DNS_NOT_STARTED (-180) /* mDNS is not running */ #define SL_NET_APP_DNS_HOST_NAME_ERROR (-181) /* Host name error. Host name format is not allowed according to RFC 1033,1034,1035, 6763 */ #define SL_NET_APP_DNS_NO_MORE_ENTRIES (-182) /* No more entries be found. */ #define SL_NET_APP_DNS_MAX_SERVICES_ERROR (-200) /* Maximum advertise services are already configured */ #define SL_NET_APP_DNS_IDENTICAL_SERVICES_ERROR (-201) /* Trying to register a service that is already exists */ #define SL_NET_APP_DNS_NOT_EXISTED_SERVICE_ERROR (-203) /* Trying to delete service that does not existed */ #define SL_NET_APP_DNS_ERROR_SERVICE_NAME_ERROR (-204) /* Illegal service name according to the RFC */ #define SL_NET_APP_DNS_RX_PACKET_ALLOCATION_ERROR (-205) /* Retry request */ #define SL_NET_APP_DNS_BUFFER_SIZE_ERROR (-206) /* List size buffer is bigger than internally allowed in the NWP */ #define SL_NET_APP_DNS_NET_APP_SET_ERROR (-207) /* Illegal length of one of the mDNS Set functions */ #define SL_NET_APP_DNS_GET_SERVICE_LIST_FLAG_ERROR (-208) #define SL_NET_APP_DNS_NO_CONFIGURATION_ERROR (-209) /* Set Dev name error codes (NETAPP_SET_GET_DEV_CONF_OPT_DEVICE_URN) */ #define SL_ERROR_DEVICE_NAME_LEN_ERR (-117) #define SL_ERROR_DEVICE_NAME_INVALID (-118) /* Set domain name error codes 
(NETAPP_SET_GET_DEV_CONF_OPT_DOMAIN_NAME) */ #define SL_ERROR_DOMAIN_NAME_LEN_ERR (-119) #define SL_ERROR_DOMAIN_NAME_INVALID (-120) /********************************************************************************************************/ /* NetApp application IDs */ #define SL_NET_APP_HTTP_SERVER_ID (1) #define SL_NET_APP_DHCP_SERVER_ID (2) #define SL_NET_APP_MDNS_ID (4) /* #define SL_NET_APP_DNS_SERVER_ID (8) */ #define SL_NET_APP_DEVICE_CONFIG_ID (16) /* NetApp application set/get options */ #define NETAPP_SET_DHCP_SRV_BASIC_OPT (0) /* HTTP server set/get options */ #define NETAPP_SET_GET_HTTP_OPT_PORT_NUMBER (0) #define NETAPP_SET_GET_HTTP_OPT_AUTH_CHECK (1) #define NETAPP_SET_GET_HTTP_OPT_AUTH_NAME (2) #define NETAPP_SET_GET_HTTP_OPT_AUTH_PASSWORD (3) #define NETAPP_SET_GET_HTTP_OPT_AUTH_REALM (4) #define NETAPP_SET_GET_HTTP_OPT_ROM_PAGES_ACCESS (5) #define NETAPP_SET_GET_MDNS_CONT_QUERY_OPT (1) #define NETAPP_SET_GET_MDNS_QEVETN_MASK_OPT (2) #define NETAPP_SET_GET_MDNS_TIMING_PARAMS_OPT (3) /* DNS server set/get options */ #define NETAPP_SET_GET_DNS_OPT_DOMAIN_NAME (0) /* Device Config set/get options */ #define NETAPP_SET_GET_DEV_CONF_OPT_DEVICE_URN (0) #define NETAPP_SET_GET_DEV_CONF_OPT_DOMAIN_NAME (1) /*****************************************************************************/ /* Structure/Enum declarations */ /*****************************************************************************/ typedef struct { _u32 PacketsSent; _u32 PacketsReceived; _u16 MinRoundTime; _u16 MaxRoundTime; _u16 AvgRoundTime; _u32 TestTime; }SlPingReport_t; typedef struct { _u32 PingIntervalTime; /* delay between pings, in milliseconds */ _u16 PingSize; /* ping packet size in bytes */ _u16 PingRequestTimeout; /* timeout time for every ping in milliseconds */ _u32 TotalNumberOfAttempts; /* max number of ping requests. 0 - forever */ _u32 Flags; /* flag - 0 report only when finished, 1 - return response for every ping, 2 - stop after 1 successful ping. 
*/ _u32 Ip; /* IPv4 address or IPv6 first 4 bytes */ _u32 Ip1OrPaadding; _u32 Ip2OrPaadding; _u32 Ip3OrPaadding; }SlPingStartCommand_t; typedef struct _slHttpServerString_t { _u8 len; _u8 *data; } slHttpServerString_t; typedef struct _slHttpServerData_t { _u8 value_len; _u8 name_len; _u8 *token_value; _u8 *token_name; } slHttpServerData_t; typedef struct _slHttpServerPostData_t { slHttpServerString_t action; slHttpServerString_t token_name; slHttpServerString_t token_value; }slHttpServerPostData_t; typedef union { slHttpServerString_t httpTokenName; /* SL_NETAPP_HTTPGETTOKENVALUE */ slHttpServerPostData_t httpPostData; /* SL_NETAPP_HTTPPOSTTOKENVALUE */ } SlHttpServerEventData_u; typedef union { slHttpServerString_t token_value; } SlHttpServerResponsedata_u; typedef struct { _u32 Event; SlHttpServerEventData_u EventData; }SlHttpServerEvent_t; typedef struct { _u32 Response; SlHttpServerResponsedata_u ResponseData; }SlHttpServerResponse_t; typedef struct { _u32 lease_time; _u32 ipv4_addr_start; _u32 ipv4_addr_last; }SlNetAppDhcpServerBasicOpt_t; /*mDNS parameters*/ typedef enum { SL_NET_APP_FULL_SERVICE_WITH_TEXT_IPV4_TYPE = 1, SL_NET_APP_FULL_SERVICE_IPV4_TYPE, SL_NET_APP_SHORT_SERVICE_IPV4_TYPE } SlNetAppGetServiceListType_e; typedef struct { _u32 service_ipv4; _u16 service_port; _u16 Reserved; }SlNetAppGetShortServiceIpv4List_t; typedef struct { _u32 service_ipv4; _u16 service_port; _u16 Reserved; _u8 service_name[NETAPP_MAX_SERVICE_NAME_SIZE]; _u8 service_host[NETAPP_MAX_SERVICE_HOST_NAME_SIZE]; }SlNetAppGetFullServiceIpv4List_t; typedef struct { _u32 service_ipv4; _u16 service_port; _u16 Reserved; _u8 service_name[NETAPP_MAX_SERVICE_NAME_SIZE]; _u8 service_host[NETAPP_MAX_SERVICE_HOST_NAME_SIZE]; _u8 service_text[NETAPP_MAX_SERVICE_TEXT_SIZE]; }SlNetAppGetFullServiceWithTextIpv4List_t; typedef struct { /*The below parameters are used to configure the advertise times and interval For example: If: Period is set to T Repetitions are set to P Telescopic factor is 
K=2 The transmission shall be: advertise P times wait T advertise P times wait 4 * T advertise P time wait 16 * T ... (till max time reached / configuration changed / query issued) */ _u32 t; /* Number of ticks for the initial period. Default is 100 ticks for 1 second. */ _u32 p; /* Number of repetitions. Default value is 1 */ _u32 k; /* Telescopic factor. Default value is 2. */ _u32 RetransInterval;/* Announcing retransmission interval */ _u32 Maxinterval; /* Announcing max period interval */ _u32 max_time; /* Announcing max time */ }SlNetAppServiceAdvertiseTimingParameters_t; /*****************************************************************************/ /* Types declarations */ /*****************************************************************************/ typedef void (*P_SL_DEV_PING_CALLBACK)(SlPingReport_t*); /*****************************************************************************/ /* Function prototypes */ /*****************************************************************************/ /*! \brief Starts a network application Gets and starts network application for the current WLAN mode \param[in] AppBitMap application bitmap, could be one or combination of the following: \n - SL_NET_APP_HTTP_SERVER_ID - SL_NET_APP_DHCP_SERVER_ID - SL_NET_APP_MDNS_ID \return On error, negative number is returned \sa Stop one or more the above started applications using sl_NetAppStop \note This command activates the application for the current WLAN mode (AP or STA) \warning \par Example: \code For example: Starting internal HTTP server + DHCP server: sl_NetAppStart(SL_NET_APP_HTTP_SERVER_ID | SL_NET_APP_DHCP_SERVER_ID) \endcode */ #if _SL_INCLUDE_FUNC(sl_NetAppStart) _i16 sl_NetAppStart(const _u32 AppBitMap); #endif /*! 
\brief Stops a network application Gets and stops network application for the current WLAN mode \param[in] AppBitMap application id, could be one of the following: \n - SL_NET_APP_HTTP_SERVER_ID - SL_NET_APP_DHCP_SERVER_ID - SL_NET_APP_MDNS_ID \return On error, negative number is returned \sa \note This command disables the application for the current active WLAN mode (AP or STA) \warning \par Example: \code For example: Stopping internal HTTP server: sl_NetAppStop(SL_NET_APP_HTTP_SERVER_ID); \endcode */ #if _SL_INCLUDE_FUNC(sl_NetAppStop) _i16 sl_NetAppStop(const _u32 AppBitMap); #endif /*! \brief Get host IP by name Obtain the IP Address of machine on network, by machine name. \param[in] hostname host name \param[in] usNameLen name length \param[out] out_ip_addr This parameter is filled in with host IP address. In case that host name is not resolved, out_ip_addr is zero. \param[in] family protocol family \return On success, 0 is returned. On error, negative is returned SL_POOL_IS_EMPTY may be return in case there are no resources in the system In this case try again later or increase MAX_CONCURRENT_ACTIONS Possible DNS error codes: - SL_NET_APP_DNS_QUERY_NO_RESPONSE - SL_NET_APP_DNS_NO_SERVER - SL_NET_APP_DNS_QUERY_FAILED - SL_NET_APP_DNS_MALFORMED_PACKET - SL_NET_APP_DNS_MISMATCHED_RESPONSE \sa \note Only one sl_NetAppDnsGetHostByName can be handled at a time. Calling this API while the same command is called from another thread, may result in one of the two scenarios: 1. The command will wait (internal) until the previous command finish, and then be executed. 2. There are not enough resources and POOL_IS_EMPTY error will return. In this case, MAX_CONCURRENT_ACTIONS can be increased (result in memory increase) or try again later to issue the command. \warning In case an IP address in a string format is set as input, without any prefix (e.g. 
"1.2.3.4") the device will not try to access the DNS and it will return the input address on the 'out_ip_addr' field \par Example: \code _u32 DestinationIP; sl_NetAppDnsGetHostByName("www.google.com", strlen("www.google.com"), &DestinationIP,SL_AF_INET); Addr.sin_family = SL_AF_INET; Addr.sin_port = sl_Htons(80); Addr.sin_addr.s_addr = sl_Htonl(DestinationIP); AddrSize = sizeof(SlSockAddrIn_t); SockID = sl_Socket(SL_AF_INET,SL_SOCK_STREAM, 0); \endcode */ #if _SL_INCLUDE_FUNC(sl_NetAppDnsGetHostByName) _i16 sl_NetAppDnsGetHostByName(_i8 * hostname,const _u16 usNameLen, _u32* out_ip_addr,const _u8 family ); #endif /*! \brief Return service attributes like IP address, port and text according to service name \par The user sets a service name Full/Part (see example below), and should get: - IP of service - The port of service - The text of service Hence it can make a connection to the specific service and use it. It is similar to get host by name method. It is done by a single shot query with PTR type on the service name. The command that is sent is from constant parameters and variables parameters. \param[in] pService Service name can be full or partial. \n Example for full service name: 1. PC1._ipp._tcp.local 2. PC2_server._ftp._tcp.local \n . Example for partial service name: 1. _ipp._tcp.local 2. _ftp._tcp.local \param[in] ServiceLen The length of the service name (in_pService). \param[in] Family IPv4 or IPv6 (SL_AF_INET , SL_AF_INET6). \param[out] pAddr Contains the IP address of the service. \param[out] pPort Contains the port of the service. \param[out] pTextLen Has 2 options. One as Input field and the other one as output: - Input: \n Contains the max length of the text that the user wants to get.\n It means that if the text len of service is bigger that its value than the text is cut to inout_TextLen value. - Output: \n Contain the length of the text that is returned. Can be full text or part of the text (see above). 
\param[out] pOut_pText Contains the text of the service full or partial \return On success, zero is returned SL_POOL_IS_EMPTY may be return in case there are no resources in the system In this case try again later or increase MAX_CONCURRENT_ACTIONS In case No service is found error SL_NET_APP_DNS_NO_ANSWER will be returned \note The returns attributes belongs to the first service found. There may be other services with the same service name that will response to the query. The results of these responses are saved in the peer cache of the Device and should be read by another API. Only one sl_NetAppDnsGetHostByService can be handled at a time. Calling this API while the same command is called from another thread, may result in one of the two scenarios: 1. The command will wait (internal) until the previous command finish, and then be executed. 2. There are not enough resources and SL_POOL_IS_EMPTY error will return. In this case, MAX_CONCURRENT_ACTIONS can be increased (result in memory increase) or try again later to issue the command. \warning Text length can be 120 bytes only */ #if _SL_INCLUDE_FUNC(sl_NetAppDnsGetHostByService) _i32 sl_NetAppDnsGetHostByService(_i8 *pServiceName, /* string containing all (or only part): name + subtype + service */ const _u8 ServiceLen, const _u8 Family, /* 4-IPv4 , 16-IPv6 */ _u32 pAddr[], _u32 *pPort, _u16 *pTextLen, /* in: max len , out: actual len */ _i8 *pText ); #endif /*! \brief Get service List Insert into out pBuffer a list of peer's services that are the NWP. The list is in a form of service struct. The user should chose the type of the service struct like: - Full service parameters with text. - Full service parameters. - Short service parameters (port and IP only) especially for tiny hosts. The different types of struct are made to give the Possibility to save memory in the host The user also chose how many max services to get and start point index NWP peer cache. For example: 1. 
Get max of 3 full services from index 0.Up to 3 full services from index 0 are inserted into pBuffer (services that are in indexes 0,1,2). 2. Get max of 4 full services from index 3.Up to 4 full services from index 3 are inserted into pBuffer (services that are in indexes 3,4,5,6). 3. Get max of 2 int services from index 6.Up to 2 int services from index 6 are inserted into pBuffer (services that are in indexes 6,7). See below - command parameters. \param[in] indexOffset - The start index in the peer cache that from it the first service is returned. \param[in] MaxServiceCount - The Max services that can be returned if existed or if not exceed the max index in the peer cache \param[in] Flags - an ENUM number that means which service struct to use (means which types of service to fill) - use SlNetAppGetFullServiceWithTextIpv4List_t - use SlNetAppGetFullServiceIpv4List_t - use SlNetAppGetShortServiceIpv4List_t \param[out] Buffer - The Services are inserted into this buffer. In the struct form according to the bit that is set in the Flags input parameter. \return ServiceFoundCount - The number of the services that were inserted into the buffer. zero means no service is found negative number means an error \sa sl_NetAppMDNSRegisterService \note \warning if the out pBuffer size is bigger than an RX packet(1480), than an error is returned because there is no place in the RX packet. The size is a multiply of MaxServiceCount and size of service struct(that is set according to flag value). */ #if _SL_INCLUDE_FUNC(sl_NetAppGetServiceList) _i16 sl_NetAppGetServiceList(const _u8 IndexOffest, const _u8 MaxServiceCount, const _u8 Flags, _i8 *pBuffer, const _u32 RxBufferLength ); #endif /*! \brief Unregister mDNS service This function deletes the mDNS service from the mDNS package and the database. The mDNS service that is to be unregistered is a service that the application no longer wishes to provide. 
\n The service name should be the full service name according to RFC of the DNS-SD - meaning the value in name field in the SRV answer. Examples for service names: 1. PC1._ipp._tcp.local 2. PC2_server._ftp._tcp.local \param[in] pServiceName Full service name. \n Example for service name: 1. PC1._ipp._tcp.local 2. PC2_server._ftp._tcp.local \param[in] ServiceLen The length of the service. \return On success, zero is returned \sa sl_NetAppMDNSRegisterService \note \warning The size of the service length should be smaller than 255. */ #if _SL_INCLUDE_FUNC(sl_NetAppMDNSUnRegisterService) _i16 sl_NetAppMDNSUnRegisterService(const _i8 *pServiceName,const _u8 ServiceNameLen); #endif /*! \brief Register a new mDNS service \par This function registers a new mDNS service to the mDNS package and the DB. This registered service is a service offered by the application. The service name should be full service name according to RFC of the DNS-SD - meaning the value in name field in the SRV answer. Example for service name: 1. PC1._ipp._tcp.local 2. PC2_server._ftp._tcp.local If the option is_unique is set, mDNS probes the service name to make sure it is unique before starting to announce the service on the network. Instance is the instance portion of the service name. \param[in] ServiceLen The length of the service. \param[in] TextLen The length of the service should be smaller than 64. \param[in] port The port on this target host port. \param[in] TTL The TTL of the service \param[in] Options bitwise parameters: \n - bit 0 - service is unique (means that the service needs to be unique) - bit 31 - for internal use if the service should be added or deleted (set means ADD). - bit 1-30 for future. \param[in] pServiceName The service name. Example for service name: \n 1. PC1._ipp._tcp.local 2. PC2_server._ftp._tcp.local \param[in] pText The description of the service. should be as mentioned in the RFC (according to type of the service IPP,FTP...) 
\return On success, zero is returned Possible error codes: - Maximum advertise services are already configured. Delete another existed service that is registered and then register again the new service - Trying to register a service that is already exists - Trying to delete service that does not existed - Illegal service name according to the RFC - Retry request - Illegal length of one of the mDNS Set functions - mDNS is not operational as the device has no IP.Connect the device to an AP to get an IP address. - mDNS parameters error - mDNS internal cache error - mDNS internal error - Adding a service is not allowed as it is already exist (duplicate service) - mDNS is not running - Host name error. Host name format is not allowed according to RFC 1033,1034,1035, 6763 - List size buffer is bigger than internally allowed in the NWP (API get service list), change the APIs’ parameters to decrease the size of the list \sa sl_NetAppMDNSUnRegisterService \warning 1) Temporary - there is an allocation on stack of internal buffer. Its size is NETAPP_MDNS_MAX_SERVICE_NAME_AND_TEXT_LENGTH. \n It means that the sum of the text length and service name length cannot be bigger than NETAPP_MDNS_MAX_SERVICE_NAME_AND_TEXT_LENGTH.\n If it is - An error is returned. \n 2) According to now from certain constraints the variables parameters are set in the attribute part (contain constant parameters) */ #if _SL_INCLUDE_FUNC(sl_NetAppMDNSRegisterService) _i16 sl_NetAppMDNSRegisterService( const _i8* pServiceName, const _u8 ServiceNameLen, const _i8* pText, const _u8 TextLen, const _u16 Port, const _u32 TTL, _u32 Options); #endif /*! \brief send ICMP ECHO_REQUEST to network hosts Ping uses the ICMP protocol's mandatory ECHO_REQUEST \param[in] pPingParams Pointer to the ping request structure: \n - if flags parameter is set to 0, ping will report back once all requested pings are done (as defined by TotalNumberOfAttempts). 
\n - if flags parameter is set to 1, ping will report back after every ping, for TotalNumberOfAttempts. - if flags parameter is set to 2, ping will stop after the first successful ping, and report back for the successful ping, as well as any preceding failed ones. For stopping an ongoing ping activity, set parameters IP address to 0 \param[in] family SL_AF_INET or SL_AF_INET6 \param[out] pReport Ping pReport \param[out] pCallback Callback function upon completion. If callback is NULL, the API is blocked until data arrives \return On success, zero is returned. On error, -1 is returned SL_POOL_IS_EMPTY may be return in case there are no resources in the system In this case try again later or increase MAX_CONCURRENT_ACTIONS \sa sl_NetAppPingReport \note Only one sl_NetAppPingStart can be handled at a time. Calling this API while the same command is called from another thread, may result in one of the two scenarios: 1. The command will wait (internal) until the previous command finish, and then be executed. 2. There are not enough resources and SL_POOL_IS_EMPTY error will return. In this case, MAX_CONCURRENT_ACTIONS can be increased (result in memory increase) or try again later to issue the command. \warning \par Example: \code An example of sending 20 ping requests and reporting results to a callback routine when all requests are sent: // callback routine void pingRes(SlPingReport_t* pReport) { // handle ping results } // ping activation void PingTest() { SlPingReport_t report; SlPingStartCommand_t pingCommand; pingCommand.Ip = SL_IPV4_VAL(10,1,1,200); // destination IP address is 10.1.1.200 pingCommand.PingSize = 150; // size of ping, in bytes pingCommand.PingIntervalTime = 100; // delay between pings, in milliseconds pingCommand.PingRequestTimeout = 1000; // timeout for every ping in milliseconds pingCommand.TotalNumberOfAttempts = 20; // max number of ping requests. 
0 - forever pingCommand.Flags = 0; // report only when finished sl_NetAppPingStart( &pingCommand, SL_AF_INET, &report, pingRes ) ; } \endcode */ #if _SL_INCLUDE_FUNC(sl_NetAppPingStart) _i16 sl_NetAppPingStart(const SlPingStartCommand_t* pPingParams,const _u8 family,SlPingReport_t *pReport,const P_SL_DEV_PING_CALLBACK pPingCallback); #endif /*! \brief Internal function for setting network application configurations \return On success, zero is returned. On error, -1 is returned \param[in] AppId Application id, could be one of the following: \n - SL_NET_APP_HTTP_SERVER_ID - SL_NET_APP_DHCP_SERVER_ID - SL_NET_APP_MDNS_ID - SL_NET_APP_DEVICE_CONFIG_ID \param[in] SetOptions set option, could be one of the following: \n - SL_NET_APP_DHCP_SERVER_ID - NETAPP_SET_DHCP_SRV_BASIC_OPT - SL_NET_APP_HTTP_SERVER_ID - NETAPP_SET_GET_HTTP_OPT_PORT_NUMBER - NETAPP_SET_GET_HTTP_OPT_AUTH_CHECK - NETAPP_SET_GET_HTTP_OPT_AUTH_NAME - NETAPP_SET_GET_HTTP_OPT_AUTH_PASSWORD - NETAPP_SET_GET_HTTP_OPT_AUTH_REALM - NETAPP_SET_GET_HTTP_OPT_ROM_PAGES_ACCESS - SL_NET_APP_MDNS_ID - NETAPP_SET_GET_MDNS_CONT_QUERY_OPT - NETAPP_SET_GET_MDNS_QEVETN_MASK_OPT - NETAPP_SET_GET_MDNS_TIMING_PARAMS_OPT - SL_NET_APP_DEVICE_CONFIG_ID - NETAPP_SET_GET_DEV_CONF_OPT_DEVICE_URN - NETAPP_SET_GET_DEV_CONF_OPT_DOMAIN_NAME \param[in] OptionLen option structure length \param[in] pOptionValues pointer to the option structure \sa \note \warning \par \code Set DHCP Server (AP mode) parameters example: SlNetAppDhcpServerBasicOpt_t dhcpParams; _u8 outLen = sizeof(SlNetAppDhcpServerBasicOpt_t); dhcpParams.lease_time = 4096; // lease time (in seconds) of the IP Address dhcpParams.ipv4_addr_start = SL_IPV4_VAL(192,168,1,10); // first IP Address for allocation. IP Address should be set as Hex number - i.e. 0A0B0C01 for (10.11.12.1) dhcpParams.ipv4_addr_last = SL_IPV4_VAL(192,168,1,16); // last IP Address for allocation. IP Address should be set as Hex number - i.e. 
0A0B0C01 for (10.11.12.1) sl_NetAppStop(SL_NET_APP_DHCP_SERVER_ID); // Stop DHCP server before settings sl_NetAppSet(SL_NET_APP_DHCP_SERVER_ID, NETAPP_SET_DHCP_SRV_BASIC_OPT, outLen, (_u8* )&dhcpParams); // set parameters sl_NetAppStart(SL_NET_APP_DHCP_SERVER_ID); // Start DHCP server with new settings \endcode \code Set Device URN name example: Device name, maximum length of 33 characters Device name affects URN name, own SSID name in AP mode, and WPS file "device name" in WPS I.E (STA-WPS / P2P) In case no device URN name set, the default name is "mysimplelink" Allowed characters in device name are: 'a - z' , 'A - Z' , '0-9' and '-' _u8 *my_device = "MY-SIMPLELINK-DEV"; sl_NetAppSet (SL_NET_APP_DEVICE_CONFIG_ID, NETAPP_SET_GET_DEV_CONF_OPT_DEVICE_URN, strlen(my_device), (_u8 *) my_device); \endcode */ #if _SL_INCLUDE_FUNC(sl_NetAppSet) _i32 sl_NetAppSet(const _u8 AppId ,const _u8 Option,const _u8 OptionLen,const _u8 *pOptionValue); #endif /*! \brief Internal function for getting network applications configurations \return On success, zero is returned. 
On error, -1 is returned \param[in] AppId Application id, could be one of the following: \n - SL_NET_APP_HTTP_SERVER_ID - SL_NET_APP_DHCP_SERVER_ID - SL_NET_APP_MDNS_ID - SL_NET_APP_DEVICE_CONFIG_ID \param[in] SetOptions set option, could be one of the following: \n - SL_NET_APP_DHCP_SERVER_ID - NETAPP_SET_DHCP_SRV_BASIC_OPT - SL_NET_APP_HTTP_SERVER_ID - NETAPP_SET_GET_HTTP_OPT_PORT_NUMBER - NETAPP_SET_GET_HTTP_OPT_AUTH_CHECK - NETAPP_SET_GET_HTTP_OPT_AUTH_NAME - NETAPP_SET_GET_HTTP_OPT_AUTH_PASSWORD - NETAPP_SET_GET_HTTP_OPT_AUTH_REALM - NETAPP_SET_GET_HTTP_OPT_ROM_PAGES_ACCESS - SL_NET_APP_MDNS_ID - NETAPP_SET_GET_MDNS_CONT_QUERY_OPT - NETAPP_SET_GET_MDNS_QEVETN_MASK_OPT - NETAPP_SET_GET_MDNS_TIMING_PARAMS_OPT - SL_NET_APP_DEVICE_CONFIG_ID - NETAPP_SET_GET_DEV_CONF_OPT_DEVICE_URN - NETAPP_SET_GET_DEV_CONF_OPT_DOMAIN_NAME \param[in] OptionLen The length of the allocated memory as input, when the function complete, the value of this parameter would be the len that actually read from the device. 
If the device return length that is longer from the input value, the function will cut the end of the returned structure and will return ESMALLBUF \param[out] pValues pointer to the option structure which will be filled with the response from the device \sa \note \warning \par \code Get DHCP Server parameters example: SlNetAppDhcpServerBasicOpt_t dhcpParams; _u8 outLen = sizeof(SlNetAppDhcpServerBasicOpt_t); sl_NetAppGet(SL_NET_APP_DHCP_SERVER_ID, NETAPP_SET_DHCP_SRV_BASIC_OPT, &outLen, (_u8* )&dhcpParams); printf("DHCP Start IP %d.%d.%d.%d End IP %d.%d.%d.%d Lease time seconds %d\n", SL_IPV4_BYTE(dhcpParams.ipv4_addr_start,3),SL_IPV4_BYTE(dhcpParams.ipv4_addr_start,2), SL_IPV4_BYTE(dhcpParams.ipv4_addr_start,1),SL_IPV4_BYTE(dhcpParams.ipv4_addr_start,0), SL_IPV4_BYTE(dhcpParams.ipv4_addr_last,3),SL_IPV4_BYTE(dhcpParams.ipv4_addr_last,2), SL_IPV4_BYTE(dhcpParams.ipv4_addr_last,1),SL_IPV4_BYTE(dhcpParams.ipv4_addr_last,0), dhcpParams.lease_time); \endcode \code Get Device URN name example: Maximum length of 33 characters of device name. Device name affects URN name, own SSID name in AP mode, and WPS file "device name" in WPS I.E (STA-WPS / P2P) in case no device URN name set, the default name is "mysimplelink" _u8 my_device_name[35]; sl_NetAppGet (SL_NET_APP_DEVICE_CONFIG_ID, NETAPP_SET_GET_DEV_CONF_OPT_DEVICE_URN, strlen(my_device_name), (_u8 *)my_device_name); \endcode */ #if _SL_INCLUDE_FUNC(sl_NetAppGet) _i32 sl_NetAppGet(const _u8 AppId,const _u8 Option,_u8 *pOptionLen, _u8 *pOptionValue); #endif /*! Close the Doxygen group. @} */ #ifdef __cplusplus } #endif /* __cplusplus */ #endif /* __NETAPP_H__ */
{ "content_hash": "1b32a73ebed62d06e81ad9b39e82bf6d", "timestamp": "", "source": "github", "line_count": 851, "max_line_length": 194, "avg_line_length": 49.3454759106933, "alnum_prop": 0.5396375586407258, "repo_name": "runchip/zephyr-cc3200", "id": "3169213bde24595cd444fc79c926d70ba87926f1", "size": "43702", "binary": false, "copies": "1", "ref": "refs/heads/cc3200", "path": "ext/hal/ti/cc3200sdk/simplelink/include/netapp.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "320924" }, { "name": "C", "bytes": "165097093" }, { "name": "C++", "bytes": "2117097" }, { "name": "Lex", "bytes": "11299" }, { "name": "Makefile", "bytes": "174806" }, { "name": "Objective-C", "bytes": "446398" }, { "name": "Perl", "bytes": "269211" }, { "name": "Python", "bytes": "181537" }, { "name": "Shell", "bytes": "48080" }, { "name": "Verilog", "bytes": "1449" }, { "name": "Yacc", "bytes": "15396" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <parent> <artifactId>base-smart</artifactId> <groupId>com.si.jupiter.smart</groupId> <version>1.0.0-SNAPSHOT</version> </parent> <modelVersion>4.0.0</modelVersion> <artifactId>smart-new-core</artifactId> <properties> <kotlin.version>1.1.2-3</kotlin.version> </properties> <dependencies> <dependency> <groupId>org.jetbrains.kotlin</groupId> <artifactId>kotlin-stdlib-jre8</artifactId> <version>${kotlin.version}</version> </dependency> <dependency> <groupId>org.jetbrains.kotlin</groupId> <artifactId>kotlin-test</artifactId> <version>${kotlin.version}</version> <scope>test</scope> </dependency> </dependencies> <build> <plugins> <plugin> <groupId>org.jetbrains.kotlin</groupId> <artifactId>kotlin-maven-plugin</artifactId> <version>${kotlin.version}</version> <executions> <execution> <id>compile</id> <phase>compile</phase> <goals> <goal>compile</goal> </goals> </execution> <execution> <id>test-compile</id> <phase>test-compile</phase> <goals> <goal>test-compile</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <executions> <execution> <id>compile</id> <phase>compile</phase> <goals> <goal>compile</goal> </goals> </execution> <execution> <id>testCompile</id> <phase>test-compile</phase> <goals> <goal>testCompile</goal> </goals> </execution> </executions> </plugin> </plugins> </build> </project>
{ "content_hash": "df6db822f4e43aa67e4e958af1d44ed7", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 108, "avg_line_length": 34.49367088607595, "alnum_prop": 0.44587155963302755, "repo_name": "davidxiaozhi/smart", "id": "68d656095783969d85c52474f4aeb62103a07ed4", "size": "2725", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "smart-new-core/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "483371" } ], "symlink_target": "" }
package org.wso2.carbon.identity.user.functionality.mgt.internal;

import org.wso2.carbon.user.core.service.RealmService;

import javax.sql.DataSource;

/**
 * Singleton holder for the runtime collaborators of the user functionality
 * manager OSGi component (the JDBC {@link DataSource} and the carbon
 * {@link RealmService}), wired in by the component activator.
 */
public class UserFunctionalityManagerComponentDataHolder {

    // Eagerly initialized; JVM class-initialization guarantees safe publication.
    // 'final' prevents the singleton reference from being reassigned.
    private static final UserFunctionalityManagerComponentDataHolder instance =
            new UserFunctionalityManagerComponentDataHolder();

    private DataSource dataSource;
    private RealmService realmService;

    private UserFunctionalityManagerComponentDataHolder() {
        // Singleton: obtain the shared instance via getInstance().
    }

    /**
     * Returns the shared data holder instance.
     *
     * @return the singleton instance
     */
    public static UserFunctionalityManagerComponentDataHolder getInstance() {

        return instance;
    }

    /**
     * Returns the data source set by the component activator, or null if not yet set.
     */
    public DataSource getDataSource() {

        return dataSource;
    }

    /**
     * Sets the JDBC data source used for user-functionality persistence.
     */
    public void setDataSource(DataSource dataSource) {

        this.dataSource = dataSource;
    }

    /**
     * Returns the realm service set by the component activator, or null if not yet set.
     */
    public RealmService getRealmService() {

        return realmService;
    }

    /**
     * Sets the carbon realm service reference.
     */
    public void setRealmService(RealmService realmService) {

        this.realmService = realmService;
    }
}
{ "content_hash": "9e5287f1a4b645b1d84f38fed5454a07", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 77, "avg_line_length": 22.976744186046513, "alnum_prop": 0.7338056680161943, "repo_name": "omindu/carbon-identity-framework", "id": "c662c75f54e321921dbb2d477c98ce86234bd536", "size": "1658", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "components/user-functionality-mgt/org.wso2.carbon.identity.user.functionality.mgt/src/main/java/org/wso2/carbon/identity/user/functionality/mgt/internal/UserFunctionalityManagerComponentDataHolder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "266936" }, { "name": "HTML", "bytes": "30619" }, { "name": "Java", "bytes": "14075225" }, { "name": "JavaScript", "bytes": "1244664" }, { "name": "PLSQL", "bytes": "167077" }, { "name": "PLpgSQL", "bytes": "133284" }, { "name": "Shell", "bytes": "8422" }, { "name": "TSQL", "bytes": "295179" }, { "name": "Thrift", "bytes": "1513" }, { "name": "XSLT", "bytes": "1030" } ], "symlink_target": "" }
import loader from './loader';
import util from './util';

/**
 * Lint a single file with wepy-eslint when eslint is enabled in the
 * project configuration. Warns (and bails out) if the wepy-eslint
 * compiler plugin is not installed.
 *
 * @param {string} filepath - path of the file to lint
 */
export default function (filepath) {
    const config = util.getConfig();

    // Linting disabled in wepy.config.js - nothing to do.
    if (!config.eslint) {
        return;
    }

    const compiler = loader.load('wepy-eslint');
    if (!compiler) {
        util.warning('未安装wepy-eslint,执行npm install wepy-eslint --save-dev 或者在wepy.config.js中关闭eslint选项');
        return;
    }

    // Defaults first; an object-valued config.eslint overrides them.
    const baseOptions = {
        useEslintrc: true,
        extensions: ['.js', config.wpyExt || '.wpy']
    };
    const userOptions = config.eslint === true ? {} : config.eslint;
    compiler(Object.assign(baseOptions, userOptions), filepath);
};
{ "content_hash": "80dbafb289329e05246c063b6a3275ff", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 109, "avg_line_length": 30, "alnum_prop": 0.5714285714285714, "repo_name": "TuhuFE/wepy", "id": "d01b2ab325ba24dee039cf790f01739947218a84", "size": "662", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/wepy-cli/src/eslint.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "11411" }, { "name": "HTML", "bytes": "598" }, { "name": "JavaScript", "bytes": "380354" }, { "name": "Shell", "bytes": "4506" }, { "name": "Vue", "bytes": "47129" } ], "symlink_target": "" }
package ru.prbb.common.validation;

import javax.validation.ConstraintViolation;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import java.io.Serializable;

/**
 * Serializable, JAXB-marshallable description of a single bean-validation
 * constraint violation (path, constraint class, human-readable message).
 *
 * @author lesinsa
 */
@XmlType
public class Violation implements Serializable {

    // Explicit serial version UID: the class is Serializable, so pin the
    // serialized form instead of relying on the compiler-generated UID.
    private static final long serialVersionUID = 1L;

    /**
     * Path to the element holding the invalid value.
     */
    private String propertyPath;

    /**
     * Fully-qualified class name of the violated constraint annotation.
     */
    private String clazz;

    /**
     * Human-readable message describing the violation.
     */
    private String message;

    /**
     * No-arg constructor required by JAXB.
     */
    public Violation() {
    }

    /**
     * Builds a violation from a bean-validation {@link ConstraintViolation}.
     *
     * @param e source constraint violation; its property path, constraint
     *          annotation type and message are copied
     */
    public Violation(ConstraintViolation<?> e) {
        this();
        propertyPath = e.getPropertyPath().toString();
        clazz = e.getConstraintDescriptor().getAnnotation().annotationType().getName();
        message = e.getMessage();
    }

    /**
     * Creates a violation from pre-computed values.
     *
     * @param propertyPath path to the invalid element
     * @param clazz        constraint annotation class name
     * @param message      violation message
     */
    public Violation(String propertyPath, String clazz, String message) {
        this.propertyPath = propertyPath;
        this.clazz = clazz;
        this.message = message;
    }

    @XmlElement(name = "path")
    public String getPropertyPath() {
        return propertyPath;
    }

    public void setPropertyPath(String propertyPath) {
        this.propertyPath = propertyPath;
    }

    @XmlElement(name = "class")
    public String getClazz() {
        return clazz;
    }

    public void setClazz(String clazz) {
        this.clazz = clazz;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}
{ "content_hash": "9150ebbeb16372f334f8d3a5e5bcb170", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 87, "avg_line_length": 23.764705882352942, "alnum_prop": 0.614480198019802, "repo_name": "lesinsa/horn-soft-pub", "id": "22a8f065c168e01862abeaaffa680e9674f31526", "size": "1717", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "commons-2.3/common-jee/src/main/java/ru/prbb/common/validation/Violation.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "658639" } ], "symlink_target": "" }
{% raw %}{% load wagtailadmin_tags %}
{% load i18n %}
{# Override of Wagtail's admin main navigation: standard menu plus a Collections link and the account/logout footer submenu. #}
<nav class="nav-main">
    <ul>
        {{ menu_html }}
        <li class="menu-item">
            <a href="/admin/collections/" class="icon icon-folder-open-1">Collections</a>
        </li>
        <li class="footer" id="footer">
            <div class="account" id="account-settings" title="{% trans 'Edit your account' %}">
                <span class="avatar square avatar-on-dark">
                    <img src="{% avatar_url request.user size=50 %}" />
                </span>
                <em class="icon icon-arrow-up-after">{{ request.user.first_name|default:request.user.get_username }}</em>
            </div>
            <ul class="footer-submenu">
                <li><a href="{% url 'wagtailadmin_account' %}" class="icon icon-user">{% trans "Account settings" %}</a></li>
                <li><a href="{% url 'wagtailadmin_logout' %}" class="icon icon-logout">{% trans "Log out" %}</a></li>
            </ul>
        </li>
    </ul>
</nav>{% endraw %}
{ "content_hash": "49e34cad5a7bb0b6e168620c10c2d991", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 125, "avg_line_length": 39.76923076923077, "alnum_prop": 0.5058027079303675, "repo_name": "ilendl2/wagtail-cookiecutter-foundation", "id": "1ded87a825a08ab79d54299e5490230ac9f2f0d0", "size": "1034", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "{{cookiecutter.project_slug}}/pages/templates/wagtailadmin/shared/main_nav.html", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "5146" }, { "name": "CSS", "bytes": "102069" }, { "name": "HTML", "bytes": "105186" }, { "name": "JavaScript", "bytes": "4205" }, { "name": "Makefile", "bytes": "10743" }, { "name": "Python", "bytes": "216524" } ], "symlink_target": "" }
/**
 * Entry module that selects the Root component implementation by build mode
 * and installs a window.console.dev helper (a real logger in development,
 * a no-op in production).
 */
const isProduction = process.env.NODE_ENV === 'production';

if (isProduction) {
  // Silence dev-only logging in production bundles.
  window.console.dev = function(msg) {};
  module.exports = require('./Root.prod');
} else {
  window.console.dev = function(msg) {
    console.log(msg);
  };
  // Hot-reload variant; switch to Root.dev if hot reload is not wanted
  // (and don't forget to add back its render() function).
  module.exports = require('./Root.hot');
  // module.exports = require('./Root.dev');
}
{ "content_hash": "5a2777132f7959382c2937e1344ea4d8", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 60, "avg_line_length": 28.692307692307693, "alnum_prop": 0.6514745308310992, "repo_name": "SteamerTeam/steamer-react", "id": "a81d18fc59aa9a5ec74c8b964f4bbaf2e34a8905", "size": "373", "binary": false, "copies": "1", "ref": "refs/heads/v2", "path": "tools/template/list/root/Root.js", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "38824" }, { "name": "HTML", "bytes": "2464" }, { "name": "JavaScript", "bytes": "144714" } ], "symlink_target": "" }
const fs = require('fs-extra') const path = require('path') const babel = require('babel-core') module.exports = { fileExistsSync (path) { return fs.statSync(path).isFile() }, readDir (dirPath) { return new Promise(function (resolve, reject) { fs.readdir(path.resolve(dirPath), function (error, dirs) { if (error) { return reject(error) } resolve(dirs) }) }) }, readFile (filePath) { return new Promise(function (resolve, reject) { fs.readFile(path.resolve(filePath), function (error, file) { if (error) { return reject(error) } resolve(file.toString()) }) }) }, readScript (filePath) { return new Promise(function (resolve, reject) { babel.transformFile(filePath, { presets: [require('babel-preset-es2015')] }, function (err, result) { if (err) { return reject(err) } resolve(result.code) }) }) }, isUrl (string) { return string.substr(0, 7) === 'http://' || string.substr(0, 8) === 'https://' }, writeFile (filePath, content) { return new Promise(function (resolve, reject) { fs.writeFile(path.resolve(filePath), content, 'utf-8', function (error) { if (error) { return reject(error) } resolve() }) }) }, emptyDir (dirPath) { return new Promise(function (resolve, reject) { fs.emptyDir(path.resolve(dirPath), function (error) { if (error) { return reject(error) } resolve() }) }) }, copyDir (fromPath, toPath) { return new Promise(function (resolve, reject) { fs.copy(path.resolve(fromPath), path.resolve(toPath), function (error) { if (error) { return reject(error) } resolve() }) }) }, extractRawText (docs) { return Object.keys(docs).reduce(function (docsText, sectionKey) { docsText[sectionKey] = Object.keys(docs[sectionKey]).reduce(function (subDocsText, subSectionKey) { subDocsText[subSectionKey] = { raw: docs[sectionKey][subSectionKey].raw, title: docs[sectionKey][subSectionKey].toc[0].title } return subDocsText }, {}) return docsText }, {}) } }
{ "content_hash": "db7d3009000479f16e7916a45f21091f", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 105, "avg_line_length": 24.69148936170213, "alnum_prop": 0.5639810426540285, "repo_name": "bdjnk/cerebral", "id": "773623d068f46c1b1bd7110ab740133f7ef11cfe", "size": "2321", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "website/builder/builder/utils.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "27848" }, { "name": "HTML", "bytes": "6069" }, { "name": "JavaScript", "bytes": "568767" } ], "symlink_target": "" }
# Auto-generated Django initial migration for the blogs app.
# Creates the custom User model plus Blog and Post, then seeds initial data.
# Schema-defining code is intentionally left untouched; only comments added.
from __future__ import unicode_literals

from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import foundation.models.base
import foundation.models.fields
import foundation.models.manager

from . import generate_initial_data


class Migration(migrations.Migration):

    # First migration of this app: no prior app state to build on.
    initial = True

    dependencies = [
        ('auth', '0008_alter_user_username_max_length'),
    ]

    operations = [
        # Custom user model mirroring django.contrib.auth's AbstractUser fields.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'user',
                'abstract': False,
                'verbose_name_plural': 'users',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # Blog container owned by a user; slug generated by the foundation SlugField.
        migrations.CreateModel(
            name='Blog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', foundation.models.fields.SlugField(editable=False, max_length=25, unique=True)),
                ('title', models.CharField(max_length=200)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blogs', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
            bases=(foundation.models.base.AssociativeMixin, models.Model),
            managers=[
                ('objects', foundation.models.manager.Manager()),
            ],
        ),
        # Blog entries, newest first (see 'ordering' below).
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', foundation.models.fields.SlugField(editable=False, unique=True)),
                ('title', models.CharField(max_length=200)),
                ('body', models.TextField()),
                ('publish', models.BooleanField(default=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_entries', to='blogs.Blog')),
            ],
            options={
                'ordering': ['-created'],
                'verbose_name': 'Blog Entry',
                'verbose_name_plural': 'Blog Entries',
            },
            bases=(foundation.models.base.AssociativeMixin, models.Model),
            managers=[
                ('objects', foundation.models.manager.Manager()),
            ],
        ),
        # Seed the database with sample content after the tables exist.
        migrations.RunPython(generate_initial_data),
    ]
{ "content_hash": "c0e1f236536cd7548e73fab761977ba7", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 329, "avg_line_length": 56.17977528089887, "alnum_prop": 0.6124, "repo_name": "altio/foundation", "id": "a3ec3e290bb57459f131088381664d3dbce8ff04", "size": "5073", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "sample/blogs/migrations/0001_initial.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1700" }, { "name": "HTML", "bytes": "60043" }, { "name": "JavaScript", "bytes": "6017" }, { "name": "Python", "bytes": "223531" } ], "symlink_target": "" }
package com.ciandt.techgallery.service.enums; import com.google.gson.annotations.SerializedName; public enum TowerEnum { @SerializedName("108") PACIFIC(108,"Pacific"), @SerializedName("134") PRODUCT_AND_LICENCES(134,"Product and Licences"), @SerializedName("27") RJ(27,"RJ"), @SerializedName("26") TELECOM(26,"Telecom"), @SerializedName("40") INTERNATIONAL(40,"[International]"), @SerializedName("110") INTERNATIONAL_NORTH_ATLANTIC(110,"[International] North Atlantic"), @SerializedName("111") INTERNATIONAL_NORTHEST(111,"[International] Northeast"), @SerializedName("112") INTERNATIONAL_SOUTHEST(112,"[International] Southeast"), @SerializedName("113") INTERNATIONAL_WEST(113,"[International] West"), @SerializedName("29") LATAM_BAKING(29,"[Latam] Banking"), @SerializedName("25") LATAM_COMMERCE(25,"[Latam] Commerce"), @SerializedName("23") LATAM_CONSUMER_GOODS(23,"[Latam] Consumer Goods"), @SerializedName("137") LATAM_INDUSTRY(137, "[Latam] Industry"), @SerializedName("28") LATAM_INSURANCE(28,"[Latam] Insurance"), @SerializedName("136") LATAM_PAYMENT_METHODS(136,"[Latam] Payment Methods"), @SerializedName("135") LATAM_RESOURCES_LOGISTICS(135,"[Latam] Resources & Logistics"), @SerializedName("24") LATAM_SERVICES(24,"[Latam] Services"), @SerializedName("163") PACIFIC_ASEAN(163,"[Pacific] ASEAN"), @SerializedName("164") PACIFIC_JAPAN(164,"[Pacific] Japan"), @SerializedName("165") PACIFIC_US_EUROPE(165,"[Pacific] US/Europe"), @SerializedName("186") LATAM_CONSUMER_LOGISTICS(186,"[LATAM] Consumer & Logistics"); private int id; private String name; private TowerEnum(int id, String name){ this.id = id; this.name = name; } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } }
{ "content_hash": "51c4d17396c1b13f7e382e03b7b6f876", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 68, "avg_line_length": 25.253333333333334, "alnum_prop": 0.708025343189018, "repo_name": "sidharta/sales-gallery", "id": "c425df082d86072ceaf854be876d997a1b1866de", "size": "1894", "binary": false, "copies": "1", "ref": "refs/heads/release/1.0.0", "path": "src/main/java/com/ciandt/techgallery/service/enums/TowerEnum.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "37693" }, { "name": "HTML", "bytes": "36381" }, { "name": "Java", "bytes": "391804" }, { "name": "JavaScript", "bytes": "50518" } ], "symlink_target": "" }
var traverse = require('traverse') var type = require('type-component') var isostring = require('is-isodate') exports.encode = function encode (obj) { traverse(obj).forEach(function (d) { if (type(d) === 'date') this.update(d.toISOString()) }) return obj } exports.decode = function decode (obj) { traverse(obj).forEach(function (d) { if (type(d) === 'string' && isostring(d)) this.update(new Date(d)) }) return obj }
{ "content_hash": "be084ce0ec6100171b24825b2288404f", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 70, "avg_line_length": 25.88235294117647, "alnum_prop": 0.6522727272727272, "repo_name": "hden/isodate-convert", "id": "25f12857432b9fcfe8fc4dbc055c78e21bc07798", "size": "440", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "4045" } ], "symlink_target": "" }
package garden import proto "github.com/gogo/protobuf/proto" import math "math" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = math.Inf type TTY struct { WindowSize *TTY_WindowSize `protobuf:"bytes,1,opt,name=window_size" json:"window_size,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *TTY) Reset() { *m = TTY{} } func (m *TTY) String() string { return proto.CompactTextString(m) } func (*TTY) ProtoMessage() {} func (m *TTY) GetWindowSize() *TTY_WindowSize { if m != nil { return m.WindowSize } return nil } type TTY_WindowSize struct { Columns *uint32 `protobuf:"varint,1,req,name=columns" json:"columns,omitempty"` Rows *uint32 `protobuf:"varint,2,req,name=rows" json:"rows,omitempty"` XXX_unrecognized []byte `json:"-"` } func (m *TTY_WindowSize) Reset() { *m = TTY_WindowSize{} } func (m *TTY_WindowSize) String() string { return proto.CompactTextString(m) } func (*TTY_WindowSize) ProtoMessage() {} func (m *TTY_WindowSize) GetColumns() uint32 { if m != nil && m.Columns != nil { return *m.Columns } return 0 } func (m *TTY_WindowSize) GetRows() uint32 { if m != nil && m.Rows != nil { return *m.Rows } return 0 } func init() { }
{ "content_hash": "a680eeea3abcceec48483542f2381824", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 104, "avg_line_length": 25.333333333333332, "alnum_prop": 0.6493808049535603, "repo_name": "tsaleh/garden", "id": "ce5812cc00c2f6cbf72ba86cb67725f05fdf5c75", "size": "1368", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "protocol/tty.pb.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "367078" }, { "name": "Makefile", "bytes": "188" }, { "name": "Shell", "bytes": "1384" } ], "symlink_target": "" }
"""Base classes and helpers shared by all foolbox attacks.

Defines the abstract ``Attack`` hierarchy (fixed-epsilon vs. minimization
attacks), the ``Repeated`` wrapper, and small module-level helper functions.
"""
from typing import Callable, TypeVar, Any, Union, Optional, Sequence, List, Tuple, Dict
from typing_extensions import final, overload
from abc import ABC, abstractmethod
from collections.abc import Iterable

import eagerpy as ep

from ..models import Model
from ..criteria import Criterion
from ..criteria import Misclassification
from ..devutils import atleast_kd
from ..distances import Distance


T = TypeVar("T")
CriterionType = TypeVar("CriterionType", bound=Criterion)


# TODO: support manually specifying early_stop in __call__


class Attack(ABC):
    """Abstract base class of all attacks: callable with a model, inputs,
    a criterion and one or several epsilons."""

    @overload
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Sequence[Union[float, None]],
        **kwargs: Any,
    ) -> Tuple[List[T], List[T], T]:
        ...

    @overload  # noqa: F811
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[float, None],
        **kwargs: Any,
    ) -> Tuple[T, T, T]:
        ...

    @abstractmethod  # noqa: F811
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[Sequence[Union[float, None]], float, None],
        **kwargs: Any,
    ) -> Union[Tuple[List[T], List[T], T], Tuple[T, T, T]]:
        # in principle, the type of criterion is Union[Criterion, T]
        # but we want to give subclasses the option to specify the supported
        # criteria explicitly (i.e. specifying a stricter type constraint)
        ...

    @abstractmethod
    def repeat(self, times: int) -> "Attack":
        # Returns an attack that runs this attack `times` times, keeping the best result.
        ...

    def __repr__(self) -> str:
        # Render all instance attributes (leading/trailing underscores stripped).
        args = ", ".join(f"{k.strip('_')}={v}" for k, v in vars(self).items())
        return f"{self.__class__.__name__}({args})"


class AttackWithDistance(Attack):
    """Attack associated with a ``Distance``; gains a generic ``repeat``
    implementation via the ``Repeated`` wrapper."""

    @property
    @abstractmethod
    def distance(self) -> Distance:
        ...

    def repeat(self, times: int) -> Attack:
        return Repeated(self, times)


class Repeated(AttackWithDistance):
    """Repeats the wrapped attack and returns the best result"""

    def __init__(self, attack: AttackWithDistance, times: int):
        if times < 1:
            raise ValueError(f"expected times >= 1, got {times}")  # pragma: no cover

        self.attack = attack
        self.times = times

    @property
    def distance(self) -> Distance:
        # Delegate to the wrapped attack's distance.
        return self.attack.distance

    @overload
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Sequence[Union[float, None]],
        **kwargs: Any,
    ) -> Tuple[List[T], List[T], T]:
        ...

    @overload  # noqa: F811
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[float, None],
        **kwargs: Any,
    ) -> Tuple[T, T, T]:
        ...

    def __call__(  # noqa: F811
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[Sequence[Union[float, None]], float, None],
        **kwargs: Any,
    ) -> Union[Tuple[List[T], List[T], T], Tuple[T, T, T]]:
        # Runs the wrapped attack self.times times and, per epsilon and per
        # sample, keeps the best result seen so far.
        x, restore_type = ep.astensor_(inputs)
        del inputs

        verify_input_bounds(x, model)

        criterion = get_criterion(criterion)

        # Normalize a scalar epsilon to a one-element list; remember the
        # original form so the return shape matches the call.
        was_iterable = True
        if not isinstance(epsilons, Iterable):
            epsilons = [epsilons]
            was_iterable = False

        N = len(x)
        K = len(epsilons)

        for i in range(self.times):
            # run the attack
            xps, xpcs, success = self.attack(
                model, x, criterion, epsilons=epsilons, **kwargs
            )
            assert len(xps) == K
            assert len(xpcs) == K
            for xp in xps:
                assert xp.shape == x.shape
            for xpc in xpcs:
                assert xpc.shape == x.shape
            assert success.shape == (K, N)

            if i == 0:
                best_xps = xps
                best_xpcs = xpcs
                best_success = success
                continue

            # TODO: test if stacking the list to a single tensor and
            # getting rid of the loop is faster

            for k, epsilon in enumerate(epsilons):
                # Samples with no successful adversarial yet.
                first = best_success[k].logical_not()
                assert first.shape == (N,)
                if epsilon is None:
                    # if epsilon is None, we need the minimum

                    # TODO: maybe cache some of these distances
                    # and then remove the else part
                    closer = self.distance(x, xps[k]) < self.distance(x, best_xps[k])
                    assert closer.shape == (N,)
                    new_best = ep.logical_and(success[k], ep.logical_or(closer, first))
                else:
                    # for concrete epsilon, we just need a successful one
                    new_best = ep.logical_and(success[k], first)

                new_best = atleast_kd(new_best, x.ndim)
                best_xps[k] = ep.where(new_best, xps[k], best_xps[k])
                best_xpcs[k] = ep.where(new_best, xpcs[k], best_xpcs[k])

            best_success = ep.logical_or(success, best_success)

        best_xps_ = [restore_type(xp) for xp in best_xps]
        best_xpcs_ = [restore_type(xpc) for xpc in best_xpcs]
        if was_iterable:
            return best_xps_, best_xpcs_, restore_type(best_success)
        else:
            assert len(best_xps_) == 1
            assert len(best_xpcs_) == 1
            return (
                best_xps_[0],
                best_xpcs_[0],
                restore_type(best_success.squeeze(axis=0)),
            )

    def repeat(self, times: int) -> "Repeated":
        # Collapse nested repetition into a single wrapper.
        return Repeated(self.attack, self.times * times)


class FixedEpsilonAttack(AttackWithDistance):
    """Fixed-epsilon attacks try to find adversarials whose perturbation sizes
    are limited by a fixed epsilon"""

    @abstractmethod
    def run(
        self, model: Model, inputs: T, criterion: Any, *, epsilon: float, **kwargs: Any
    ) -> T:
        """Runs the attack and returns perturbed inputs.

        The size of the perturbations should be at most epsilon, but this
        is not guaranteed and the caller should verify this or clip the result.
        """
        ...

    @overload
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Sequence[Union[float, None]],
        **kwargs: Any,
    ) -> Tuple[List[T], List[T], T]:
        ...

    @overload  # noqa: F811
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[float, None],
        **kwargs: Any,
    ) -> Tuple[T, T, T]:
        ...

    @final  # noqa: F811
    def __call__(  # type: ignore
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[Sequence[Union[float, None]], float, None],
        **kwargs: Any,
    ) -> Union[Tuple[List[T], List[T], T], Tuple[T, T, T]]:
        # Runs self.run once per epsilon, clips each result to its epsilon,
        # and reports per-epsilon success.
        x, restore_type = ep.astensor_(inputs)
        del inputs

        verify_input_bounds(x, model)

        criterion = get_criterion(criterion)
        is_adversarial = get_is_adversarial(criterion, model)

        was_iterable = True
        if not isinstance(epsilons, Iterable):
            epsilons = [epsilons]
            was_iterable = False

        N = len(x)
        K = len(epsilons)

        # None means: just minimize, no early stopping, no limit on the perturbation size
        if any(eps is None for eps in epsilons):
            # TODO: implement a binary search
            raise NotImplementedError(
                "FixedEpsilonAttack subclasses do not yet support None in epsilons"
            )
        real_epsilons = [eps for eps in epsilons if eps is not None]
        del epsilons

        xps = []
        xpcs = []
        success = []
        for epsilon in real_epsilons:
            xp = self.run(model, x, criterion, epsilon=epsilon, **kwargs)

            # clip to epsilon because we don't really know what the attack returns;
            # alternatively, we could check if the perturbation is at most epsilon,
            # but then we would need to handle numerical violations;
            xpc = self.distance.clip_perturbation(x, xp, epsilon)
            is_adv = is_adversarial(xpc)

            xps.append(xp)
            xpcs.append(xpc)
            success.append(is_adv)

        # # TODO: the correction we apply here should make sure that the limits
        # # are not violated, but this is a hack and we need a better solution
        # # Alternatively, maybe can just enforce the limits in __call__
        # xps = [
        #     self.run(model, x, criterion, epsilon=epsilon, **kwargs)
        #     for epsilon in real_epsilons
        # ]

        # is_adv = ep.stack([is_adversarial(xp) for xp in xps])
        # assert is_adv.shape == (K, N)

        # in_limits = ep.stack(
        #     [
        #         self.distance(x, xp) <= epsilon
        #         for xp, epsilon in zip(xps, real_epsilons)
        #     ],
        # )
        # assert in_limits.shape == (K, N)

        # if not in_limits.all():
        #     # TODO handle (numerical) violations
        #     # warn user if run() violated the epsilon constraint
        #     import pdb

        #     pdb.set_trace()

        # success = ep.logical_and(in_limits, is_adv)
        # assert success.shape == (K, N)

        success_ = ep.stack(success)
        assert success_.shape == (K, N)

        xps_ = [restore_type(xp) for xp in xps]
        xpcs_ = [restore_type(xpc) for xpc in xpcs]

        if was_iterable:
            return xps_, xpcs_, restore_type(success_)
        else:
            assert len(xps_) == 1
            assert len(xpcs_) == 1
            return xps_[0], xpcs_[0], restore_type(success_.squeeze(axis=0))


class MinimizationAttack(AttackWithDistance):
    """Minimization attacks try to find adversarials with minimal perturbation sizes"""

    @abstractmethod
    def run(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        early_stop: Optional[float] = None,
        **kwargs: Any,
    ) -> T:
        """Runs the attack and returns perturbed inputs.

        The size of the perturbations should be as small as possible such that
        the perturbed inputs are still adversarial. In general, this is not
        guaranteed and the caller has to verify this.
        """
        ...

    @overload
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Sequence[Union[float, None]],
        **kwargs: Any,
    ) -> Tuple[List[T], List[T], T]:
        ...

    @overload  # noqa: F811
    def __call__(
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[float, None],
        **kwargs: Any,
    ) -> Tuple[T, T, T]:
        ...

    @final  # noqa: F811
    def __call__(  # type: ignore
        self,
        model: Model,
        inputs: T,
        criterion: Any,
        *,
        epsilons: Union[Sequence[Union[float, None]], float, None],
        **kwargs: Any,
    ) -> Union[Tuple[List[T], List[T], T], Tuple[T, T, T]]:
        # Runs self.run ONCE (minimizing the perturbation) and then evaluates
        # the single result against every requested epsilon.
        x, restore_type = ep.astensor_(inputs)
        del inputs

        verify_input_bounds(x, model)

        criterion = get_criterion(criterion)
        is_adversarial = get_is_adversarial(criterion, model)

        was_iterable = True
        if not isinstance(epsilons, Iterable):
            epsilons = [epsilons]
            was_iterable = False

        N = len(x)
        K = len(epsilons)

        # None means: just minimize, no early stopping, no limit on the perturbation size
        if any(eps is None for eps in epsilons):
            early_stop = None
        else:
            early_stop = min(epsilons)  # type: ignore

        # run the actual attack
        xp = self.run(model, x, criterion, early_stop=early_stop, **kwargs)

        xpcs = []
        success = []
        for epsilon in epsilons:
            if epsilon is None:
                xpc = xp
            else:
                xpc = self.distance.clip_perturbation(x, xp, epsilon)
            is_adv = is_adversarial(xpc)

            xpcs.append(xpc)
            success.append(is_adv)

        success_ = ep.stack(success)
        assert success_.shape == (K, N)

        xp_ = restore_type(xp)
        xpcs_ = [restore_type(xpc) for xpc in xpcs]

        if was_iterable:
            return [xp_] * K, xpcs_, restore_type(success_)
        else:
            assert len(xpcs_) == 1
            return xp_, xpcs_[0], restore_type(success_.squeeze(axis=0))


class FlexibleDistanceMinimizationAttack(MinimizationAttack):
    """Minimization attack whose distance is supplied (optionally) at
    initialization time instead of being fixed by the subclass."""

    def __init__(self, *, distance: Optional[Distance] = None):
        self._distance = distance

    @property
    def distance(self) -> Distance:
        if self._distance is None:
            # we delay the error until the distance is needed,
            # e.g. when __call__ is executed (that way, run
            # can be used without specifying a distance)
            raise ValueError(
                "unknown distance, please pass `distance` to the attack initializer"
            )
        return self._distance


def get_is_adversarial(
    criterion: Criterion, model: Model
) -> Callable[[ep.Tensor], ep.Tensor]:
    """Returns a closure that evaluates `criterion` on the model's outputs
    for a batch of perturbed inputs."""

    def is_adversarial(perturbed: ep.Tensor) -> ep.Tensor:
        outputs = model(perturbed)
        return criterion(perturbed, outputs)

    return is_adversarial


def get_criterion(criterion: Union[Criterion, Any]) -> Criterion:
    """Passes through a Criterion; wraps anything else (e.g. labels) in
    Misclassification."""
    if isinstance(criterion, Criterion):
        return criterion
    else:
        return Misclassification(criterion)


def get_channel_axis(model: Model, ndim: int) -> Optional[int]:
    """Returns the channel axis for the model's data_format, or None when the
    model does not declare one. Raises ValueError on an unknown format."""
    data_format = getattr(model, "data_format", None)
    if data_format is None:
        return None
    if data_format == "channels_first":
        return 1
    if data_format == "channels_last":
        return ndim - 1
    raise ValueError(
        f"unknown data_format, expected 'channels_first' or 'channels_last', got {data_format}"
    )


def raise_if_kwargs(kwargs: Dict[str, Any]) -> None:
    """Raises TypeError naming the first leftover keyword argument, if any."""
    if kwargs:
        raise TypeError(
            f"attack got an unexpected keyword argument '{next(iter(kwargs.keys()))}'"
        )


def verify_input_bounds(input: ep.Tensor, model: Model) -> None:
    # verify that input to the attack lies within model's input bounds
    assert input.min().item() >= model.bounds.lower
    assert input.max().item() <= model.bounds.upper
{ "content_hash": "d46e6472cd8001af670e038381eefb29", "timestamp": "", "source": "github", "line_count": 499, "max_line_length": 95, "avg_line_length": 29.521042084168336, "alnum_prop": 0.5464666349874414, "repo_name": "bethgelab/foolbox", "id": "6eaa1690774824b49e0cd46f4dabc96eb2d3aadd", "size": "14731", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "foolbox/attacks/base.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "668" }, { "name": "Jupyter Notebook", "bytes": "23091" }, { "name": "Makefile", "bytes": "2670" }, { "name": "Python", "bytes": "405918" }, { "name": "TeX", "bytes": "3946" } ], "symlink_target": "" }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Immutable;
using System.Diagnostics;
using System.Threading;
using Analyzer.Utilities;
using Analyzer.Utilities.Extensions;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.FlowAnalysis.DataFlow.PointsToAnalysis;

namespace Microsoft.CodeAnalysis.FlowAnalysis.DataFlow.DisposeAnalysis
{
    using DisposeAnalysisData = DictionaryAnalysisData<AbstractLocation, DisposeAbstractValue>;
    using DisposeAnalysisDomain = MapAbstractDomain<AbstractLocation, DisposeAbstractValue>;

    /// <summary>
    /// Dataflow analysis to track dispose state of <see cref="AbstractLocation"/>/<see cref="IOperation"/> instances.
    /// </summary>
    public partial class DisposeAnalysis : ForwardDataFlowAnalysis<DisposeAnalysisData, DisposeAnalysisContext, DisposeAnalysisResult, DisposeBlockAnalysisResult, DisposeAbstractValue>
    {
        // Invoking an instance method may likely invalidate all the instance field analysis state, i.e.
        // reference type fields might be re-assigned to point to different objects in the called method.
        // An optimistic points to analysis assumes that the points to values of instance fields don't change on invoking an instance method.
        // A pessimistic points to analysis resets all the instance state and assumes the instance field might point to any object, hence has unknown state.
        // For dispose analysis, we want to perform an optimistic points to analysis as we assume a disposable field is not likely to be re-assigned to a separate object in helper method invocations in Dispose.
        private const bool PessimisticAnalysis = false;

        // Shared analysis domain for all dispose analyses; maps abstract locations to dispose values.
        internal static readonly DisposeAnalysisDomain DisposeAnalysisDomainInstance = new DisposeAnalysisDomain(DisposeAbstractValueDomain.Default);

        // Instances are only created via TryGetOrComputeResultForAnalysisContext below.
        private DisposeAnalysis(DisposeAnalysisDomain analysisDomain, DisposeDataFlowOperationVisitor operationVisitor)
            : base(analysisDomain, operationVisitor)
        {
        }

        /// <summary>
        /// Tries to compute (or fetch a previously computed) dispose analysis result for the given control flow graph.
        /// Reads editorconfig-style analyzer options (interprocedural kind, ownership-transfer flags, points-to kind,
        /// copy analysis, excluded symbols) and forwards them to the private overload.
        /// Returns null when the prerequisite points-to analysis could not be computed.
        /// <paramref name="pointsToAnalysisResult"/> receives the underlying points-to result, or null on failure.
        /// </summary>
        public static DisposeAnalysisResult? TryGetOrComputeResult(
            ControlFlowGraph cfg,
            ISymbol owningSymbol,
            WellKnownTypeProvider wellKnownTypeProvider,
            AnalyzerOptions analyzerOptions,
            DiagnosticDescriptor rule,
            ImmutableHashSet<INamedTypeSymbol> disposeOwnershipTransferLikelyTypes,
            PointsToAnalysisKind defaultPointsToAnalysisKind,
            bool trackInstanceFields,
            bool exceptionPathsAnalysis,
            CancellationToken cancellationToken,
            out PointsToAnalysisResult? pointsToAnalysisResult,
            InterproceduralAnalysisKind interproceduralAnalysisKind = InterproceduralAnalysisKind.ContextSensitive,
            bool performCopyAnalysisIfNotUserConfigured = false,
            InterproceduralAnalysisPredicate? interproceduralAnalysisPredicate = null,
            bool defaultDisposeOwnershipTransferAtConstructor = false,
            bool defaultDisposeOwnershipTransferAtMethodCall = false)
        {
            // Callers are expected to have filtered out symbols the user configured to skip.
            Debug.Assert(!owningSymbol.IsConfiguredToSkipAnalysis(analyzerOptions, rule, wellKnownTypeProvider.Compilation, cancellationToken));

            // Resolve user-configurable options; each falls back to the supplied default when not configured.
            var interproceduralAnalysisConfig = InterproceduralAnalysisConfiguration.Create(
                analyzerOptions, rule, owningSymbol, wellKnownTypeProvider.Compilation, interproceduralAnalysisKind, cancellationToken);
            var disposeOwnershipTransferAtConstructor = analyzerOptions.GetDisposeOwnershipTransferAtConstructorOption(
                rule, owningSymbol, wellKnownTypeProvider.Compilation, defaultValue: defaultDisposeOwnershipTransferAtConstructor, cancellationToken);
            var disposeOwnershipTransferAtMethodCall = analyzerOptions.GetDisposeOwnershipTransferAtMethodCall(
                rule, owningSymbol, wellKnownTypeProvider.Compilation, defaultValue: defaultDisposeOwnershipTransferAtMethodCall, cancellationToken);

            return TryGetOrComputeResult(cfg, owningSymbol, analyzerOptions, wellKnownTypeProvider,
                interproceduralAnalysisConfig, interproceduralAnalysisPredicate, disposeOwnershipTransferLikelyTypes,
                disposeOwnershipTransferAtConstructor, disposeOwnershipTransferAtMethodCall, trackInstanceFields,
                exceptionPathsAnalysis,
                pointsToAnalysisKind: analyzerOptions.GetPointsToAnalysisKindOption(rule, owningSymbol, wellKnownTypeProvider.Compilation, defaultPointsToAnalysisKind, cancellationToken),
                performCopyAnalysis: analyzerOptions.GetCopyAnalysisOption(rule, owningSymbol, wellKnownTypeProvider.Compilation, defaultValue: performCopyAnalysisIfNotUserConfigured, cancellationToken),
                excludedSymbols: analyzerOptions.GetExcludedSymbolNamesWithValueOption(rule, owningSymbol, wellKnownTypeProvider.Compilation, cancellationToken),
                out pointsToAnalysisResult);
        }

        /// <summary>
        /// Core implementation: runs the prerequisite points-to analysis, then builds the
        /// <see cref="DisposeAnalysisContext"/> and computes the dispose analysis result.
        /// Returns null when the points-to analysis fails or the CFG is unexpectedly null.
        /// </summary>
        private static DisposeAnalysisResult? TryGetOrComputeResult(
            ControlFlowGraph cfg,
            ISymbol owningSymbol,
            AnalyzerOptions analyzerOptions,
            WellKnownTypeProvider wellKnownTypeProvider,
            InterproceduralAnalysisConfiguration interproceduralAnalysisConfig,
            InterproceduralAnalysisPredicate? interproceduralAnalysisPredicate,
            ImmutableHashSet<INamedTypeSymbol> disposeOwnershipTransferLikelyTypes,
            bool disposeOwnershipTransferAtConstructor,
            bool disposeOwnershipTransferAtMethodCall,
            bool trackInstanceFields,
            bool exceptionPathsAnalysis,
            PointsToAnalysisKind pointsToAnalysisKind,
            bool performCopyAnalysis,
            SymbolNamesWithValueOption<Unit> excludedSymbols,
            out PointsToAnalysisResult? pointsToAnalysisResult)
        {
            // Dispose analysis is only meaningful when System.IDisposable resolves in this compilation.
            Debug.Assert(wellKnownTypeProvider.TryGetOrCreateTypeByMetadataName(WellKnownTypeNames.SystemIDisposable, out _));

            // Dispose analysis is layered on top of points-to analysis (see PessimisticAnalysis comment above).
            pointsToAnalysisResult = PointsToAnalysis.PointsToAnalysis.TryGetOrComputeResult(
                cfg, owningSymbol, analyzerOptions, wellKnownTypeProvider, pointsToAnalysisKind, interproceduralAnalysisConfig,
                interproceduralAnalysisPredicate, PessimisticAnalysis, performCopyAnalysis, exceptionPathsAnalysis);
            if (pointsToAnalysisResult == null)
            {
                return null;
            }

            if (cfg == null)
            {
                // Defensive guard: a null CFG indicates a caller bug, so fail loudly in debug builds.
                Debug.Fail("Expected non-null CFG");
                return null;
            }

            var analysisContext = DisposeAnalysisContext.Create(
                DisposeAbstractValueDomain.Default, wellKnownTypeProvider, cfg, owningSymbol, analyzerOptions, interproceduralAnalysisConfig, interproceduralAnalysisPredicate,
                PessimisticAnalysis, exceptionPathsAnalysis, pointsToAnalysisResult, TryGetOrComputeResultForAnalysisContext,
                disposeOwnershipTransferLikelyTypes, disposeOwnershipTransferAtConstructor, disposeOwnershipTransferAtMethodCall,
                trackInstanceFields, excludedSymbols);
            return TryGetOrComputeResultForAnalysisContext(analysisContext);
        }

        // Also passed to DisposeAnalysisContext.Create above as the callback used for interprocedural analysis.
        private static DisposeAnalysisResult? TryGetOrComputeResultForAnalysisContext(DisposeAnalysisContext disposeAnalysisContext)
        {
            var operationVisitor = new DisposeDataFlowOperationVisitor(disposeAnalysisContext);
            var disposeAnalysis = new DisposeAnalysis(DisposeAnalysisDomainInstance, operationVisitor);
            return disposeAnalysis.TryGetOrComputeResultCore(disposeAnalysisContext, cacheResult: false);
        }

        // Wraps the raw dataflow result, attaching the tracked instance-field points-to map
        // only when the context asked for instance-field tracking.
        protected override DisposeAnalysisResult ToResult(DisposeAnalysisContext analysisContext, DataFlowAnalysisResult<DisposeBlockAnalysisResult, DisposeAbstractValue> dataFlowAnalysisResult)
        {
            var operationVisitor = (DisposeDataFlowOperationVisitor)OperationVisitor;
            var trackedInstanceFieldPointsToMap = analysisContext.TrackInstanceFields ?
                operationVisitor.TrackedInstanceFieldPointsToMap :
                null;
            return new DisposeAnalysisResult(dataFlowAnalysisResult, trackedInstanceFieldPointsToMap);
        }

        // Per-basic-block result wrapper required by the ForwardDataFlowAnalysis base class.
        protected override DisposeBlockAnalysisResult ToBlockResult(BasicBlock basicBlock, DictionaryAnalysisData<AbstractLocation, DisposeAbstractValue> blockAnalysisData)
            => new DisposeBlockAnalysisResult(basicBlock, blockAnalysisData);
    }
}
{ "content_hash": "aafd9bb9d2738cd6479639b2b481f89c", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 210, "avg_line_length": 66.32307692307693, "alnum_prop": 0.759104616098353, "repo_name": "pakdev/roslyn-analyzers", "id": "b581afe7546af786973ba5b98aa4f37b46d2443c", "size": "8624", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Utilities/FlowAnalysis/FlowAnalysis/Analysis/DisposeAnalysis/DisposeAnalysis.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5165" }, { "name": "C#", "bytes": "10824190" }, { "name": "PowerShell", "bytes": "19192" }, { "name": "Rich Text Format", "bytes": "246282" }, { "name": "Shell", "bytes": "9077" }, { "name": "Visual Basic", "bytes": "161767" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <resources> <color name="title_gray">#717171</color> <color name="dark_gray">#aaa</color> <color name="corner_shadow">#ccc</color> <color name="color_primary">#2196f3</color> <color name="color_primary_dark">#1976d2</color> <color name="color_accent">#69f0ae</color> </resources>
{ "content_hash": "ac1dd5bb9b9bed7c18525a2a163bbf19", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 52, "avg_line_length": 38.888888888888886, "alnum_prop": 0.64, "repo_name": "zzwwws/ZhihuDailyPurify", "id": "e9c05d65613f321194c8f2af3786437d4826eddf", "size": "350", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/values/colors.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "110059" } ], "symlink_target": "" }
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; import java.util.HashSet; import java.util.Set; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeContainerUpdateProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeContainerUpdateProtoOrBuilder; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeContainerUpdate; public class NodeContainerUpdatePBImpl extends NodeContainerUpdate{ NodeContainerUpdateProto proto = NodeContainerUpdateProto.getDefaultInstance(); NodeContainerUpdateProto.Builder builder = null; boolean viaProto = false; private ContainerId containerId; public NodeContainerUpdatePBImpl() { builder = NodeContainerUpdateProto.newBuilder(); } public NodeContainerUpdatePBImpl(NodeContainerUpdateProto proto) { this.proto = proto; viaProto = true; } public NodeContainerUpdateProto getProto() { mergeLocalToProto(); proto = viaProto ? proto : builder.build(); viaProto = true; return proto; } private void mergeLocalToProto() { if (viaProto){ maybeInitBuilder(); } mergeLocalToBuilder(); proto = builder.build(); viaProto = true; } private void maybeInitBuilder() { if (viaProto || builder == null) { builder = NodeContainerUpdateProto.newBuilder(proto); } viaProto = false; } private void mergeLocalToBuilder() { if(this.containerId != null) { builder.setContainerId( convertToProtoFormat(this.containerId)); } } @Override public void setContianerId(ContainerId containerId) { maybeInitBuilder(); if (containerId == null) builder.clearContainerId(); this.containerId = containerId; } @Override public ContainerId getContainerId() { NodeContainerUpdateProtoOrBuilder p = viaProto ? 
proto : builder; if (this.containerId != null) { return this.containerId; } if (!p.hasContainerId()) { return null; } this.containerId = convertFromProtoFormat(p.getContainerId()); return this.containerId; } @Override public void setMemory(int memory) { maybeInitBuilder(); builder.setMemory(memory); } @Override public int getMemory() { NodeContainerUpdateProtoOrBuilder p = viaProto ? proto : builder; return p.getMemory(); } @Override public void setCores(int cores) { maybeInitBuilder(); builder.setCpuCores(cores); } @Override public int getCores() { NodeContainerUpdateProtoOrBuilder p = viaProto ? proto : builder; return p.getCpuCores(); } private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { return new ContainerIdPBImpl(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { return ((ContainerIdPBImpl)t).getProto(); } @Override public void setSuspend(boolean suspend) { maybeInitBuilder(); builder.setSuspend(suspend); } @Override public boolean getSuspend() { NodeContainerUpdateProtoOrBuilder p = viaProto ? proto : builder; return p.getSuspend(); } @Override public void setResume(boolean resume) { maybeInitBuilder(); builder.setResume(resume); } @Override public boolean getResume() { NodeContainerUpdateProtoOrBuilder p = viaProto ? proto : builder; return p.getResume(); } }
{ "content_hash": "6398fa229fd2598a0113b635e0982e7e", "timestamp": "", "source": "github", "line_count": 141, "max_line_length": 100, "avg_line_length": 25.113475177304963, "alnum_prop": 0.7209827732279017, "repo_name": "yncxcw/PreemptYARN-2.7.1", "id": "41bed7a09315d7d2eb3c74092b2c5374b07fdfe2", "size": "3541", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeContainerUpdatePBImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AspectJ", "bytes": "31146" }, { "name": "Batchfile", "bytes": "65917" }, { "name": "C", "bytes": "1387886" }, { "name": "C++", "bytes": "95394" }, { "name": "CMake", "bytes": "40065" }, { "name": "CSS", "bytes": "50572" }, { "name": "HTML", "bytes": "189022" }, { "name": "Java", "bytes": "50745027" }, { "name": "JavaScript", "bytes": "26275" }, { "name": "Perl", "bytes": "18992" }, { "name": "Protocol Buffer", "bytes": "234916" }, { "name": "Python", "bytes": "83497" }, { "name": "Shell", "bytes": "196772" }, { "name": "TLA", "bytes": "14993" }, { "name": "TeX", "bytes": "19322" }, { "name": "XSLT", "bytes": "20949" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Windows; using System.Windows.Controls; using System.Windows.Data; using System.Windows.Documents; using System.Windows.Input; using System.Windows.Media; using System.Windows.Media.Imaging; using System.Windows.Navigation; using System.Windows.Shapes; namespace Fortaggle.Views.Common { /// <summary> /// ConfirmDialog.xaml の相互作用ロジック /// </summary> public partial class ConfirmDialog : UserControl { public ConfirmDialog() { InitializeComponent(); } } }
{ "content_hash": "0c1d6946f420d49191dda212c2eafad0", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 52, "avg_line_length": 23.814814814814813, "alnum_prop": 0.6842923794712286, "repo_name": "gembaf/Fortaggle", "id": "20ad4a892a3f43a1c88cf7b40a742483a548e3d7", "size": "663", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Fortaggle/Views/Common/ConfirmDialog.xaml.cs", "mode": "33261", "license": "mit", "language": [ { "name": "C#", "bytes": "96154" } ], "symlink_target": "" }
layout: post title: "Hasil Rapat Persiapan Halal Bi Halal 2" date: 2015-07-28 06:00:00 permalink: "hasil-rapat2-10-tahun-smansa-2005" description: "Hasil Rapat 2 menuju Halal Bi Halal 10 Tahun IKA Smansa 2005 Makassar" --- Berikut ini adalah Hasil Rapat 2 yang dilaksanakan di rumah Sdr. A. Mala Indira (Mapong) pada tanggal 25 Juli 2015 dengan agenda pemantapan rapat sebelumnya: - Pemaparan gambaran umum [konsep outbond]({% post_url 2015-07-28-konsep-outbond-reuni-smansa2005 %}) dari Sdr. Patria Muhammad (Bucek) - Pengadaan doorprize untuk games-games outbond. - Pembuatan rekening untuk menampung kontribusi peserta. __(PJ: Sdr. Amalia Salman)__ - Publikasi secara kontinyu kepada semua teman-teman Alumni Smansa 2005 Makassar. Mohon maaf karena pembahasan yang selesai hanya sedikit dikarenakan terlalu banyak rumpi di antara kami peserta rapat. Hahahahah :p Terakhir, untuk info rapat selanjutnya silahkan cek web ini secara berkala. Terima Kasih.
{ "content_hash": "d5edd6621931324a9686174229e50256", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 135, "avg_line_length": 50.578947368421055, "alnum_prop": 0.7898022892819979, "repo_name": "novrian/1dekade-2005-sites", "id": "a0c1c1413293f3e4db58d52929e1ecf390d4d89b", "size": "965", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2015-07-28-hasil-rapat2-10-tahun-smansa2005.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "76894" }, { "name": "HTML", "bytes": "21940" }, { "name": "JavaScript", "bytes": "855" }, { "name": "Ruby", "bytes": "11110" } ], "symlink_target": "" }
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Smart Twitter Sentiment Analysis On Tweets &amp; Hashtags</title>
</head>
<body>
  <div id="app"></div>
  <script src="js/app.js"></script>
</body>
</html>
{ "content_hash": "83517b01cab7eae3473f341d42ea9ab3", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 77, "avg_line_length": 25.333333333333332, "alnum_prop": 0.6289473684210526, "repo_name": "adeelibr/stsath", "id": "5f4433f434fb99b7ff0ea492dff71068d927c6c5", "size": "380", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/static/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4651" }, { "name": "HTML", "bytes": "380" }, { "name": "JavaScript", "bytes": "159591" } ], "symlink_target": "" }
title: acy25 type: products image: /img/Screen Shot 2017-05-09 at 11.56.54 AM.png heading: y25 description: lksadjf lkasdjf lksajdf lksdaj flksadj flksa fdj main: heading: Foo Bar BAz description: |- ***This is i a thing***kjh hjk kj # Blah Blah ## Blah![undefined](undefined) ### Baah image1: alt: kkkk ---
{ "content_hash": "3db0f965499a1d256b9a898fc6e4781f", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 61, "avg_line_length": 22.333333333333332, "alnum_prop": 0.6656716417910448, "repo_name": "pblack/kaldi-hugo-cms-template", "id": "2290ffbfba8bfcaab4e941b16e35bb18b064e1a0", "size": "339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "site/content/pages2/acy25.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "94394" }, { "name": "HTML", "bytes": "18889" }, { "name": "JavaScript", "bytes": "10014" } ], "symlink_target": "" }
#import <Foundation/Foundation.h> #if TARGET_OS_IPHONE #import <CFNetwork/CFNetwork.h> #if __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0 #import <UIKit/UIKit.h> // Necessary for background task support #endif #endif #import <stdio.h> #import "ASIHTTPRequestConfig.h" #import "ASIHTTPRequestDelegate.h" #import "ASIProgressDelegate.h" #import "ASICacheDelegate.h" @class ASIDataDecompressor; extern NSString *ASIHTTPRequestVersion; // Make targeting different platforms more reliable // See: http://www.blumtnwerx.com/blog/2009/06/cross-sdk-code-hygiene-in-xcode/ #ifndef __IPHONE_3_2 #define __IPHONE_3_2 30200 #endif #ifndef __IPHONE_4_0 #define __IPHONE_4_0 40000 #endif #ifndef __MAC_10_5 #define __MAC_10_5 1050 #endif #ifndef __MAC_10_6 #define __MAC_10_6 1060 #endif typedef enum _ASIAuthenticationState { ASINoAuthenticationNeededYet = 0, ASIHTTPAuthenticationNeeded = 1, ASIProxyAuthenticationNeeded = 2 } ASIAuthenticationState; typedef enum _ASINetworkErrorType { ASIConnectionFailureErrorType = 1, ASIRequestTimedOutErrorType = 2, ASIAuthenticationErrorType = 3, ASIRequestCancelledErrorType = 4, ASIUnableToCreateRequestErrorType = 5, ASIInternalErrorWhileBuildingRequestType = 6, ASIInternalErrorWhileApplyingCredentialsType = 7, ASIFileManagementError = 8, ASITooMuchRedirectionErrorType = 9, ASIUnhandledExceptionError = 10, ASICompressionError = 11 } ASINetworkErrorType; // The error domain that all errors generated by ASIHTTPRequest use extern NSString* const NetworkRequestErrorDomain; // You can use this number to throttle upload and download bandwidth in iPhone OS apps send or receive a large amount of data // This may help apps that might otherwise be rejected for inclusion into the app store for using excessive bandwidth // This number is not official, as far as I know there is no officially documented bandwidth limit extern unsigned long const ASIWWANBandwidthThrottleAmount; #if NS_BLOCKS_AVAILABLE typedef void (^ASIBasicBlock)(void); typedef void 
(^ASIHeadersBlock)(NSDictionary *responseHeaders); typedef void (^ASISizeBlock)(long long size); typedef void (^ASIProgressBlock)(unsigned long long size, unsigned long long total); typedef void (^ASIDataBlock)(NSData *data); #endif @interface ASIHTTPRequest : NSOperation <NSCopying> { // The url for this operation, should include GET params in the query string where appropriate NSURL *url; // Will always contain the original url used for making the request (the value of url can change when a request is redirected) NSURL *originalURL; // Temporarily stores the url we are about to redirect to. Will be nil again when we do redirect NSURL *redirectURL; // The delegate, you need to manage setting and talking to your delegate in your subclasses id <ASIHTTPRequestDelegate> delegate; // Another delegate that is also notified of request status changes and progress updates // Generally, you won't use this directly, but ASINetworkQueue sets itself as the queue so it can proxy updates to its own delegates // NOTE: WILL BE RETAINED BY THE REQUEST id <ASIHTTPRequestDelegate, ASIProgressDelegate> queue; // HTTP method to use (GET / POST / PUT / DELETE / HEAD). 
Defaults to GET NSString *requestMethod; // Request body - only used when the whole body is stored in memory (shouldStreamPostDataFromDisk is false) NSMutableData *postBody; // gzipped request body used when shouldCompressRequestBody is YES NSData *compressedPostBody; // When true, post body will be streamed from a file on disk, rather than loaded into memory at once (useful for large uploads) // Automatically set to true in ASIFormDataRequests when using setFile:forKey: BOOL shouldStreamPostDataFromDisk; // Path to file used to store post body (when shouldStreamPostDataFromDisk is true) // You can set this yourself - useful if you want to PUT a file from local disk NSString *postBodyFilePath; // Path to a temporary file used to store a deflated post body (when shouldCompressPostBody is YES) NSString *compressedPostBodyFilePath; // Set to true when ASIHTTPRequest automatically created a temporary file containing the request body (when true, the file at postBodyFilePath will be deleted at the end of the request) BOOL didCreateTemporaryPostDataFile; // Used when writing to the post body when shouldStreamPostDataFromDisk is true (via appendPostData: or appendPostDataFromFile:) NSOutputStream *postBodyWriteStream; // Used for reading from the post body when sending the request NSInputStream *postBodyReadStream; // Dictionary for custom HTTP request headers NSMutableDictionary *requestHeaders; // Set to YES when the request header dictionary has been populated, used to prevent this happening more than once BOOL haveBuiltRequestHeaders; // Will be populated with HTTP response headers from the server NSDictionary *responseHeaders; // Can be used to manually insert cookie headers to a request, but it's more likely that sessionCookies will do this for you NSMutableArray *requestCookies; // Will be populated with cookies NSArray *responseCookies; // If use useCookiePersistence is true, network requests will present valid cookies from previous requests BOOL 
useCookiePersistence; // If useKeychainPersistence is true, network requests will attempt to read credentials from the keychain, and will save them in the keychain when they are successfully presented BOOL useKeychainPersistence; // If useSessionPersistence is true, network requests will save credentials and reuse for the duration of the session (until clearSession is called) BOOL useSessionPersistence; // If allowCompressedResponse is true, requests will inform the server they can accept compressed data, and will automatically decompress gzipped responses. Default is true. BOOL allowCompressedResponse; // If shouldCompressRequestBody is true, the request body will be gzipped. Default is false. // You will probably need to enable this feature on your webserver to make this work. Tested with apache only. BOOL shouldCompressRequestBody; // When downloadDestinationPath is set, the result of this request will be downloaded to the file at this location // If downloadDestinationPath is not set, download data will be stored in memory NSString *downloadDestinationPath; // The location that files will be downloaded to. 
Once a download is complete, files will be decompressed (if necessary) and moved to downloadDestinationPath NSString *temporaryFileDownloadPath; // If the response is gzipped and shouldWaitToInflateCompressedResponses is NO, a file will be created at this path containing the inflated response as it comes in NSString *temporaryUncompressedDataDownloadPath; // Used for writing data to a file when downloadDestinationPath is set NSOutputStream *fileDownloadOutputStream; NSOutputStream *inflatedFileDownloadOutputStream; // When the request fails or completes successfully, complete will be true BOOL complete; // external "finished" indicator, subject of KVO notifications; updates after 'complete' BOOL finished; // True if our 'cancel' selector has been called BOOL cancelled; // If an error occurs, error will contain an NSError // If error code is = ASIConnectionFailureErrorType (1, Connection failure occurred) - inspect [[error userInfo] objectForKey:NSUnderlyingErrorKey] for more information NSError *error; // Username and password used for authentication NSString *username; NSString *password; // User-Agent for this request NSString *userAgent; // Domain used for NTLM authentication NSString *domain; // Username and password used for proxy authentication NSString *proxyUsername; NSString *proxyPassword; // Domain used for NTLM proxy authentication NSString *proxyDomain; // Delegate for displaying upload progress (usually an NSProgressIndicator, but you can supply a different object and handle this yourself) id <ASIProgressDelegate> uploadProgressDelegate; // Delegate for displaying download progress (usually an NSProgressIndicator, but you can supply a different object and handle this yourself) id <ASIProgressDelegate> downloadProgressDelegate; // Whether we've seen the headers of the response yet BOOL haveExaminedHeaders; // Data we receive will be stored here. 
Data may be compressed unless allowCompressedResponse is false - you should use [request responseData] instead in most cases NSMutableData *rawResponseData; // Used for sending and receiving data CFHTTPMessageRef request; NSInputStream *readStream; // Used for authentication CFHTTPAuthenticationRef requestAuthentication; NSDictionary *requestCredentials; // Used during NTLM authentication int authenticationRetryCount; // Authentication scheme (Basic, Digest, NTLM) NSString *authenticationScheme; // Realm for authentication when credentials are required NSString *authenticationRealm; // When YES, ASIHTTPRequest will present a dialog allowing users to enter credentials when no-matching credentials were found for a server that requires authentication // The dialog will not be shown if your delegate responds to authenticationNeededForRequest: // Default is NO. BOOL shouldPresentAuthenticationDialog; // When YES, ASIHTTPRequest will present a dialog allowing users to enter credentials when no-matching credentials were found for a proxy server that requires authentication // The dialog will not be shown if your delegate responds to proxyAuthenticationNeededForRequest: // Default is YES (basically, because most people won't want the hassle of adding support for authenticating proxies to their apps) BOOL shouldPresentProxyAuthenticationDialog; // Used for proxy authentication CFHTTPAuthenticationRef proxyAuthentication; NSDictionary *proxyCredentials; // Used during authentication with an NTLM proxy int proxyAuthenticationRetryCount; // Authentication scheme for the proxy (Basic, Digest, NTLM) NSString *proxyAuthenticationScheme; // Realm for proxy authentication when credentials are required NSString *proxyAuthenticationRealm; // HTTP status code, eg: 200 = OK, 404 = Not found etc int responseStatusCode; // Description of the HTTP status code NSString *responseStatusMessage; // Size of the response unsigned long long contentLength; // Size of the partially downloaded 
content unsigned long long partialDownloadSize; // Size of the POST payload unsigned long long postLength; // The total amount of downloaded data unsigned long long totalBytesRead; // The total amount of uploaded data unsigned long long totalBytesSent; // Last amount of data read (used for incrementing progress) unsigned long long lastBytesRead; // Last amount of data sent (used for incrementing progress) unsigned long long lastBytesSent; // This lock prevents the operation from being cancelled at an inopportune moment NSRecursiveLock *cancelledLock; // Called on the delegate (if implemented) when the request starts. Default is requestStarted: SEL didStartSelector; // Called on the delegate (if implemented) when the request receives response headers. Default is request:didReceiveResponseHeaders: SEL didReceiveResponseHeadersSelector; // Called on the delegate (if implemented) when the request receives a Location header and shouldRedirect is YES // The delegate can then change the url if needed, and can restart the request by calling [request resume], or simply cancel it SEL willRedirectSelector; // Called on the delegate (if implemented) when the request completes successfully. Default is requestFinished: SEL didFinishSelector; // Called on the delegate (if implemented) when the request fails. Default is requestFailed: SEL didFailSelector; // Called on the delegate (if implemented) when the request receives data. 
Default is request:didReceiveData: // If you set this and implement the method in your delegate, you must handle the data yourself - ASIHTTPRequest will not populate responseData or write the data to downloadDestinationPath SEL didReceiveDataSelector; // Used for recording when something last happened during the request, we will compare this value with the current date to time out requests when appropriate NSDate *lastActivityTime; // Number of seconds to wait before timing out - default is 10 NSTimeInterval timeOutSeconds; // Will be YES when a HEAD request will handle the content-length before this request starts BOOL shouldResetUploadProgress; BOOL shouldResetDownloadProgress; // Used by HEAD requests when showAccurateProgress is YES to preset the content-length for this request ASIHTTPRequest *mainRequest; // When NO, this request will only update the progress indicator when it completes // When YES, this request will update the progress indicator according to how much data it has received so far // The default for requests is YES // Also see the comments in ASINetworkQueue.h BOOL showAccurateProgress; // Used to ensure the progress indicator is only incremented once when showAccurateProgress = NO BOOL updatedProgress; // Prevents the body of the post being built more than once (largely for subclasses) BOOL haveBuiltPostBody; // Used internally, may reflect the size of the internal buffer used by CFNetwork // POST / PUT operations with body sizes greater than uploadBufferSize will not timeout unless more than uploadBufferSize bytes have been sent // Likely to be 32KB on iPhone 3.0, 128KB on Mac OS X Leopard and iPhone 2.2.x unsigned long long uploadBufferSize; // Text encoding for responses that do not send a Content-Type with a charset value. Defaults to NSISOLatin1StringEncoding NSStringEncoding defaultResponseEncoding; // The text encoding of the response, will be defaultResponseEncoding if the server didn't specify. Can't be set. 
NSStringEncoding responseEncoding; // Tells ASIHTTPRequest not to delete partial downloads, and allows it to use an existing file to resume a download. Defaults to NO. BOOL allowResumeForFileDownloads; // Custom user information associated with the request NSDictionary *userInfo; // Use HTTP 1.0 rather than 1.1 (defaults to false) BOOL useHTTPVersionOne; // When YES, requests will automatically redirect when they get a HTTP 30x header (defaults to YES) BOOL shouldRedirect; // Used internally to tell the main loop we need to stop and retry with a new url BOOL needsRedirect; // Incremented every time this request redirects. When it reaches 5, we give up int redirectCount; // When NO, requests will not check the secure certificate is valid (use for self-signed certificates during development, DO NOT USE IN PRODUCTION) Default is YES BOOL validatesSecureCertificate; // If not nil and the URL scheme is https, CFNetwork configured to supply a client certificate SecIdentityRef clientCertificateIdentity; NSArray *clientCertificates; // Details on the proxy to use - you could set these yourself, but it's probably best to let ASIHTTPRequest detect the system proxy settings NSString *proxyHost; int proxyPort; // ASIHTTPRequest will assume kCFProxyTypeHTTP if the proxy type could not be automatically determined // Set to kCFProxyTypeSOCKS if you are manually configuring a SOCKS proxy NSString *proxyType; // URL for a PAC (Proxy Auto Configuration) file. If you want to set this yourself, it's probably best if you use a local file NSURL *PACurl; // See ASIAuthenticationState values above. 
0 == default == No authentication needed yet ASIAuthenticationState authenticationNeeded; // When YES, ASIHTTPRequests will present credentials from the session store for requests to the same server before being asked for them // This avoids an extra round trip for requests after authentication has succeeded, which is much for efficient for authenticated requests with large bodies, or on slower connections // Set to NO to only present credentials when explicitly asked for them // This only affects credentials stored in the session cache when useSessionPersistence is YES. Credentials from the keychain are never presented unless the server asks for them // Default is YES BOOL shouldPresentCredentialsBeforeChallenge; // YES when the request hasn't finished yet. Will still be YES even if the request isn't doing anything (eg it's waiting for delegate authentication). READ-ONLY BOOL inProgress; // Used internally to track whether the stream is scheduled on the run loop or not // Bandwidth throttling can unschedule the stream to slow things down while a request is in progress BOOL readStreamIsScheduled; // Set to allow a request to automatically retry itself on timeout // Default is zero - timeout will stop the request int numberOfTimesToRetryOnTimeout; // The number of times this request has retried (when numberOfTimesToRetryOnTimeout > 0) int retryCount; // When YES, requests will keep the connection to the server alive for a while to allow subsequent requests to re-use it for a substantial speed-boost // Persistent connections will not be used if the server explicitly closes the connection // Default is YES BOOL shouldAttemptPersistentConnection; // Number of seconds to keep an inactive persistent connection open on the client side // Default is 60 // If we get a keep-alive header, this is this value is replaced with how long the server told us to keep the connection around // A future date is created from this and used for expiring the connection, this is stored in 
connectionInfo's expires value NSTimeInterval persistentConnectionTimeoutSeconds; // Set to yes when an appropriate keep-alive header is found BOOL connectionCanBeReused; // Stores information about the persistent connection that is currently in use. // It may contain: // * The id we set for a particular connection, incremented every time we want to specify that we need a new connection // * The date that connection should expire // * A host, port and scheme for the connection. These are used to determine whether that connection can be reused by a subsequent request (all must match the new request) // * An id for the request that is currently using the connection. This is used for determining if a connection is available or not (we store a number rather than a reference to the request so we don't need to hang onto a request until the connection expires) // * A reference to the stream that is currently using the connection. This is necessary because we need to keep the old stream open until we've opened a new one. 
// The stream will be closed + released either when another request comes to use the connection, or when the timer fires to tell the connection to expire NSMutableDictionary *connectionInfo; // When set to YES, 301 and 302 automatic redirects will use the original method and and body, according to the HTTP 1.1 standard // Default is NO (to follow the behaviour of most browsers) BOOL shouldUseRFC2616RedirectBehaviour; // Used internally to record when a request has finished downloading data BOOL downloadComplete; // An ID that uniquely identifies this request - primarily used for debugging persistent connections NSNumber *requestID; // Will be ASIHTTPRequestRunLoopMode for synchronous requests, NSDefaultRunLoopMode for all other requests NSString *runLoopMode; // This timer checks up on the request every 0.25 seconds, and updates progress NSTimer *statusTimer; // The download cache that will be used for this request (use [ASIHTTPRequest setDefaultCache:cache] to configure a default cache id <ASICacheDelegate> downloadCache; // The cache policy that will be used for this request - See ASICacheDelegate.h for possible values ASICachePolicy cachePolicy; // The cache storage policy that will be used for this request - See ASICacheDelegate.h for possible values ASICacheStoragePolicy cacheStoragePolicy; // Will be true when the response was pulled from the cache rather than downloaded BOOL didUseCachedResponse; // Set secondsToCache to use a custom time interval for expiring the response when it is stored in a cache NSTimeInterval secondsToCache; #if TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0 BOOL shouldContinueWhenAppEntersBackground; UIBackgroundTaskIdentifier backgroundTask; #endif // When downloading a gzipped response, the request will use this helper object to inflate the response ASIDataDecompressor *dataDecompressor; // Controls how responses with a gzipped encoding are inflated (decompressed) // When set to YES (This is the default): // * 
gzipped responses for requests without a downloadDestinationPath will be inflated only when [request responseData] / [request responseString] is called // * gzipped responses for requests with a downloadDestinationPath set will be inflated only when the request completes // // When set to NO // All requests will inflate the response as it comes in // * If the request has no downloadDestinationPath set, the raw (compressed) response is discarded and rawResponseData will contain the decompressed response // * If the request has a downloadDestinationPath, the raw response will be stored in temporaryFileDownloadPath as normal, the inflated response will be stored in temporaryUncompressedDataDownloadPath // Once the request completes successfully, the contents of temporaryUncompressedDataDownloadPath are moved into downloadDestinationPath // // Setting this to NO may be especially useful for users using ASIHTTPRequest in conjunction with a streaming parser, as it will allow partial gzipped responses to be inflated and passed on to the parser while the request is still running BOOL shouldWaitToInflateCompressedResponses; // Will be YES if this is a request created behind the scenes to download a PAC file - these requests do not attempt to configure their own proxies BOOL isPACFileRequest; // Used for downloading PAC files from http / https webservers ASIHTTPRequest *PACFileRequest; // Used for asynchronously reading PAC files from file:// URLs NSInputStream *PACFileReadStream; // Used for storing PAC data from file URLs as it is downloaded NSMutableData *PACFileData; // Set to YES in startSynchronous. 
Currently used by proxy detection to download PAC files synchronously when appropriate BOOL isSynchronous; #if NS_BLOCKS_AVAILABLE //block to execute when request starts ASIBasicBlock startedBlock; //block to execute when headers are received ASIHeadersBlock headersReceivedBlock; //block to execute when request completes successfully ASIBasicBlock completionBlock; //block to execute when request fails ASIBasicBlock failureBlock; //block for when bytes are received ASIProgressBlock bytesReceivedBlock; //block for when bytes are sent ASIProgressBlock bytesSentBlock; //block for when download size is incremented ASISizeBlock downloadSizeIncrementedBlock; //block for when upload size is incremented ASISizeBlock uploadSizeIncrementedBlock; //block for handling raw bytes received ASIDataBlock dataReceivedBlock; //block for handling authentication ASIBasicBlock authenticationNeededBlock; //block for handling proxy authentication ASIBasicBlock proxyAuthenticationNeededBlock; //block for handling redirections, if you want to ASIBasicBlock requestRedirectedBlock; #endif } #pragma mark init / dealloc // Should be an HTTP or HTTPS url, may include username and password if appropriate - (id)initWithURL:(NSURL *)newURL; // Convenience constructor + (id)requestWithURL:(NSURL *)newURL; + (id)requestWithURL:(NSURL *)newURL usingCache:(id <ASICacheDelegate>)cache; + (id)requestWithURL:(NSURL *)newURL usingCache:(id <ASICacheDelegate>)cache andCachePolicy:(ASICachePolicy)policy; #if NS_BLOCKS_AVAILABLE - (void)setStartedBlock:(ASIBasicBlock)aStartedBlock; - (void)setHeadersReceivedBlock:(ASIHeadersBlock)aReceivedBlock; - (void)setCompletionBlock:(ASIBasicBlock)aCompletionBlock; - (void)setFailedBlock:(ASIBasicBlock)aFailedBlock; - (void)setBytesReceivedBlock:(ASIProgressBlock)aBytesReceivedBlock; - (void)setBytesSentBlock:(ASIProgressBlock)aBytesSentBlock; - (void)setDownloadSizeIncrementedBlock:(ASISizeBlock) aDownloadSizeIncrementedBlock; - 
(void)setUploadSizeIncrementedBlock:(ASISizeBlock) anUploadSizeIncrementedBlock; - (void)setDataReceivedBlock:(ASIDataBlock)aReceivedBlock; - (void)setAuthenticationNeededBlock:(ASIBasicBlock)anAuthenticationBlock; - (void)setProxyAuthenticationNeededBlock:(ASIBasicBlock)aProxyAuthenticationBlock; - (void)setRequestRedirectedBlock:(ASIBasicBlock)aRedirectBlock; #endif #pragma mark setup request // Add a custom header to the request - (void)addRequestHeader:(NSString *)header value:(NSString *)value; // Called during buildRequestHeaders and after a redirect to create a cookie header from request cookies and the global store - (void)applyCookieHeader; // Populate the request headers dictionary. Called before a request is started, or by a HEAD request that needs to borrow them - (void)buildRequestHeaders; // Used to apply authorization header to a request before it is sent (when shouldPresentCredentialsBeforeChallenge is YES) - (void)applyAuthorizationHeader; // Create the post body - (void)buildPostBody; // Called to add data to the post body. 
Will append to postBody when shouldStreamPostDataFromDisk is false, or write to postBodyWriteStream when true - (void)appendPostData:(NSData *)data; - (void)appendPostDataFromFile:(NSString *)file; #pragma mark get information about this request // Returns the contents of the result as an NSString (not appropriate for binary data - used responseData instead) - (NSString *)responseString; // Response data, automatically uncompressed where appropriate - (NSData *)responseData; // Returns true if the response was gzip compressed - (BOOL)isResponseCompressed; #pragma mark running a request // Run a request synchronously, and return control when the request completes or fails - (void)startSynchronous; // Run request in the background - (void)startAsynchronous; // Clears all delegates and blocks, then cancels the request - (void)clearDelegatesAndCancel; #pragma mark HEAD request // Used by ASINetworkQueue to create a HEAD request appropriate for this request with the same headers (though you can use it yourself) - (ASIHTTPRequest *)HEADRequest; #pragma mark upload/download progress // Called approximately every 0.25 seconds to update the progress delegates - (void)updateProgressIndicators; // Updates upload progress (notifies the queue and/or uploadProgressDelegate of this request) - (void)updateUploadProgress; // Updates download progress (notifies the queue and/or uploadProgressDelegate of this request) - (void)updateDownloadProgress; // Called when authorisation is needed, as we only find out we don't have permission to something when the upload is complete - (void)removeUploadProgressSoFar; // Called when we get a content-length header and shouldResetDownloadProgress is true - (void)incrementDownloadSizeBy:(long long)length; // Called when a request starts and shouldResetUploadProgress is true // Also called (with a negative length) to remove the size of the underlying buffer used for uploading - (void)incrementUploadSizeBy:(long long)length; // Helper method for 
interacting with progress indicators to abstract the details of different APIS (NSProgressIndicator and UIProgressView) + (void)updateProgressIndicator:(id *)indicator withProgress:(unsigned long long)progress ofTotal:(unsigned long long)total; // Helper method used for performing invocations on the main thread (used for progress) + (void)performSelector:(SEL)selector onTarget:(id *)target withObject:(id)object amount:(void *)amount callerToRetain:(id)caller; #pragma mark talking to delegates // Called when a request starts, lets the delegate know via didStartSelector - (void)requestStarted; // Called when a request receives response headers, lets the delegate know via didReceiveResponseHeadersSelector - (void)requestReceivedResponseHeaders:(NSDictionary *)newHeaders; // Called when a request completes successfully, lets the delegate know via didFinishSelector - (void)requestFinished; // Called when a request fails, and lets the delegate know via didFailSelector - (void)failWithError:(NSError *)theError; // Called to retry our request when our persistent connection is closed // Returns YES if we haven't already retried, and connection will be restarted // Otherwise, returns NO, and nothing will happen - (BOOL)retryUsingNewConnection; // Can be called by delegates from inside their willRedirectSelector implementations to restart the request with a new url - (void)redirectToURL:(NSURL *)newURL; #pragma mark parsing HTTP response headers // Reads the response headers to find the content length, encoding, cookies for the session // Also initiates request redirection when shouldRedirect is true // And works out if HTTP auth is required - (void)readResponseHeaders; // Attempts to set the correct encoding by looking at the Content-Type header, if this is one - (void)parseStringEncodingFromHeaders; + (void)parseMimeType:(NSString **)mimeType andResponseEncoding:(NSStringEncoding *)stringEncoding fromContentType:(NSString *)contentType; #pragma mark http authentication 
stuff // Apply credentials to this request - (BOOL)applyCredentials:(NSDictionary *)newCredentials; - (BOOL)applyProxyCredentials:(NSDictionary *)newCredentials; // Attempt to obtain credentials for this request from the URL, username and password or keychain - (NSMutableDictionary *)findCredentials; - (NSMutableDictionary *)findProxyCredentials; // Unlock (unpause) the request thread so it can resume the request // Should be called by delegates when they have populated the authentication information after an authentication challenge - (void)retryUsingSuppliedCredentials; // Should be called by delegates when they wish to cancel authentication and stop - (void)cancelAuthentication; // Apply authentication information and resume the request after an authentication challenge - (void)attemptToApplyCredentialsAndResume; - (void)attemptToApplyProxyCredentialsAndResume; // Attempt to show the built-in authentication dialog, returns YES if credentials were supplied, NO if user cancelled dialog / dialog is disabled / running on main thread // Currently only used on iPhone OS - (BOOL)showProxyAuthenticationDialog; - (BOOL)showAuthenticationDialog; // Construct a basic authentication header from the username and password supplied, and add it to the request headers // Used when shouldPresentCredentialsBeforeChallenge is YES - (void)addBasicAuthenticationHeaderWithUsername:(NSString *)theUsername andPassword:(NSString *)thePassword; #pragma mark stream status handlers // CFnetwork event handlers - (void)handleNetworkEvent:(CFStreamEventType)type; - (void)handleBytesAvailable; - (void)handleStreamComplete; - (void)handleStreamError; #pragma mark cleanup // Cleans up and lets the queue know this operation is finished. // Appears in this header for subclassing only, do not call this method from outside your request! - (void)markAsFinished; // Cleans up temporary files. 
There's normally no reason to call these yourself, they are called automatically when a request completes or fails // Clean up the temporary file used to store the downloaded data when it comes in (if downloadDestinationPath is set) - (BOOL)removeTemporaryDownloadFile; // Clean up the temporary file used to store data that is inflated (decompressed) as it comes in - (BOOL)removeTemporaryUncompressedDownloadFile; // Clean up the temporary file used to store the request body (when shouldStreamPostDataFromDisk is YES) - (BOOL)removeTemporaryUploadFile; // Clean up the temporary file used to store a deflated (compressed) request body when shouldStreamPostDataFromDisk is YES - (BOOL)removeTemporaryCompressedUploadFile; // Remove a file on disk, returning NO and populating the passed error pointer if it fails + (BOOL)removeFileAtPath:(NSString *)path error:(NSError **)err; #pragma mark persistent connections // Get the ID of the connection this request used (only really useful in tests and debugging) - (NSNumber *)connectionID; // Called automatically when a request is started to clean up any persistent connections that have expired + (void)expirePersistentConnections; #pragma mark default time out + (NSTimeInterval)defaultTimeOutSeconds; + (void)setDefaultTimeOutSeconds:(NSTimeInterval)newTimeOutSeconds; #pragma mark client certificate - (void)setClientCertificateIdentity:(SecIdentityRef)anIdentity; #pragma mark session credentials + (NSMutableArray *)sessionProxyCredentialsStore; + (NSMutableArray *)sessionCredentialsStore; + (void)storeProxyAuthenticationCredentialsInSessionStore:(NSDictionary *)credentials; + (void)storeAuthenticationCredentialsInSessionStore:(NSDictionary *)credentials; + (void)removeProxyAuthenticationCredentialsFromSessionStore:(NSDictionary *)credentials; + (void)removeAuthenticationCredentialsFromSessionStore:(NSDictionary *)credentials; - (NSDictionary *)findSessionProxyAuthenticationCredentials; - (NSDictionary 
*)findSessionAuthenticationCredentials; #pragma mark keychain storage // Save credentials for this request to the keychain - (void)saveCredentialsToKeychain:(NSDictionary *)newCredentials; // Save credentials to the keychain + (void)saveCredentials:(NSURLCredential *)credentials forHost:(NSString *)host port:(int)port protocol:(NSString *)protocol realm:(NSString *)realm; + (void)saveCredentials:(NSURLCredential *)credentials forProxy:(NSString *)host port:(int)port realm:(NSString *)realm; // Return credentials from the keychain + (NSURLCredential *)savedCredentialsForHost:(NSString *)host port:(int)port protocol:(NSString *)protocol realm:(NSString *)realm; + (NSURLCredential *)savedCredentialsForProxy:(NSString *)host port:(int)port protocol:(NSString *)protocol realm:(NSString *)realm; // Remove credentials from the keychain + (void)removeCredentialsForHost:(NSString *)host port:(int)port protocol:(NSString *)protocol realm:(NSString *)realm; + (void)removeCredentialsForProxy:(NSString *)host port:(int)port realm:(NSString *)realm; // We keep track of any cookies we accept, so that we can remove them from the persistent store later + (void)setSessionCookies:(NSMutableArray *)newSessionCookies; + (NSMutableArray *)sessionCookies; // Adds a cookie to our list of cookies we've accepted, checking first for an old version of the same cookie and removing that + (void)addSessionCookie:(NSHTTPCookie *)newCookie; // Dump all session data (authentication and cookies) + (void)clearSession; #pragma mark get user agent // Will be used as a user agent if requests do not specify a custom user agent // Is only used when you have specified a Bundle Display Name (CFDisplayBundleName) or Bundle Name (CFBundleName) in your plist + (NSString *)defaultUserAgentString; + (void)setDefaultUserAgentString:(NSString *)agent; #pragma mark mime-type detection // Return the mime type for a file + (NSString *)mimeTypeForFileAtPath:(NSString *)path; #pragma mark bandwidth measurement / 
throttling // The maximum number of bytes ALL requests can send / receive in a second // This is a rough figure. The actual amount used will be slightly more, this does not include HTTP headers + (unsigned long)maxBandwidthPerSecond; + (void)setMaxBandwidthPerSecond:(unsigned long)bytes; // Get a rough average (for the last 5 seconds) of how much bandwidth is being used, in bytes + (unsigned long)averageBandwidthUsedPerSecond; - (void)performThrottling; // Will return YES is bandwidth throttling is currently in use + (BOOL)isBandwidthThrottled; // Used internally to record bandwidth use, and by ASIInputStreams when uploading. It's probably best if you don't mess with this. + (void)incrementBandwidthUsedInLastSecond:(unsigned long)bytes; // On iPhone, ASIHTTPRequest can automatically turn throttling on and off as the connection type changes between WWAN and WiFi #if TARGET_OS_IPHONE // Set to YES to automatically turn on throttling when WWAN is connected, and automatically turn it off when it isn't + (void)setShouldThrottleBandwidthForWWAN:(BOOL)throttle; // Turns on throttling automatically when WWAN is connected using a custom limit, and turns it off automatically when it isn't + (void)throttleBandwidthForWWANUsingLimit:(unsigned long)limit; #pragma mark reachability // Returns YES when an iPhone OS device is connected via WWAN, false when connected via WIFI or not connected + (BOOL)isNetworkReachableViaWWAN; #endif #pragma mark queue // Returns the shared queue + (NSOperationQueue *)sharedQueue; #pragma mark cache + (void)setDefaultCache:(id <ASICacheDelegate>)cache; + (id <ASICacheDelegate>)defaultCache; // Returns the maximum amount of data we can read as part of the current measurement period, and sleeps this thread if our allowance is used up + (unsigned long)maxUploadReadLength; #pragma mark network activity + (BOOL)isNetworkInUse; + (void)setShouldUpdateNetworkActivityIndicator:(BOOL)shouldUpdate; // Shows the network activity spinner thing on iOS. 
You may wish to override this to do something else in Mac projects + (void)showNetworkActivityIndicator; // Hides the network activity spinner thing on iOS + (void)hideNetworkActivityIndicator; #pragma mark miscellany // Used for generating Authorization header when using basic authentication when shouldPresentCredentialsBeforeChallenge is true // And also by ASIS3Request + (NSString *)base64forData:(NSData *)theData; // Returns a date from a string in RFC1123 format + (NSDate *)dateFromRFC1123String:(NSString *)string; // Used for detecting multitasking support at runtime (for backgrounding requests) #if TARGET_OS_IPHONE + (BOOL)isMultitaskingSupported; #endif #pragma mark threading behaviour // In the default implementation, all requests run in a single background thread // Advanced users only: Override this method in a subclass for a different threading behaviour // Eg: return [NSThread mainThread] to run all requests in the main thread // Alternatively, you can create a thread on demand, or manage a pool of threads // Threads returned by this method will need to run the runloop in default mode (eg CFRunLoopRun()) // Requests will stop the runloop when they complete // If you have multiple requests sharing the thread you'll need to restart the runloop when this happens + (NSThread *)threadForRequest:(ASIHTTPRequest *)request; #pragma mark === @property (retain) NSString *username; @property (retain) NSString *password; @property (retain) NSString *userAgent; @property (retain) NSString *domain; @property (retain) NSString *proxyUsername; @property (retain) NSString *proxyPassword; @property (retain) NSString *proxyDomain; @property (retain) NSString *proxyHost; @property (assign) int proxyPort; @property (retain) NSString *proxyType; @property (retain,setter=setURL:, nonatomic) NSURL *url; @property (retain) NSURL *originalURL; @property (assign, nonatomic) id delegate; @property (retain, nonatomic) id queue; @property (assign, nonatomic) id 
uploadProgressDelegate; @property (assign, nonatomic) id downloadProgressDelegate; @property (assign) BOOL useKeychainPersistence; @property (assign) BOOL useSessionPersistence; @property (retain) NSString *downloadDestinationPath; @property (retain) NSString *temporaryFileDownloadPath; @property (retain) NSString *temporaryUncompressedDataDownloadPath; @property (assign) SEL didStartSelector; @property (assign) SEL didReceiveResponseHeadersSelector; @property (assign) SEL willRedirectSelector; @property (assign) SEL didFinishSelector; @property (assign) SEL didFailSelector; @property (assign) SEL didReceiveDataSelector; @property (retain,readonly) NSString *authenticationRealm; @property (retain,readonly) NSString *proxyAuthenticationRealm; @property (retain) NSError *error; @property (assign,readonly) BOOL complete; @property (retain) NSDictionary *responseHeaders; @property (retain) NSMutableDictionary *requestHeaders; @property (retain) NSMutableArray *requestCookies; @property (retain,readonly) NSArray *responseCookies; @property (assign) BOOL useCookiePersistence; @property (retain) NSDictionary *requestCredentials; @property (retain) NSDictionary *proxyCredentials; @property (assign,readonly) int responseStatusCode; @property (retain,readonly) NSString *responseStatusMessage; @property (retain) NSMutableData *rawResponseData; @property (assign) NSTimeInterval timeOutSeconds; @property (retain) NSString *requestMethod; @property (retain) NSMutableData *postBody; @property (assign) unsigned long long contentLength; @property (assign) unsigned long long postLength; @property (assign) BOOL shouldResetDownloadProgress; @property (assign) BOOL shouldResetUploadProgress; @property (assign) ASIHTTPRequest *mainRequest; @property (assign) BOOL showAccurateProgress; @property (assign) unsigned long long totalBytesRead; @property (assign) unsigned long long totalBytesSent; @property (assign) NSStringEncoding defaultResponseEncoding; @property (assign) NSStringEncoding 
responseEncoding; @property (assign) BOOL allowCompressedResponse; @property (assign) BOOL allowResumeForFileDownloads; @property (retain) NSDictionary *userInfo; @property (retain) NSString *postBodyFilePath; @property (assign) BOOL shouldStreamPostDataFromDisk; @property (assign) BOOL didCreateTemporaryPostDataFile; @property (assign) BOOL useHTTPVersionOne; @property (assign, readonly) unsigned long long partialDownloadSize; @property (assign) BOOL shouldRedirect; @property (assign) BOOL validatesSecureCertificate; @property (assign) BOOL shouldCompressRequestBody; @property (retain) NSURL *PACurl; @property (retain) NSString *authenticationScheme; @property (retain) NSString *proxyAuthenticationScheme; @property (assign) BOOL shouldPresentAuthenticationDialog; @property (assign) BOOL shouldPresentProxyAuthenticationDialog; @property (assign, readonly) ASIAuthenticationState authenticationNeeded; @property (assign) BOOL shouldPresentCredentialsBeforeChallenge; @property (assign, readonly) int authenticationRetryCount; @property (assign, readonly) int proxyAuthenticationRetryCount; @property (assign) BOOL haveBuiltRequestHeaders; @property (assign, nonatomic) BOOL haveBuiltPostBody; @property (assign, readonly) BOOL inProgress; @property (assign) int numberOfTimesToRetryOnTimeout; @property (assign, readonly) int retryCount; @property (assign) BOOL shouldAttemptPersistentConnection; @property (assign) NSTimeInterval persistentConnectionTimeoutSeconds; @property (assign) BOOL shouldUseRFC2616RedirectBehaviour; @property (assign, readonly) BOOL connectionCanBeReused; @property (retain, readonly) NSNumber *requestID; @property (assign) id <ASICacheDelegate> downloadCache; @property (assign) ASICachePolicy cachePolicy; @property (assign) ASICacheStoragePolicy cacheStoragePolicy; @property (assign, readonly) BOOL didUseCachedResponse; @property (assign) NSTimeInterval secondsToCache; @property (retain) NSArray *clientCertificates; #if TARGET_OS_IPHONE && 
__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0 @property (assign) BOOL shouldContinueWhenAppEntersBackground; #endif @property (retain) ASIDataDecompressor *dataDecompressor; @property (assign) BOOL shouldWaitToInflateCompressedResponses; @end
{ "content_hash": "bb5baba429be6e18354b41025a360c01", "timestamp": "", "source": "github", "line_count": 983, "max_line_length": 260, "avg_line_length": 45.408952187182095, "alnum_prop": 0.7797567040795752, "repo_name": "ludaye123/SangforSDK", "id": "55be2659ef330e76a3ee4cbb4735c6cdfbb49257", "size": "45019", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SdkDemo/SdkDemo/Other Sources/ASIHTTPRequest/ASIHTTPRequest.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "18273" }, { "name": "Objective-C", "bytes": "1026022" }, { "name": "Ruby", "bytes": "5630" }, { "name": "Shell", "bytes": "8598" } ], "symlink_target": "" }
using System.Windows.Input;
using Sketching.Tool;
using Xamarin.Forms;

namespace Sketching.Views
{
	/// <summary>
	/// One entry in the sketch toolbar: an image button bound to a drawing
	/// <see cref="ITool"/>, with a coloured underline shown while selected.
	/// </summary>
	public partial class SketchToolbarItem : ContentView
	{
		public static readonly BindableProperty SelectionColorProperty =
			BindableProperty.Create(nameof(SelectionColor), typeof(Color), typeof(SketchToolbarItem), Color.Orange);

		/// <summary>Colour of the underline while this item is selected. Defaults to orange.</summary>
		public Color SelectionColor
		{
			get { return (Color)GetValue(SelectionColorProperty); }
			set { SetValue(SelectionColorProperty, value); }
		}

		public static readonly BindableProperty IsSelectedProperty =
			BindableProperty.Create(nameof(IsSelected), typeof(bool), typeof(SketchToolbarItem), false, propertyChanged: IsSelectedPropertyChanged);

		/// <summary>True when this item represents the currently active tool.</summary>
		public bool IsSelected
		{
			get { return (bool)GetValue(IsSelectedProperty); }
			set { SetValue(IsSelectedProperty, value); }
		}

		// Shows or hides the selection underline whenever IsSelected flips.
		private static void IsSelectedPropertyChanged(BindableObject bindable, object oldValue, object newValue)
		{
			var item = (SketchToolbarItem)bindable;
			var isSelected = (bool)newValue;
			item.toolLine.BackgroundColor = isSelected ? item.SelectionColor : Color.Transparent;
		}

		/// <summary>The drawing tool this toolbar item activates.</summary>
		public ITool Tool { get; set; }

		/// <summary>
		/// Builds the item: wires the icon image and a tap gesture that invokes
		/// <paramref name="tappedCommand"/> with the tool as its parameter.
		/// </summary>
		public SketchToolbarItem(ImageSource imageSource, ITool tool, ICommand tappedCommand)
		{
			InitializeComponent();
			Tool = tool;
			toolImage.Source = imageSource;
			var tapRecognizer = new TapGestureRecognizer
			{
				Command = tappedCommand,
				CommandParameter = tool
			};
			toolImage.GestureRecognizers.Add(tapRecognizer);
		}
	}
}
{ "content_hash": "d7182f8925be17015beb1808bbd468cd", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 199, "avg_line_length": 30.686274509803923, "alnum_prop": 0.731629392971246, "repo_name": "xeterixon/Sketching", "id": "ddcb06e29de6beb0cc0b2e4c52f2cf92ee6d0706", "size": "1567", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SketchUpp/Sketching/Sketching.Common/Views/SketchToolbarItem.xaml.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "138377" } ], "symlink_target": "" }
// Assembly-level metadata for the ClientExample project (title, COM
// visibility, type-library GUID and version numbers). Edited values here
// affect the compiled assembly's manifest only — there is no runtime logic.
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("ClientExample")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("ClientExample")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("a1f8825e-9f68-45f7-80be-c4f07edd47e5")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "9b83390c75df85c8d06745c9106e1b02", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 84, "avg_line_length": 38.861111111111114, "alnum_prop": 0.7455325232308792, "repo_name": "Mobicents/restcomm-sdk-dotnet", "id": "e60f7cd98e729f14221bba7a6a8569ad79f1c07a", "size": "1402", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Examples/ClientExample/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
NS_ASSUME_NONNULL_BEGIN

// Abstraction over the user-notification center: authorization, categories,
// settings and (pending) notification requests. Mirrors the shape of the
// UNUserNotificationCenter API it forwards to — TODO confirm the concrete
// implementation against the module registry.
@protocol ABI43_0_0EXUserNotificationCenterService

// Asks the user for notification permissions with the given options.
- (void)requestAuthorizationWithOptions:(UNAuthorizationOptions)options completionHandler:(void (^)(BOOL granted, NSError *__nullable error))completionHandler;
// Replaces the registered notification categories with `categories`.
- (void)setNotificationCategories:(NSSet<UNNotificationCategory *> *)categories;
// Fetches the currently registered notification categories.
- (void)getNotificationCategoriesWithCompletionHandler:(void(^)(NSSet<UNNotificationCategory *> *categories))completionHandler;
// Fetches the current notification settings (authorization status etc.).
- (void)getNotificationSettingsWithCompletionHandler:(void(^)(UNNotificationSettings *settings))completionHandler;
// Schedules a notification request; completion receives a scheduling error, if any.
- (void)addNotificationRequest:(UNNotificationRequest *)request withCompletionHandler:(nullable void(^)(NSError *__nullable error))completionHandler;
// Lists notification requests that are scheduled but not yet delivered.
- (void)getPendingNotificationRequestsWithCompletionHandler:(void(^)(NSArray<UNNotificationRequest *> *requests))completionHandler;
// Cancels the pending requests whose identifiers are listed.
- (void)removePendingNotificationRequestsWithIdentifiers:(NSArray<NSString *> *)identifiers;
// Cancels every pending notification request.
- (void)removeAllPendingNotificationRequests;

@end

// Delegate that resolves push tokens on behalf of a scoped (per-experience) module.
@protocol ABI43_0_0EXNotificationsScopedModuleDelegate

// Resolves the raw APNs device token for the calling scoped module.
- (void)getApnsTokenForScopedModule:(id)scopedModule
                  completionHandler:(void (^)(NSString * _Nullable, NSError * _Nullable))handler;
// Resolves the Expo push token for the calling scoped module.
- (void)getExpoPushTokenForScopedModule:(id)scopedModule
                      completionHandler:(void (^)(NSString * _Nullable pushToken, NSError * _Nullable error))handler;

@end

// Maps notification identifiers between their public (exported) form and an
// internal form namespaced by the experience's scope key, so experiences
// cannot see or collide with each other's notifications.
@protocol ABI43_0_0EXNotificationsIdentifiersManager

// Internal (scoped) identifier for a public identifier within `scopeKey`.
- (NSString *)internalIdForIdentifier:(NSString *)identifier scopeKey:(NSString *)scopeKey;
// Public identifier corresponding to an internal (scoped) identifier.
- (NSString *)exportedIdForInternalIdentifier:(NSString *)identifier;

@end

// Bridge module exposing the notifications API to a scoped experience.
@interface ABI43_0_0EXNotifications : ABI43_0_0EXScopedBridgeModule

@end

NS_ASSUME_NONNULL_END
{ "content_hash": "62bf5957903f4b2520b776d3893599b4", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 159, "avg_line_length": 42.55, "alnum_prop": 0.8037602820211516, "repo_name": "exponentjs/exponent", "id": "ef178a020518682c49c9ae727b9a08c67e302891", "size": "1901", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "ios/versioned-react-native/ABI43_0_0/Expo/ExpoKit/Core/Api/ABI43_0_0EXNotifications.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "96902" }, { "name": "Batchfile", "bytes": "382" }, { "name": "C", "bytes": "896724" }, { "name": "C++", "bytes": "867983" }, { "name": "CSS", "bytes": "6732" }, { "name": "HTML", "bytes": "152590" }, { "name": "IDL", "bytes": "897" }, { "name": "Java", "bytes": "4588748" }, { "name": "JavaScript", "bytes": "9343259" }, { "name": "Makefile", "bytes": "8790" }, { "name": "Objective-C", "bytes": "10675806" }, { "name": "Objective-C++", "bytes": "364286" }, { "name": "Perl", "bytes": "5860" }, { "name": "Prolog", "bytes": "287" }, { "name": "Python", "bytes": "97564" }, { "name": "Ruby", "bytes": "45432" }, { "name": "Shell", "bytes": "6501" } ], "symlink_target": "" }
 using SolutionZ.StandAloneWidget; namespace StandAloneWidget.Models.Search { public class AirportDistanceModel { public Airport Airport { get; set; } public double DistanceToCity { get; set; } } }
{ "content_hash": "23fdd86c76b32552da86007046ccbd7d", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 50, "avg_line_length": 22.7, "alnum_prop": 0.6872246696035242, "repo_name": "DanielCruz4th/StandAloneWidgetZ", "id": "0e4337e5a27ff1ee4b0446a49c5040005e81a68c", "size": "229", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "StandAloneWidget/Models/Search/AirportDistanceModel.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "107" }, { "name": "C#", "bytes": "325870" }, { "name": "CSS", "bytes": "50" } ], "symlink_target": "" }
class Solution(object):
    def isSameTree(self, p, q):
        """Return True when trees rooted at ``p`` and ``q`` are structurally
        identical and every corresponding node carries the same value.

        :type p: TreeNode
        :type q: TreeNode
        :rtype: bool
        """
        # Guard: if either root is missing, the trees match only when
        # both are missing.
        if p is None or q is None:
            return p is q
        # Values must agree at this node...
        if p.val != q.val:
            return False
        # ...and both subtrees must match recursively.
        return (self.isSameTree(p.left, q.left)
                and self.isSameTree(p.right, q.right))
{ "content_hash": "cb279c3d81c5da490cc7f59e4da62817", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 103, "avg_line_length": 37.1875, "alnum_prop": 0.5764705882352941, "repo_name": "young-geng/leet_code", "id": "d270efffeafc67ad7ee43c3b9a086b40372795ae", "size": "810", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "problems/100_same-tree/main.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "599" }, { "name": "Python", "bytes": "111519" } ], "symlink_target": "" }
#ifndef __UX_DARWIN_FCNTL_H
# define __UX_DARWIN_FCNTL_H 1

/* <fcntl.h> open-flag values for the Darwin port.  The numeric values are
 * presumably chosen to match Darwin's native <sys/fcntl.h> ABI — verify
 * against the system header before changing any of them. */

/* fcntl() cmd values */

/* fcntl() file descriptor flags */

/* fcntl() locking types */

/* open flags */

/* create file if it does not exist */
# define O_CREAT 0x000200
/* exclusive use flag */
# define O_EXCL 0x000800
/* do not assign controlling terminal */
# define O_NOCTTY 0x020000
/* truncate flag */
# define O_TRUNC 0x000400

/* set append mode */
# define O_APPEND 0x000008
# if __UX_OPT_SIO >= 0
/* write according to synchronized I/O data integrity completion */
# define O_DSYNC 0x400000
# endif
/* non-blocking mode */
# define O_NONBLOCK 0x000004
# if __UX_OPT_SIO >= 0
/* synchronised read I/O operations
 * (defined as 0, i.e. no distinct bit on this platform — TODO confirm intent) */
# define O_RSYNC 0x000000
/* file status flags */
# endif
/* write according to synchronized I/O file integrity completion */
# define O_SYNC 0x000080

/* access modes mask */
# define O_ACCMODE 0x000003

/* access modes */

/* open for reading */
# define O_RDONLY 0x000000
/* open for writing */
# define O_WRONLY 0x000001
/* open for reading and writing */
# define O_RDWR 0x000002

#endif /*__UX_DARWIN_FCNTL_H*/
{ "content_hash": "c05244bcfb6aa61f2f30497285cb70da", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 67, "avg_line_length": 27.41176470588235, "alnum_prop": 0.5443490701001431, "repo_name": "coreux/libunistd", "id": "b39a45eb88cbdddf707f172c417d8a01118f177d", "size": "2003", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ux/darwin/fcntl.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "32618" }, { "name": "C", "bytes": "35734" }, { "name": "Objective-C", "bytes": "5139" }, { "name": "Perl", "bytes": "4382" }, { "name": "Shell", "bytes": "1030" } ], "symlink_target": "" }
package com.example.zcp.recyclerviewdemo; /** * Created by 赵晨璞 on 2016/6/16. */ public class Meizi { public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } private String url; public int getPage() { return page; } public void setPage(int page) { this.page = page; } private int page; }
{ "content_hash": "e18339d31a9c33e2c0c252e720166907", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 41, "avg_line_length": 15.423076923076923, "alnum_prop": 0.5660847880299252, "repo_name": "weiwenqiang/GitHub", "id": "9340704ab97bc50c21c747b35f0aca5543244651", "size": "407", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ListView/RecyclerViewDemoB-master/app/src/main/java/com/example/zcp/recyclerviewdemo/Meizi.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "87" }, { "name": "C", "bytes": "42062" }, { "name": "C++", "bytes": "12137" }, { "name": "CMake", "bytes": "202" }, { "name": "CSS", "bytes": "75087" }, { "name": "Clojure", "bytes": "12036" }, { "name": "FreeMarker", "bytes": "21704" }, { "name": "Groovy", "bytes": "55083" }, { "name": "HTML", "bytes": "61549" }, { "name": "Java", "bytes": "42222825" }, { "name": "JavaScript", "bytes": "216823" }, { "name": "Kotlin", "bytes": "24319" }, { "name": "Makefile", "bytes": "19490" }, { "name": "Perl", "bytes": "280" }, { "name": "Prolog", "bytes": "1030" }, { "name": "Python", "bytes": "13032" }, { "name": "Scala", "bytes": "310450" }, { "name": "Shell", "bytes": "27802" } ], "symlink_target": "" }
package org.jboss.qa.brms.performance.localsearch.tsp; import java.util.Collections; import org.jboss.qa.brms.performance.examples.Examples; import org.jboss.qa.brms.performance.examples.tsp.TravelingSalesmanExample; import org.jboss.qa.brms.performance.examples.tsp.solution.TSPSolutionInitializer; import org.jboss.qa.brms.performance.localsearch.AbstractLocalSearchPlannerBenchmark; import org.openjdk.jmh.annotations.Param; import org.optaplanner.core.api.solver.Solver; import org.optaplanner.core.api.solver.SolverFactory; import org.optaplanner.core.config.heuristic.selector.move.composite.UnionMoveSelectorConfig; import org.optaplanner.core.config.localsearch.LocalSearchPhaseConfig; import org.optaplanner.core.config.localsearch.decider.forager.LocalSearchForagerConfig; import org.optaplanner.core.config.phase.custom.CustomPhaseConfig; import org.optaplanner.core.config.solver.SolverConfig; import org.optaplanner.examples.tsp.domain.TspSolution; public abstract class AbstractTSPLocalSearchBenchmark extends AbstractLocalSearchPlannerBenchmark<TspSolution> { @Param({"LU_980", "USA_CA_2716", "GREECE_9882"}) private TravelingSalesmanExample.DataSet dataset; @Override protected TspSolution createInitialSolution() { CustomPhaseConfig customPhaseConfig = new CustomPhaseConfig(); customPhaseConfig.setCustomPhaseCommandClassList(Collections.singletonList(TSPSolutionInitializer.class)); SolverConfig solverConfig = Examples.TRAVELING_SALESMAN.getBaseSolverConfig(); solverConfig.setPhaseConfigList(Collections.singletonList(customPhaseConfig)); SolverFactory<TspSolution> solverFactory = SolverFactory.create(solverConfig); Solver<TspSolution> constructionSolver = solverFactory.buildSolver(); TspSolution solution = Examples.TRAVELING_SALESMAN.loadSolvingProblem(dataset); constructionSolver.solve(solution); return constructionSolver.getBestSolution(); } @Override protected Solver<TspSolution> createSolver() { LocalSearchPhaseConfig localSearchPhaseConfig = new 
LocalSearchPhaseConfig(); localSearchPhaseConfig.setMoveSelectorConfig(new UnionMoveSelectorConfig()); ((UnionMoveSelectorConfig) localSearchPhaseConfig.getMoveSelectorConfig()) .setMoveSelectorConfigList(createMoveSelectorConfigList()); localSearchPhaseConfig.setAcceptorConfig(createAcceptorConfig()); localSearchPhaseConfig.setForagerConfig(new LocalSearchForagerConfig()); localSearchPhaseConfig.getForagerConfig().setAcceptedCountLimit(getAcceptedCountLimit()); localSearchPhaseConfig.setTerminationConfig(getTerminationConfig()); SolverConfig solverConfig = Examples.TRAVELING_SALESMAN.getBaseSolverConfig(); solverConfig.setPhaseConfigList(Collections.singletonList(localSearchPhaseConfig)); SolverFactory<TspSolution> solverFactory = SolverFactory.create(solverConfig); return solverFactory.buildSolver(); } }
{ "content_hash": "9653a6cefdfd8c7455f7ff78532f603d", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 114, "avg_line_length": 52.172413793103445, "alnum_prop": 0.8017184401850628, "repo_name": "droolsjbpm/kie-benchmarks", "id": "a9ba920fa670a3b93dec038e73d9998db76a6bdb", "size": "3026", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "optaplanner-7-benchmarks/optaplanner-perf-benchmark/src/main/java/org/jboss/qa/brms/performance/localsearch/tsp/AbstractTSPLocalSearchBenchmark.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "360638" } ], "symlink_target": "" }
<!doctype html>
<title>fieldset accessibility test: position: absolute legend</title>
<link rel=help href=https://w3c.github.io/html-aam/#fieldset-element-accessible-name-computation>
<style>
legend {
  position: absolute;
}
</style>
<!-- Although the legend is absolutely positioned (taken out of normal flow),
     the fieldset's accessible name should still be computed from it, per the
     HTML-AAM name-computation rules linked above. -->
<fieldset id=fieldset>
  <legend>Foo</legend>
  <input>
</fieldset>
<p>Expected accessible name for id=fieldset: "Foo"
{ "content_hash": "99b1733d9d02fc699435f5aee8321ed7", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 97, "avg_line_length": 31.818181818181817, "alnum_prop": 0.7514285714285714, "repo_name": "ric2b/Vivaldi-browser", "id": "019e63fcd3644236516a72b5796d063eb00eb36e", "size": "350", "binary": false, "copies": "36", "ref": "refs/heads/master", "path": "chromium/third_party/blink/web_tests/external/wpt/html/semantics/forms/the-fieldset-element/accessibility/legend-abspos-manual.html", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
/*!
 * @module compute/rule
 */

'use strict';

var nodeutil = require('util');

/**
 * @type {module:common/serviceObject}
 * @private
 */
var ServiceObject = require('../common/service-object.js');

/**
 * @type {module:common/util}
 * @private
 */
var util = require('../common/util.js');

/*! Developer Documentation
 *
 * @param {module:compute|module:compute/region} scope - The parent scope this
 *     firewall rule belongs to.
 * @param {string} name - Rule name.
 */
/**
 * Forwarding rules work in conjunction with target pools and target instances
 * to support load balancing and protocol forwarding features. To use load
 * balancing and protocol forwarding, you must create a forwarding rule that
 * directs traffic to specific target pools (for load balancing) or target
 * instances (for protocol forwarding).
 *
 * @resource [Forwarding rules]{@link https://cloud.google.com/compute/docs/load-balancing/network/forwarding-rules}
 *
 * @constructor
 * @alias module:compute/rule
 *
 * @example
 * var gcloud = require('gcloud')({
 *   keyFilename: '/path/to/keyfile.json',
 *   projectId: 'grape-spaceship-123'
 * });
 *
 * var gce = gcloud.compute();
 *
 * //-
 * // Reference a global rule.
 * //-
 * var rule = gce.rule('rule-name');
 *
 * //-
 * // Reference a region rule.
 * //-
 * var region = gce.region('us-central1');
 * var rule = region.rule('rule-name');
 */
function Rule(scope, name) {
  // A rule created directly from `Compute` (rather than a `Region`) is a
  // global forwarding rule and lives under the `/global` URL prefix.
  var isGlobalRule = scope.constructor.name === 'Compute';

  var methods = {
    /**
     * Create a forwarding rule.
     *
     * @param {object} config - See {module:compute#createRule} or
     *     {module:compute/region#createRule} if accessing this object through
     *     a Region.
     *
     * @example
     * var config = {
     *   // `target` will be different depending of this is a Regional or Global
     *   // forwarding rule
     *   target: 'global/targetHttpProxies/my-proxy',
     *   portRange: '8080-8089'
     * };
     *
     * rule.create(config, function(err, rule, operation, apiResponse) {
     *   // `rule` is a Rule object.
     *
     *   // `operation` is an Operation object that can be used to check the
     *   // status of the request.
     * });
     */
    create: true,

    /**
     * Check if the forwarding rule exists.
     *
     * @param {function} callback - The callback function.
     * @param {?error} callback.err - An error returned while making this
     *     request.
     * @param {boolean} callback.exists - Whether the rule exists or not.
     *
     * @example
     * rule.exists(function(err, exists) {});
     */
    exists: true,

    /**
     * Get a forwarding rule if it exists.
     *
     * You may optionally use this to "get or create" an object by providing an
     * object with `autoCreate` set to `true`. Any extra configuration that is
     * normally required for the `create` method must be contained within this
     * object as well.
     *
     * @param {options=} options - Configuration object.
     * @param {boolean} options.autoCreate - Automatically create the object if
     *     it does not exist. Default: `false`
     *
     * @example
     * rule.get(function(err, rule, apiResponse) {
     *   // `rule` is a Rule object.
     * });
     */
    get: true,

    /**
     * Get the metadata of this rule.
     *
     * @resource [GlobalForwardingRule Resource]{@link https://cloud.google.com/compute/docs/reference/v1/globalForwardingRules#resource}
     * @resource [ForwardingRule Resource]{@link https://cloud.google.com/compute/docs/reference/v1/globalForwardingRules#resource}
     * @resource [GlobalForwardingRules: get API Documentation]{@link https://cloud.google.com/compute/docs/reference/v1/globalForwardingRules/get}
     * @resource [ForwardingRules: get API Documentation]{@link https://cloud.google.com/compute/docs/reference/v1/forwardingRules/get}
     *
     * @param {function=} callback - The callback function.
     * @param {?error} callback.err - An error returned while making this
     *     request.
     * @param {object} callback.metadata - The rule's metadata.
     * @param {object} callback.apiResponse - The full API response.
     *
     * @example
     * rule.getMetadata(function(err, metadata, apiResponse) {});
     */
    getMetadata: true
  };

  ServiceObject.call(this, {
    parent: scope,
    baseUrl: (isGlobalRule ? '/global' : '') + '/forwardingRules',
    id: name,
    createMethod: scope.createRule.bind(scope),
    methods: methods
  });

  this.scope = scope;
}

nodeutil.inherits(Rule, ServiceObject);

/**
 * Wrap a user-supplied callback so the raw API response is converted into an
 * Operation object before the callback is invoked. Shared by `delete` and
 * `setTarget`, which previously duplicated this logic.
 *
 * @private
 *
 * @param {module:compute|module:compute/region} scope - Scope used to create
 *     the Operation object.
 * @param {function} callback - The user's callback.
 * @return {function} An `(err, resp)` handler.
 */
function wrapOperationCallback(scope, callback) {
  return function(err, resp) {
    if (err) {
      callback(err, null, resp);
      return;
    }

    var operation = scope.operation(resp.name);
    operation.metadata = resp;

    callback(null, operation, resp);
  };
}

/**
 * Delete the rule.
 *
 * @resource [GlobalForwardingRules: delete API Documentation]{@link https://cloud.google.com/compute/docs/reference/v1/globalForwardingRules/delete}
 * @resource [ForwardingRules: delete API Documentation]{@link https://cloud.google.com/compute/docs/reference/v1/forwardingRules/delete}
 *
 * @param {function=} callback - The callback function.
 * @param {?error} callback.err - An error returned while making this request.
 * @param {module:compute/operation} callback.operation - An operation object
 *     that can be used to check the status of the request.
 * @param {object} callback.apiResponse - The full API response.
 *
 * @example
 * rule.delete(function(err, operation, apiResponse) {
 *   // `operation` is an Operation object that can be used to check the status
 *   // of the request.
 * });
 */
Rule.prototype.delete = function(callback) {
  callback = callback || util.noop;

  ServiceObject.prototype.delete.call(
    this, wrapOperationCallback(this.scope, callback));
};

/**
 * Set the target for this forwarding rule.
 *
 * @resource [GlobalForwardingRules: setTarget API Documentation]{@link https://cloud.google.com/compute/docs/reference/v1/globalForwardingRules/setTarget}
 * @resource [ForwardingRules: setTarget API Documentation]{@link https://cloud.google.com/compute/docs/reference/v1/forwardingRules/setTarget}
 *
 * @param {string} target - The full or valid partial URL of the target resource
 *     to receive the matched traffic. For regional forwarding rules, this
 *     target must live in the same region as the forwarding rule. For global
 *     forwarding rules, this target must be a global `TargetHttpProxy`
 *     resource.
 * @param {function} callback - The callback function.
 * @param {?error} callback.err - An error returned while making this request.
 * @param {object} callback.apiResponse - The full API response.
 *
 * @example
 * rule.setTarget('new-target', function(err, operation, apiResponse) {
 *   // `operation` is an Operation object that can be used to check the status
 *   // of the request.
 * });
 */
Rule.prototype.setTarget = function(target, callback) {
  callback = callback || util.noop;

  this.request({
    method: 'POST',
    uri: '/setTarget',
    json: {
      target: target
    }
  }, wrapOperationCallback(this.scope, callback));
};

module.exports = Rule;
{ "content_hash": "0e55af4a9974d6cf8af7d8195874c6bd", "timestamp": "", "source": "github", "line_count": 234, "max_line_length": 155, "avg_line_length": 31.28205128205128, "alnum_prop": 0.6602459016393443, "repo_name": "pcostell/gcloud-node", "id": "e08c24352570682c31c685a56cc4cb61a46c7339", "size": "7937", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/compute/rule.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "86425" }, { "name": "JavaScript", "bytes": "1378127" }, { "name": "Protocol Buffer", "bytes": "20376" }, { "name": "Shell", "bytes": "7623" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?>
<plugin xmlns="http://cordova.apache.org/ns/plugins/1.0"
    id="fr.edps.cordova.powermanagement"
    version="1.0.2">
    <name>PowerManagement</name>
    <description>PowerManagement plugin for Cordova</description>
    <license>Apache 2.0</license>
    <keywords>cordova,powermanagement</keywords>

    <!-- Expose the JS API as window.powerManagement in every webview. -->
    <js-module src="www/powermanagement.js" name="device">
        <clobbers target="window.powerManagement" />
    </js-module>

    <!-- wp7 -->
    <!-- wp7 and wp8 share the same C# implementation source file. -->
    <platform name="wp7">
        <config-file target="config.xml" parent="/*">
            <feature name="PowerManagement">
                <param name="wp-package" value="PowerManagement"/>
            </feature>
        </config-file>
        <source-file src="src/wp/PowerManagement.cs" />
    </platform>

    <!-- wp8 -->
    <platform name="wp8">
        <config-file target="config.xml" parent="/*">
            <feature name="PowerManagement">
                <param name="wp-package" value="PowerManagement"/>
            </feature>
        </config-file>
        <source-file src="src/wp/PowerManagement.cs" />
    </platform>

    <!-- ios -->
    <platform name="ios">
        <config-file target="config.xml" parent="/widget">
            <feature name="PowerManagement">
                <param name="ios-package" value="PowerManagement" />
            </feature>
        </config-file>

        <header-file src="src/ios/PowerManagement.h" />
        <source-file src="src/ios/PowerManagement.m" />
    </platform>

    <!-- android -->
    <platform name="android">
        <config-file target="res/xml/config.xml" parent="/*">
            <feature name="PowerManagement" >
                <param name="android-package" value="org.apache.cordova.powermanagement.PowerManagement"/>
            </feature>
        </config-file>

        <!-- WAKE_LOCK is required for the Android implementation to hold
             wake locks on behalf of the app. -->
        <config-file target="AndroidManifest.xml" parent="/*">
            <uses-permission android:name="android.permission.WAKE_LOCK" />
        </config-file>

        <source-file src="src/android/PowerManagement.java" target-dir="src/org/apache/cordova/powermanagement" />
    </platform>
</plugin>
{ "content_hash": "3ba24e7c7e7bb8bbe75ba413d03be82a", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 114, "avg_line_length": 35.016129032258064, "alnum_prop": 0.5831414094887148, "repo_name": "jimibi/cordova-plugin-powermanagement", "id": "644e3e71260696a0eb0264e880206fe78f7b6474", "size": "2171", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "plugin.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "2279" }, { "name": "Java", "bytes": "5390" }, { "name": "JavaScript", "bytes": "2043" }, { "name": "Objective-C", "bytes": "3575" } ], "symlink_target": "" }
'use strict'; var path = require('path'); var _ = require('lodash'); function requiredProcessEnv(name) { if(!process.env[name]) { throw new Error('You must set the ' + name + ' environment variable'); } return process.env[name]; } // All configurations will extend these options // ============================================ var all = { env: process.env.NODE_ENV, // Root path of server root: path.normalize(__dirname + '/../../..'), // Server port port: process.env.PORT || 9000, // Should we populate the DB with sample data? seedDB: false, // Secret for session, you will want to change this and make it an environment variable secrets: { session: 'hacka-finder-secret' }, // List of user roles userRoles: ['guest', 'user', 'admin'], // MongoDB connection options mongo: { options: { db: { safe: true } } }, }; // Export the config object based on the NODE_ENV // ============================================== module.exports = _.merge( all, require('./' + process.env.NODE_ENV + '.js') || {});
{ "content_hash": "c3a6d2d4d279af6722bc77ebdda3aba8", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 89, "avg_line_length": 21.72, "alnum_prop": 0.5616942909760589, "repo_name": "davidtimmons/hackafinder", "id": "ff128daef910dc5f808a90d2b431fbddcacd9361", "size": "1086", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/config/environment/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "21075" }, { "name": "HTML", "bytes": "48681" }, { "name": "JavaScript", "bytes": "375225" } ], "symlink_target": "" }
title: aiz31 type: products image: /img/Screen Shot 2017-05-09 at 11.56.54 AM.png heading: z31 description: lksadjf lkasdjf lksajdf lksdaj flksadj flksa fdj main: heading: Foo Bar BAz description: |- ***This is i a thing***kjh hjk kj # Blah Blah ## Blah![undefined](undefined) ### Baah image1: alt: kkkk ---
{ "content_hash": "e72531fffa0e722acbaa639f55f7a062", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 61, "avg_line_length": 22.333333333333332, "alnum_prop": 0.6656716417910448, "repo_name": "pblack/kaldi-hugo-cms-template", "id": "2afd2c498999648bca1b584bae3dfc092d632168", "size": "339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "site/content/pages2/aiz31.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "94394" }, { "name": "HTML", "bytes": "18889" }, { "name": "JavaScript", "bytes": "10014" } ], "symlink_target": "" }
package com.microsoft.azure.spring.cloud.autoconfigure.context; import com.microsoft.azure.AzureEnvironment; import com.microsoft.azure.credentials.AzureTokenCredentials; import com.microsoft.azure.management.Azure; import com.microsoft.azure.spring.cloud.context.core.api.CredentialsProvider; import com.microsoft.azure.spring.cloud.context.core.api.ResourceManagerProvider; import com.microsoft.azure.spring.cloud.context.core.config.AzureProperties; import org.junit.Test; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.test.context.FilteredClassLoader; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; public class AzureContextAutoConfigurationTest { private ApplicationContextRunner contextRunner = new ApplicationContextRunner().withConfiguration(AutoConfigurations.of(AzureContextAutoConfiguration.class)) .withUserConfiguration(TestConfiguration.class); @Test public void testAzurePropertiesConfigured() { this.contextRunner.withPropertyValues("spring.cloud.azure.credentialFilePath=credential") .withPropertyValues("spring.cloud.azure.resourceGroup=group1") .withPropertyValues("spring.cloud.azure.region=westUS").run(context -> { assertThat(context).hasSingleBean(AzureProperties.class); assertThat(context.getBean(AzureProperties.class).getCredentialFilePath()).isEqualTo("credential"); assertThat(context.getBean(AzureProperties.class).getResourceGroup()).isEqualTo("group1"); assertThat(context.getBean(AzureProperties.class).getRegion()).isEqualTo("westUS"); assertThat(context.getBean(AzureProperties.class).getEnvironment()).isEqualTo(AzureEnvironment.AZURE); }); } @Test public void testRequiredAzureProperties() { 
this.contextRunner.withPropertyValues("spring.cloud.azure.credentialFilePath=credential") .withPropertyValues("spring.cloud.azure.resourceGroup=group1").run(context -> { assertThat(context).hasSingleBean(AzureProperties.class); assertThat(context.getBean(AzureProperties.class).getCredentialFilePath()).isEqualTo("credential"); assertThat(context.getBean(AzureProperties.class).getResourceGroup()).isEqualTo("group1"); }); } @Test public void testAzureDisabled() { this.contextRunner.withPropertyValues("spring.cloud.azure.enabled=false") .run(context -> assertThat(context).doesNotHaveBean(AzureProperties.class)); } @Test public void testWithoutAzureClass() { this.contextRunner.withClassLoader(new FilteredClassLoader(Azure.class)) .run(context -> assertThat(context).doesNotHaveBean(AzureProperties.class)); } @Test(expected = IllegalStateException.class) public void testLocationRequiredWhenAutoCreateResources() { this.contextRunner.withPropertyValues("spring.cloud.azure.credentialFilePath=credential") .withPropertyValues("spring.cloud.azure.resourceGroup=group1") .withPropertyValues("spring.cloud.azure.auto-create-resources=true") .run(context -> context.getBean(AzureProperties.class)); } @Configuration static class TestConfiguration { @Bean Azure azure() { return mock(Azure.class); } @Bean CredentialsProvider credentialsProvider() { return mock(CredentialsProvider.class); } @Bean ResourceManagerProvider resourceManagerProvider() { return mock(ResourceManagerProvider.class); } @Bean AzureTokenCredentials credentials() { return mock(AzureTokenCredentials.class); } } }
{ "content_hash": "b6d9b9040a18bb5509e9b76147d9d60a", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 118, "avg_line_length": 45.01111111111111, "alnum_prop": 0.7269809923475685, "repo_name": "selvasingh/azure-sdk-for-java", "id": "96391f3cf0d7fb2d8d605374660e900ee5d8fe8e", "size": "4148", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sdk/spring/azure-spring-cloud-autoconfigure/src/test/java/com/microsoft/azure/spring/cloud/autoconfigure/context/AzureContextAutoConfigurationTest.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "29891970" }, { "name": "JavaScript", "bytes": "6198" }, { "name": "PowerShell", "bytes": "160" }, { "name": "Shell", "bytes": "609" } ], "symlink_target": "" }
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */

/*
  Part of the Processing project - http://processing.org

  Copyright (c) 2011-12 Ben Fry and Casey Reas

  This library is free software; you can redistribute it and/or
  modify it under the terms of the GNU Lesser General Public
  License as published by the Free Software Foundation; either
  version 2.1 of the License, or (at your option) any later version.

  This library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  Lesser General Public License for more details.

  You should have received a copy of the GNU Lesser General
  Public License along with this library; if not, write to the
  Free Software Foundation, Inc., 59 Temple Place, Suite 330,
  Boston, MA  02111-1307  USA
*/

package processing.opengl;

import processing.core.PApplet;
import processing.core.PConstants;

import java.nio.IntBuffer;

/**
 * Encapsulates a Frame Buffer Object for offscreen rendering.
 * When created with onscreen == true, it represents the normal
 * framebuffer. Needed by the stack mechanism in OPENGL2 to return
 * to onscreen rendering after a sequence of pushFramebuffer calls.
 * It transparently handles the situations when the FBO extension is
 * not available.
 *
 * By Andres Colubri.
 */
public class FrameBuffer implements PConstants {
  protected PGL pgl;
  protected int context;   // The context that created this framebuffer.

  // OpenGL object names. A value of 0 means "not created" (or, for glFbo on a
  // screen framebuffer, the default window-system framebuffer).
  public int glFbo;           // the framebuffer object itself
  public int glDepth;         // depth renderbuffer
  public int glStencil;       // stencil renderbuffer
  public int glDepthStencil;  // combined depth+stencil renderbuffer
  public int glMultisample;   // multisampled color renderbuffer
  public int width;
  public int height;

  protected int depthBits;               // depth precision actually used (0, 16, 24 or 32)
  protected int stencilBits;             // stencil precision actually used (0, 1, 4 or 8)
  protected boolean packedDepthStencil;  // true when one combined 24/8 buffer backs both

  protected boolean multisample;  // true when rendering into a multisampled renderbuffer
  protected int nsamples;         // number of samples (1 when not multisampled)

  protected int numColorBuffers;
  protected Texture[] colorBufferTex;  // textures attached as color buffers

  protected boolean screenFb;  // true when this object stands for the on-screen framebuffer
  protected boolean noDepth;   // true after disableDepthTest() has been called

  protected IntBuffer pixelBuffer;  // lazily allocated by readPixels()/createPixelBuffer()

  FrameBuffer() {
    pgl = PGraphicsOpenGL.pgl;
    context = pgl.createEmptyContext();
  }


  /**
   * Full constructor. When {@code screen} is true the object represents the
   * on-screen framebuffer and all buffer/sample parameters are forced to 0;
   * otherwise the GL objects are created immediately via allocate().
   */
  FrameBuffer(int w, int h, int samples, int colorBuffers,
              int depthBits, int stencilBits, boolean packedDepthStencil,
              boolean screen) {
    this();

    glFbo = 0;
    glDepth = 0;
    glStencil = 0;
    glDepthStencil = 0;
    glMultisample = 0;

    if (screen) {
      // If this framebuffer is used to represent a on-screen buffer,
      // then it doesn't make it sense for it to have multisampling,
      // color, depth or stencil buffers.
      depthBits = stencilBits = samples = colorBuffers = 0;
    }

    width = w;
    height = h;

    if (1 < samples) {
      multisample = true;
      nsamples = samples;
    } else {
      multisample = false;
      nsamples = 1;
    }

    numColorBuffers = colorBuffers;
    colorBufferTex = new Texture[numColorBuffers];
    for (int i = 0; i < numColorBuffers; i++) {
      colorBufferTex[i] = null;
    }

    if (depthBits < 1 && stencilBits < 1) {
      this.depthBits = 0;
      this.stencilBits = 0;
      this.packedDepthStencil = false;
    } else {
      if (packedDepthStencil) {
        // When combined depth/stencil format is required, the depth and stencil
        // bits are overriden and the 24/8 combination for a 32 bits surface is
        // used.
        this.depthBits = 24;
        this.stencilBits = 8;
        this.packedDepthStencil = true;
      } else {
        this.depthBits = depthBits;
        this.stencilBits = stencilBits;
        this.packedDepthStencil = false;
      }
    }

    screenFb = screen;

    allocate();
    noDepth = false;

    pixelBuffer = null;
  }


  // Convenience constructor: offscreen FBO, one color buffer, no depth/stencil.
  FrameBuffer(int w, int h) {
    this(w, h, 1, 1, 0, 0, false, false);
  }


  // Convenience constructor: one color buffer, no depth/stencil, on- or offscreen.
  FrameBuffer(int w, int h, boolean screen) {
    this(w, h, 1, 1, 0, 0, false, screen);
  }


  /**
   * Queues all GL objects owned by this framebuffer for deletion in the
   * context that created them. The screen framebuffer owns no GL objects,
   * so nothing is finalized in that case.
   */
  @Override
  protected void finalize() throws Throwable {
    try {
      if (!screenFb) {
        if (glFbo != 0) {
          PGraphicsOpenGL.finalizeFrameBufferObject(glFbo, context);
        }
        if (glDepth != 0) {
          PGraphicsOpenGL.finalizeRenderBufferObject(glDepth, context);
        }
        if (glStencil != 0) {
          PGraphicsOpenGL.finalizeRenderBufferObject(glStencil, context);
        }
        if (glMultisample != 0) {
          PGraphicsOpenGL.finalizeRenderBufferObject(glMultisample, context);
        }
        if (glDepthStencil != 0) {
          PGraphicsOpenGL.finalizeRenderBufferObject(glDepthStencil, context);
        }
      }
    } finally {
      super.finalize();
    }
  }

  /**
   * Clears color, depth and stencil buffers of this framebuffer to zero,
   * temporarily making it the current framebuffer via the framebuffer stack.
   */
  public void clear() {
    PGraphicsOpenGL.pushFramebuffer();
    PGraphicsOpenGL.setFramebuffer(this);
    pgl.clearDepth(1);
    pgl.clearStencil(0);
    pgl.clearColor(0, 0, 0, 0);
    pgl.clear(PGL.DEPTH_BUFFER_BIT |
              PGL.STENCIL_BUFFER_BIT |
              PGL.COLOR_BUFFER_BIT);
    PGraphicsOpenGL.popFramebuffer();
  }

  /**
   * Blits this framebuffer's color contents into {@code dest}, then rebinds
   * {@code current} as both read and draw framebuffer.
   */
  public void copy(FrameBuffer dest, FrameBuffer current) {
    pgl.bindFramebuffer(PGL.READ_FRAMEBUFFER, this.glFbo);
    pgl.bindFramebuffer(PGL.DRAW_FRAMEBUFFER, dest.glFbo);
    pgl.blitFramebuffer(0, 0, this.width, this.height,
                        0, 0, dest.width, dest.height,
                        PGL.COLOR_BUFFER_BIT, PGL.NEAREST);
    pgl.bindFramebuffer(PGL.READ_FRAMEBUFFER, current.glFbo);
    pgl.bindFramebuffer(PGL.DRAW_FRAMEBUFFER, current.glFbo);
  }

  // Makes this framebuffer the current GL_FRAMEBUFFER binding.
  public void bind() {
    pgl.bindFramebuffer(PGL.FRAMEBUFFER, glFbo);
  }

  // Marks depth testing as disabled for this framebuffer (checked by finish()).
  public void disableDepthTest() {
    noDepth = true;
  }

  /**
   * Called when rendering into this framebuffer is done. If depth testing was
   * disabled via disableDepthTest(), restores the DEPTH_TEST state according
   * to the renderer's ENABLE_DEPTH_TEST hint.
   */
  public void finish(PGraphicsOpenGL pg) {
    if (noDepth) {
      // No need to clear depth buffer because depth testing was disabled.
      if (pg.getHint(ENABLE_DEPTH_TEST)) {
        pgl.enable(PGL.DEPTH_TEST);
      } else {
        pgl.disable(PGL.DEPTH_TEST);
      }
    }
  }

  /**
   * Reads the framebuffer's pixels (RGBA, unsigned byte) into the internal
   * pixel buffer, allocating it on first use.
   */
  public void readPixels() {
    if (pixelBuffer == null) createPixelBuffer();
    pixelBuffer.rewind();
    pgl.readPixels(0, 0, width, height, PGL.RGBA, PGL.UNSIGNED_BYTE,
                   pixelBuffer);
  }

  /**
   * Copies previously read pixels into the given array. Does nothing if
   * readPixels() has not been called yet (pixelBuffer is still null).
   */
  public void getPixels(int[] pixels) {
    if (pixelBuffer != null) {
      pixelBuffer.get(pixels, 0, pixels.length);
      pixelBuffer.rewind();
    }
  }

  // Returns the internal pixel buffer (may be null before readPixels()).
  public IntBuffer getPixelBuffer() {
    return pixelBuffer;
  }

  public boolean hasDepthBuffer() {
    return 0 < depthBits;
  }

  public boolean hasStencilBuffer() {
    return 0 < stencilBits;
  }

  /**
   * Overrides the FBO name, but only for the object representing the screen
   * framebuffer (some platforms use a non-zero default framebuffer).
   */
  public void setFBO(int id) {
    if (screenFb) {
      glFbo = id;
    }
  }


  ///////////////////////////////////////////////////////////

  // Color buffer setters.


  // Attaches a single texture as the only color buffer.
  public void setColorBuffer(Texture tex) {
    setColorBuffers(new Texture[] { tex }, 1);
  }


  public void setColorBuffers(Texture[] textures) {
    setColorBuffers(textures, textures.length);
  }


  /**
   * Attaches the first n textures as color buffers COLOR_ATTACHMENT0..n-1.
   * The number of textures must match numColorBuffers. No-op for the screen
   * framebuffer.
   */
  public void setColorBuffers(Texture[] textures, int n) {
    if (screenFb) return;

    if (numColorBuffers != PApplet.min(n, textures.length)) {
      throw new RuntimeException("Wrong number of textures to set the color " +
                                 "buffers.");
    }

    for (int i = 0; i < numColorBuffers; i++) {
      colorBufferTex[i] = textures[i];
    }

    PGraphicsOpenGL.pushFramebuffer();
    PGraphicsOpenGL.setFramebuffer(this);

    // Making sure nothing is attached.
    for (int i = 0; i < numColorBuffers; i++) {
      pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0 + i,
                               PGL.TEXTURE_2D, 0, 0);
    }

    for (int i = 0; i < numColorBuffers; i++) {
      pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0 + i,
                               colorBufferTex[i].glTarget,
                               colorBufferTex[i].glName, 0);
    }

    pgl.validateFramebuffer();

    PGraphicsOpenGL.popFramebuffer();
  }


  /**
   * Cyclically rotates the color buffer textures by one position and
   * re-attaches them to their new attachment points.
   */
  public void swapColorBuffers() {
    for (int i = 0; i < numColorBuffers - 1; i++) {
      int i1 = (i + 1);
      Texture tmp = colorBufferTex[i];
      colorBufferTex[i] = colorBufferTex[i1];
      colorBufferTex[i1] = tmp;
    }

    PGraphicsOpenGL.pushFramebuffer();
    PGraphicsOpenGL.setFramebuffer(this);
    for (int i = 0; i < numColorBuffers; i++) {
      pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0 + i,
                               colorBufferTex[i].glTarget,
                               colorBufferTex[i].glName, 0);
    }
    pgl.validateFramebuffer();

    PGraphicsOpenGL.popFramebuffer();
  }


  // Read buffer to use with this framebuffer: the platform default for the
  // screen, COLOR_ATTACHMENT0 otherwise.
  public int getDefaultReadBuffer() {
    if (screenFb) {
      return pgl.getDefaultReadBuffer();
    } else {
      return PGL.COLOR_ATTACHMENT0;
    }
  }


  // Draw buffer to use with this framebuffer: the platform default for the
  // screen, COLOR_ATTACHMENT0 otherwise.
  public int getDefaultDrawBuffer() {
    if (screenFb) {
      return pgl.getDefaultDrawBuffer();
    } else {
      return PGL.COLOR_ATTACHMENT0;
    }
  }


  ///////////////////////////////////////////////////////////

  // Allocate/release framebuffer.


  /**
   * (Re)creates the GL objects for this framebuffer in the current context.
   * Any previously created objects are disposed first. For the screen
   * framebuffer only glFbo = 0 is set.
   */
  protected void allocate() {
    dispose(); // Just in the case this object is being re-allocated.

    context = pgl.getCurrentContext();

    if (screenFb) {
      glFbo = 0;
    } else {
      //create the FBO object...
      glFbo = PGraphicsOpenGL.createFrameBufferObject(context);

      // ... and then create the rest of the stuff.
      if (multisample) {
        createColorBufferMultisample();
      }

      if (packedDepthStencil) {
        createPackedDepthStencilBuffer();
      } else {
        if (0 < depthBits) {
          createDepthBuffer();
        }
        if (0 < stencilBits) {
          createStencilBuffer();
        }
      }
    }
  }


  /**
   * Releases every GL object owned by this framebuffer (FBO plus any depth,
   * stencil, packed depth/stencil and multisample renderbuffers), resetting
   * the corresponding names to 0. No-op for the screen framebuffer.
   */
  protected void dispose() {
    if (screenFb) return;
    if (glFbo != 0) {
      PGraphicsOpenGL.finalizeFrameBufferObject(glFbo, context);
      glFbo = 0;
    }
    if (glDepth != 0) {
      PGraphicsOpenGL.finalizeRenderBufferObject(glDepth, context);
      glDepth = 0;
    }
    if (glStencil != 0) {
      PGraphicsOpenGL.finalizeRenderBufferObject(glStencil, context);
      glStencil = 0;
    }
    if (glMultisample != 0) {
      PGraphicsOpenGL.finalizeRenderBufferObject(glMultisample, context);
      glMultisample = 0;
    }
    if (glDepthStencil != 0) {
      PGraphicsOpenGL.finalizeRenderBufferObject(glDepthStencil, context);
      glDepthStencil = 0;
    }
  }


  /**
   * Returns true when the GL context that created these objects is no longer
   * current; in that case the stale names are unregistered and all handles
   * and color buffer references are reset.
   */
  protected boolean contextIsOutdated() {
    if (screenFb) return false;

    boolean outdated = !pgl.contextIsCurrent(context);
    if (outdated) {
      PGraphicsOpenGL.removeFrameBufferObject(glFbo, context);
      PGraphicsOpenGL.removeRenderBufferObject(glDepth, context);
      PGraphicsOpenGL.removeRenderBufferObject(glStencil, context);
      PGraphicsOpenGL.removeRenderBufferObject(glDepthStencil, context);
      PGraphicsOpenGL.removeRenderBufferObject(glMultisample, context);

      glFbo = 0;
      glDepth = 0;
      glStencil = 0;
      glDepthStencil = 0;
      glMultisample = 0;

      for (int i = 0; i < numColorBuffers; i++) {
        colorBufferTex[i] = null;
      }
    }
    return outdated;
  }


  /**
   * Creates and attaches a multisampled RGBA8 renderbuffer as the color
   * attachment. No-op for the screen framebuffer.
   */
  protected void createColorBufferMultisample() {
    if (screenFb) return;

    PGraphicsOpenGL.pushFramebuffer();
    PGraphicsOpenGL.setFramebuffer(this);

    glMultisample = PGraphicsOpenGL.createRenderBufferObject(context);
    pgl.bindRenderbuffer(PGL.RENDERBUFFER, glMultisample);
    pgl.renderbufferStorageMultisample(PGL.RENDERBUFFER, nsamples,
                                       PGL.RGBA8, width, height);
    pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0,
                                PGL.RENDERBUFFER, glMultisample);

    PGraphicsOpenGL.popFramebuffer();
  }


  /**
   * Creates a single DEPTH24_STENCIL8 renderbuffer and attaches it to both
   * the depth and the stencil attachment points. Requires non-zero size.
   */
  protected void createPackedDepthStencilBuffer() {
    if (screenFb) return;

    if (width == 0 || height == 0) {
      throw new RuntimeException("PFramebuffer: size undefined.");
    }

    PGraphicsOpenGL.pushFramebuffer();
    PGraphicsOpenGL.setFramebuffer(this);

    glDepthStencil = PGraphicsOpenGL.createRenderBufferObject(context);
    pgl.bindRenderbuffer(PGL.RENDERBUFFER, glDepthStencil);

    if (multisample) {
      pgl.renderbufferStorageMultisample(PGL.RENDERBUFFER, nsamples,
                                         PGL.DEPTH24_STENCIL8, width, height);
    } else {
      pgl.renderbufferStorage(PGL.RENDERBUFFER, PGL.DEPTH24_STENCIL8,
                              width, height);
    }

    pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.DEPTH_ATTACHMENT,
                                PGL.RENDERBUFFER, glDepthStencil);
    pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.STENCIL_ATTACHMENT,
                                PGL.RENDERBUFFER, glDepthStencil);

    PGraphicsOpenGL.popFramebuffer();
  }


  /**
   * Creates and attaches a depth renderbuffer whose precision is picked from
   * depthBits (16, 24 or 32; default constant is DEPTH_COMPONENT16).
   */
  protected void createDepthBuffer() {
    if (screenFb) return;

    if (width == 0 || height == 0) {
      throw new RuntimeException("PFramebuffer: size undefined.");
    }

    PGraphicsOpenGL.pushFramebuffer();
    PGraphicsOpenGL.setFramebuffer(this);

    glDepth = PGraphicsOpenGL.createRenderBufferObject(context);
    pgl.bindRenderbuffer(PGL.RENDERBUFFER, glDepth);

    int glConst = PGL.DEPTH_COMPONENT16;
    if (depthBits == 16) {
      glConst = PGL.DEPTH_COMPONENT16;
    } else if (depthBits == 24) {
      glConst = PGL.DEPTH_COMPONENT24;
    } else if (depthBits == 32) {
      glConst = PGL.DEPTH_COMPONENT32;
    }

    if (multisample) {
      pgl.renderbufferStorageMultisample(PGL.RENDERBUFFER, nsamples, glConst,
                                         width, height);
    } else {
      pgl.renderbufferStorage(PGL.RENDERBUFFER, glConst, width, height);
    }

    pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.DEPTH_ATTACHMENT,
                                PGL.RENDERBUFFER, glDepth);

    PGraphicsOpenGL.popFramebuffer();
  }


  /**
   * Creates and attaches a stencil renderbuffer whose precision is picked
   * from stencilBits (1, 4 or 8; default constant is STENCIL_INDEX1).
   */
  protected void createStencilBuffer() {
    if (screenFb) return;

    if (width == 0 || height == 0) {
      throw new RuntimeException("PFramebuffer: size undefined.");
    }

    PGraphicsOpenGL.pushFramebuffer();
    PGraphicsOpenGL.setFramebuffer(this);

    glStencil = PGraphicsOpenGL.createRenderBufferObject(context);
    pgl.bindRenderbuffer(PGL.RENDERBUFFER, glStencil);

    int glConst = PGL.STENCIL_INDEX1;
    if (stencilBits == 1) {
      glConst = PGL.STENCIL_INDEX1;
    } else if (stencilBits == 4) {
      glConst = PGL.STENCIL_INDEX4;
    } else if (stencilBits == 8) {
      glConst = PGL.STENCIL_INDEX8;
    }

    if (multisample) {
      pgl.renderbufferStorageMultisample(PGL.RENDERBUFFER, nsamples, glConst,
                                         width, height);
    } else {
      pgl.renderbufferStorage(PGL.RENDERBUFFER, glConst, width, height);
    }

    pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.STENCIL_ATTACHMENT,
                                PGL.RENDERBUFFER, glStencil);

    PGraphicsOpenGL.popFramebuffer();
  }


  // Allocates the pixel buffer used by readPixels(): one int per pixel.
  protected void createPixelBuffer() {
    pixelBuffer = IntBuffer.allocate(width * height);
    pixelBuffer.rewind();
  }
}
{ "content_hash": "615d285ce4df2f13efaba14d62f60768", "timestamp": "", "source": "github", "line_count": 542, "max_line_length": 80, "avg_line_length": 27.437269372693727, "alnum_prop": 0.6333804048147401, "repo_name": "d2fn/passage", "id": "8821861c979f6515c8bf85874d1db16c882e07ec", "size": "14871", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/processing/opengl/FrameBuffer.java", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "20513" }, { "name": "Java", "bytes": "16747828" }, { "name": "Perl", "bytes": "2123" }, { "name": "Shell", "bytes": "1070" } ], "symlink_target": "" }
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')

# Specs for OSDb::MovieFile, built from the fixture movie at
# spec/fixtures/somemovie.avi.
describe OSDb::MovieFile do

  subject do
    OSDb::MovieFile.new(File.dirname(__FILE__) + '/../fixtures/somemovie.avi')
  end

  # 16 hex chars, i.e. a 64-bit value — presumably the OpenSubtitles
  # file hash of the fixture; confirm against OSDb::MovieFile#hash.
  its(:hash) { should == '243339b48f4e8741' }
  # Name is the basename without its extension.
  its(:name) { should == 'somemovie' }

  describe '#sub_path' do
    it 'should only change the extension of the movie' do
      # A directory whose name also ends in ".avi" must be left untouched:
      # only the file's own extension is swapped for the given one.
      movie_file = OSDb::MovieFile.new('directory-with-extension.avi/movie-file.avi', false)
      movie_file.sub_path('srt').should == 'directory-with-extension.avi/movie-file.srt'
    end
  end
end
{ "content_hash": "f515796abad8c01fa4af2fce9d11c76d", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 94, "avg_line_length": 32.111111111111114, "alnum_prop": 0.657439446366782, "repo_name": "byroot/ruby-osdb", "id": "adf03742d9d844038d8575293a3622089cee622d", "size": "578", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/osdb/movie_file_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "30750" } ], "symlink_target": "" }
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Cloud API asynchronous "DOC To PDF" job example (allows to avoid timeout errors).</title>
</head>
<body>

<?php

// Cloud API asynchronous "DOC To PDF" job example.
// Allows to avoid timeout errors when processing huge or scanned PDF documents.
//
// Flow: submit the conversion with `async` = true, receive a job id plus the
// future result URL, then poll the job/check endpoint until the job leaves the
// "working" state.

// The authentication key (API Key).
// Get your own by registering at https://app.pdf.co
$apiKey = "***********************************";

// Direct URL of source DOC or DOCX file. Check another example if you need to upload a local file to the cloud.
// You can also upload your own file into PDF.co and use it as url. Check "Upload File" samples for code snippets: https://github.com/bytescout/pdf-co-api-samples/tree/master/File%20Upload/
$sourceFileUrl = "https://bytescout-com.s3.amazonaws.com/files/demo-files/cloud-api/doc-to-pdf/sample.docx";

// Prepare URL for `DOC To PDF` API call
$url = "https://api.pdf.co/v1/pdf/convert/from/doc";

// Prepare requests params
$parameters = array();
$parameters["url"] = $sourceFileUrl;
$parameters["async"] = true; // (!) Make asynchronous job

// Create Json payload
$data = json_encode($parameters);

// Create request
$curl = curl_init();
curl_setopt($curl, CURLOPT_HTTPHEADER, array("x-api-key: " . $apiKey, "Content-type: application/json"));
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_POST, true);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($curl, CURLOPT_POSTFIELDS, $data);

// Execute request
$result = curl_exec($curl);

if (curl_errno($curl) == 0)
{
    $status_code = curl_getinfo($curl, CURLINFO_HTTP_CODE);

    if ($status_code == 200)
    {
        $json = json_decode($result, true);

        if (!isset($json["error"]) || $json["error"] == false)
        {
            // URL of generated PDF file that will available after the job completion
            $resultFileUrl = $json["url"];
            // Asynchronous job ID
            $jobId = $json["jobId"];

            // Check the job status in a loop
            do
            {
                $status = CheckJobStatus($jobId, $apiKey); // Possible statuses: "working", "failed", "aborted", "success".

                // Display timestamp and status (for demo purposes)
                echo "<p>" . date(DATE_RFC2822) . ": " . $status . "</p>";

                if ($status == "success")
                {
                    // Display link to the file with conversion results
                    echo "<div><h2>Conversion Result:</h2><a href='" . $resultFileUrl . "' target='_blank'>" . $resultFileUrl . "</a></div>";
                    break;
                }
                else if ($status == "working")
                {
                    // Pause for a few seconds
                    sleep(3);
                }
                else
                {
                    // Terminal non-success state ("failed"/"aborted"): show it and stop polling.
                    echo $status . "<br/>";
                    break;
                }
            }
            while (true);
        }
        else
        {
            // Display service reported error
            echo "<p>Error: " . $json["message"] . "</p>";
        }
    }
    else
    {
        // Display request error
        echo "<p>Status code: " . $status_code . "</p>";
        echo "<p>" . $result . "</p>";
    }
}
else
{
    // Display CURL error
    echo "Error: " . curl_error($curl);
}

// Cleanup
curl_close($curl);

/**
 * Queries the PDF.co `job/check` endpoint for the given job id.
 *
 * @param string $jobId  asynchronous job id returned by the conversion call
 * @param string $apiKey PDF.co API key sent in the x-api-key header
 * @return string|null the reported job status ("working", "failed",
 *                     "aborted" or "success"), or null if the request failed
 */
function CheckJobStatus($jobId, $apiKey)
{
    $status = null;

    // Create URL
    $url = "https://api.pdf.co/v1/job/check";

    // Prepare requests params
    $parameters = array();
    $parameters["jobid"] = $jobId;

    // Create Json payload
    $data = json_encode($parameters);

    // Create request
    $curl = curl_init();
    curl_setopt($curl, CURLOPT_HTTPHEADER, array("x-api-key: " . $apiKey, "Content-type: application/json"));
    curl_setopt($curl, CURLOPT_URL, $url);
    curl_setopt($curl, CURLOPT_POST, true);
    curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($curl, CURLOPT_POSTFIELDS, $data);

    // Execute request
    $result = curl_exec($curl);

    if (curl_errno($curl) == 0)
    {
        $status_code = curl_getinfo($curl, CURLINFO_HTTP_CODE);

        if ($status_code == 200)
        {
            $json = json_decode($result, true);

            if (!isset($json["error"]) || $json["error"] == false)
            {
                $status = $json["status"];
            }
            else
            {
                // Display service reported error
                echo "<p>Error: " . $json["message"] . "</p>";
            }
        }
        else
        {
            // Display request error
            echo "<p>Status code: " . $status_code . "</p>";
            echo "<p>" . $result . "</p>";
        }
    }
    else
    {
        // Display CURL error
        echo "Error: " . curl_error($curl);
    }

    // Cleanup
    curl_close($curl);

    return $status;
}

?>

</body>
</html>
{ "content_hash": "2625de5b88f250a588c595cfd95e2804", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 193, "avg_line_length": 28.681818181818183, "alnum_prop": 0.5221870047543582, "repo_name": "bytescout/ByteScout-SDK-SourceCode", "id": "a2591936a0e6811e602485fa8ca896dea3554fa5", "size": "5048", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PDF.co Web API/DOC To PDF API/PHP/Convert DOC To PDF Asynchronously/doc-to-pdf-async.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP.NET", "bytes": "364116" }, { "name": "Apex", "bytes": "243500" }, { "name": "Batchfile", "bytes": "151832" }, { "name": "C", "bytes": "224568" }, { "name": "C#", "bytes": "12909855" }, { "name": "C++", "bytes": "440474" }, { "name": "CSS", "bytes": "56817" }, { "name": "Classic ASP", "bytes": "46655" }, { "name": "Dockerfile", "bytes": "776" }, { "name": "Gherkin", "bytes": "3386" }, { "name": "HTML", "bytes": "17276296" }, { "name": "Java", "bytes": "1483408" }, { "name": "JavaScript", "bytes": "3033610" }, { "name": "PHP", "bytes": "838746" }, { "name": "Pascal", "bytes": "398090" }, { "name": "PowerShell", "bytes": "715204" }, { "name": "Python", "bytes": "703542" }, { "name": "QMake", "bytes": "880" }, { "name": "TSQL", "bytes": "3080" }, { "name": "VBA", "bytes": "383773" }, { "name": "VBScript", "bytes": "1504410" }, { "name": "Visual Basic .NET", "bytes": "9489450" } ], "symlink_target": "" }
package liquibase.sqlgenerator.core;

import liquibase.CatalogAndSchema;
import liquibase.database.Database;
import liquibase.database.core.*;
import liquibase.datatype.DatabaseDataType;
import liquibase.exception.ValidationErrors;
import liquibase.logging.LogFactory;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.statement.AutoIncrementConstraint;
import liquibase.statement.ForeignKeyConstraint;
import liquibase.statement.SequenceNextValueFunction;
import liquibase.statement.UniqueConstraint;
import liquibase.statement.core.CreateTableStatement;
import liquibase.structure.core.*;
import liquibase.util.StringUtils;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

/**
 * Generates the CREATE TABLE statement (plus any auxiliary statements, e.g.
 * Postgres sequence restarts or MSSQL column remarks) for a
 * {@link CreateTableStatement}, with per-database variations for the
 * databases Liquibase supports. Informix is delegated entirely to
 * {@link CreateTableGeneratorInformix}.
 */
public class CreateTableGenerator extends AbstractSqlGenerator<CreateTableStatement> {

    /**
     * Requires a table name and at least the columns collection to be set.
     */
    @Override
    public ValidationErrors validate(CreateTableStatement createTableStatement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        ValidationErrors validationErrors = new ValidationErrors();
        validationErrors.checkRequiredField("tableName", createTableStatement.getTableName());
        validationErrors.checkRequiredField("columns", createTableStatement.getColumns());
        return validationErrors;
    }

    /**
     * Builds the CREATE TABLE SQL. The first element of the returned array is
     * always the CREATE TABLE itself; additional statements (sequence
     * "alter sequence ... start with" for Postgres auto-increment start
     * values, sp_addextendedproperty calls for MSSQL column remarks) follow.
     */
    @Override
    public Sql[] generateSql(CreateTableStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {

        // Informix has its own generator with different syntax rules.
        if (database instanceof InformixDatabase) {
            AbstractSqlGenerator<CreateTableStatement> gen = new CreateTableGeneratorInformix();
            return gen.generateSql(statement, database, sqlGeneratorChain);
        }

        // Statements to emit after the CREATE TABLE itself.
        List<Sql> additionalSql = new ArrayList<Sql>();

        StringBuffer buffer = new StringBuffer();
        buffer.append("CREATE TABLE ").append(database.escapeTableName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName())).append(" ");
        buffer.append("(");

        boolean isSinglePrimaryKeyColumn = statement.getPrimaryKeyConstraint() != null && statement.getPrimaryKeyConstraint().getColumns().size() == 1;

        boolean isPrimaryKeyAutoIncrement = false;

        Iterator<String> columnIterator = statement.getColumns().iterator();

        List<String> primaryKeyColumns = new LinkedList<String>();

        // MySQL expresses an auto-increment start value as a table option;
        // remembered here while iterating the columns and appended at the end.
        BigInteger mysqlTableOptionStartWith = null;

        // Emit each column definition: name, type, default, auto-increment,
        // nullability and remarks.
        while (columnIterator.hasNext()) {
            String column = columnIterator.next();
            DatabaseDataType columnType = statement.getColumnTypes().get(column).toDatabaseDataType(database);
            buffer.append(database.escapeColumnName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName(), column, true));

            buffer.append(" ").append(columnType);

            // Find the auto-increment constraint for this column, if any.
            AutoIncrementConstraint autoIncrementConstraint = null;

            for (AutoIncrementConstraint currentAutoIncrementConstraint : statement.getAutoIncrementConstraints()) {
                if (column.equals(currentAutoIncrementConstraint.getColumnName())) {
                    autoIncrementConstraint = currentAutoIncrementConstraint;
                    break;
                }
            }

            boolean isAutoIncrementColumn = autoIncrementConstraint != null;
            boolean isPrimaryKeyColumn = statement.getPrimaryKeyConstraint() != null
                    && statement.getPrimaryKeyConstraint().getColumns().contains(column);
            isPrimaryKeyAutoIncrement = isPrimaryKeyAutoIncrement
                    || isPrimaryKeyColumn && isAutoIncrementColumn;

            if (isPrimaryKeyColumn) {
                primaryKeyColumns.add(column);
            }

            // SQLite requires a single-column auto-increment primary key to be
            // declared inline on the column rather than as a table constraint.
            if ((database instanceof SQLiteDatabase) &&
                    isSinglePrimaryKeyColumn &&
                    isPrimaryKeyColumn &&
                    isAutoIncrementColumn) {
                String pkName = StringUtils.trimToNull(statement.getPrimaryKeyConstraint().getConstraintName());
                if (pkName == null) {
                    pkName = database.generatePrimaryKeyName(statement.getTableName());
                }
                if (pkName != null) {
                    buffer.append(" CONSTRAINT ");
                    buffer.append(database.escapeConstraintName(pkName));
                }
                buffer.append(" PRIMARY KEY");
            }

            // for the serial data type in postgres, there should be no default value
            if (!columnType.isAutoIncrement() && statement.getDefaultValue(column) != null) {
                Object defaultValue = statement.getDefaultValue(column);
                if (database instanceof MSSQLDatabase) {
                    // MSSQL default values get a named constraint.
                    buffer.append(" CONSTRAINT ").append(database.escapeObjectName(((MSSQLDatabase) database).generateDefaultConstraintName(statement.getTableName(), column), ForeignKey.class));
                }
                if (database instanceof OracleDatabase && statement.getDefaultValue(column).toString().startsWith("GENERATED ALWAYS ")) {
                    // Oracle identity-style defaults carry their own keyword.
                    buffer.append(" ");
                } else {
                    buffer.append(" DEFAULT ");
                }

                if (defaultValue instanceof SequenceNextValueFunction) {
                    buffer.append(database.generateDatabaseFunctionValue((SequenceNextValueFunction) defaultValue));
                } else {
                    buffer.append(statement.getColumnTypes().get(column).objectToSql(defaultValue, database));
                }
            }

            if (isAutoIncrementColumn) {
                // TODO: check if database supports auto increment on non primary key column
                if (database.supportsAutoIncrement()) {
                    String autoIncrementClause = database.getAutoIncrementClause(autoIncrementConstraint.getStartWith(), autoIncrementConstraint.getIncrementBy());

                    if (!"".equals(autoIncrementClause)) {
                        buffer.append(" ").append(autoIncrementClause);
                    }

                    if( autoIncrementConstraint.getStartWith() != null ){
                        if (database instanceof PostgresDatabase) {
                            // Postgres: restart the implicitly created serial
                            // sequence via an additional statement.
                            String sequenceName = statement.getTableName()+"_"+column+"_seq";
                            additionalSql.add(new UnparsedSql("alter sequence "+database.escapeSequenceName(statement.getCatalogName(), statement.getSchemaName(), sequenceName)+" start with "+autoIncrementConstraint.getStartWith(), new Sequence().setName(sequenceName).setSchema(statement.getCatalogName(), statement.getSchemaName())));
                        }else if(database instanceof MySQLDatabase){
                            // MySQL: handled later as a table option.
                            mysqlTableOptionStartWith = autoIncrementConstraint.getStartWith();
                        }
                    }
                } else {
                    LogFactory.getLogger().warning(database.getShortName()+" does not support autoincrement columns as requested for "+(database.escapeTableName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName())));
                }
            }

            if (statement.getNotNullColumns().contains(column)) {
                buffer.append(" NOT NULL");
            } else {
                // Some databases need an explicit NULL keyword for nullable
                // columns (Sybase family, MySQL, MSSQL timestamp columns).
                if (database instanceof SybaseDatabase || database instanceof SybaseASADatabase || database instanceof MySQLDatabase
                        || (database instanceof MSSQLDatabase && columnType.toString().equalsIgnoreCase("timestamp"))) {
                    buffer.append(" NULL");
                }
            }

            // Informix is delegated above; this branch is intentionally empty.
            if (database instanceof InformixDatabase && isSinglePrimaryKeyColumn && isPrimaryKeyColumn) {
                //buffer.append(" PRIMARY KEY");
            }

            if(statement.getColumnRemarks(column) != null){
                if (database instanceof MySQLDatabase) {
                    // MySQL supports inline column comments.
                    buffer.append(" COMMENT '" + database.escapeStringForDatabase(statement.getColumnRemarks(column)) + "'");
                } else if (database instanceof MSSQLDatabase) {
                    // MSSQL stores remarks via the sp_addextendedproperty
                    // procedure, emitted as an additional statement.
                    String schemaName = new CatalogAndSchema(statement.getCatalogName(), statement.getSchemaName()).standardize(database).getSchemaName();
                    if (schemaName == null) {
                        schemaName = database.getDefaultSchemaName();
                    }
                    additionalSql.add(new UnparsedSql("EXEC sp_addextendedproperty @name = N'MS_Description', @value = '"+statement.getColumnRemarks(column)+"', @level0type = N'Schema', @level0name = "+ schemaName +", @level1type = N'Table', @level1name = "+statement.getTableName()+", @level2type = N'Column', @level2name = "+column));
                }
            }

            if (columnIterator.hasNext()) {
                buffer.append(", ");
            }
        }

        buffer.append(",");

        if (!( (database instanceof SQLiteDatabase) &&
                isSinglePrimaryKeyColumn &&
                isPrimaryKeyAutoIncrement) &&

                !((database instanceof InformixDatabase) &&
                isSinglePrimaryKeyColumn
                )) {
            // ...skip this code block for sqlite if a single column primary key
            // with an autoincrement constraint exists.
            // This constraint is added after the column type.

            if (statement.getPrimaryKeyConstraint() != null && statement.getPrimaryKeyConstraint().getColumns().size() > 0) {
                if (database.supportsPrimaryKeyNames()) {
                    String pkName = StringUtils.trimToNull(statement.getPrimaryKeyConstraint().getConstraintName());
                    if (pkName == null) {
                        // TODO ORA-00972: identifier is too long
                        // If tableName lenght is more then 28 symbols
                        // then generated pkName will be incorrect
                        pkName = database.generatePrimaryKeyName(statement.getTableName());
                    }
                    if (pkName != null) {
                        buffer.append(" CONSTRAINT ");
                        buffer.append(database.escapeConstraintName(pkName));
                    }
                }
                buffer.append(" PRIMARY KEY (");
                buffer.append(database.escapeColumnNameList(StringUtils.join(statement.getPrimaryKeyConstraint().getColumns(), ", ")));
                buffer.append(")");

                // Setting up table space for PK's index if it exist
                if (database instanceof OracleDatabase &&
                    statement.getPrimaryKeyConstraint().getTablespace() != null) {
                    buffer.append(" USING INDEX TABLESPACE ");
                    buffer.append(statement.getPrimaryKeyConstraint().getTablespace());
                }

                buffer.append(",");
            }
        }

        // Foreign key constraints. Note: Informix puts the constraint name
        // after the column list, other databases before it.
        for (ForeignKeyConstraint fkConstraint : statement.getForeignKeyConstraints()) {
            if (!(database instanceof InformixDatabase)) {
                buffer.append(" CONSTRAINT ");
                buffer.append(database.escapeConstraintName(fkConstraint.getForeignKeyName()));
            }
            String referencesString = fkConstraint.getReferences();

            buffer.append(" FOREIGN KEY (")
                    .append(database.escapeColumnName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName(), fkConstraint.getColumn()))
                    .append(") REFERENCES ");
            if (referencesString != null) {
                // Schema-qualify an unqualified reference when a default
                // schema should be written out.
                if (!referencesString.contains(".") && database.getDefaultSchemaName() != null && database.getOutputDefaultSchema()) {
                    referencesString = database.escapeObjectName(database.getDefaultSchemaName(), Schema.class) +"."+referencesString;
                }
                buffer.append(referencesString);
            } else {
                buffer.append(database.escapeObjectName(fkConstraint.getReferencedTableName(), Table.class))
                        .append("(")
                        .append(database.escapeColumnNameList(fkConstraint.getReferencedColumnNames()))
                        .append(")");
            }

            if (fkConstraint.isDeleteCascade()) {
                buffer.append(" ON DELETE CASCADE");
            }

            if ((database instanceof InformixDatabase)) {
                buffer.append(" CONSTRAINT ");
                buffer.append(database.escapeConstraintName(fkConstraint.getForeignKeyName()));
            }

            if (fkConstraint.isInitiallyDeferred()) {
                buffer.append(" INITIALLY DEFERRED");
            }
            if (fkConstraint.isDeferrable()) {
                buffer.append(" DEFERRABLE");
            }
            buffer.append(",");
        }

        // Unique constraints. Informix puts the constraint name after the
        // UNIQUE clause (see constraintNameAfterUnique()).
        for (UniqueConstraint uniqueConstraint : statement.getUniqueConstraints()) {
            if (uniqueConstraint.getConstraintName() != null && !constraintNameAfterUnique(database)) {
                buffer.append(" CONSTRAINT ");
                buffer.append(database.escapeConstraintName(uniqueConstraint.getConstraintName()));
            }
            buffer.append(" UNIQUE (");
            buffer.append(database.escapeColumnNameList(StringUtils.join(uniqueConstraint.getColumns(), ", ")));
            buffer.append(")");
            if (uniqueConstraint.getConstraintName() != null && constraintNameAfterUnique(database)) {
                buffer.append(" CONSTRAINT ");
                buffer.append(database.escapeConstraintName(uniqueConstraint.getConstraintName()));
            }
            buffer.append(",");
        }

//        if (constraints != null && constraints.getCheckConstraint() != null) {
//            buffer.append(constraints.getCheckConstraint()).append(" ");
//        }
//    }

        // Drop the trailing comma and close the column/constraint list.
        String sql = buffer.toString().replaceFirst(",\\s*$", "")+")";

        if (database instanceof MySQLDatabase && mysqlTableOptionStartWith != null){
            LogFactory.getLogger().info("[MySQL] Using last startWith statement ("+mysqlTableOptionStartWith.toString()+") as table option.");
            sql += " "+((MySQLDatabase)database).getTableOptionAutoIncrementStartWithClause(mysqlTableOptionStartWith);
        }

//        if (StringUtils.trimToNull(tablespace) != null && database.supportsTablespaces()) {
//            if (database instanceof MSSQLDatabase) {
//                buffer.append(" ON ").append(tablespace);
//            } else if (database instanceof DB2Database) {
//                buffer.append(" IN ").append(tablespace);
//            } else {
//                buffer.append(" TABLESPACE ").append(tablespace);
//            }
//        }

        // Tablespace clause: keyword differs per database family.
        if (statement.getTablespace() != null && database.supportsTablespaces()) {
            if (database instanceof MSSQLDatabase || database instanceof SybaseASADatabase) {
                sql += " ON " + statement.getTablespace();
            } else if (database instanceof DB2Database || database instanceof InformixDatabase) {
                sql += " IN " + statement.getTablespace();
            } else {
                sql += " TABLESPACE " + statement.getTablespace();
            }
        }

        if( database instanceof MySQLDatabase && statement.getRemarks() != null) {
            sql += " COMMENT='"+database.escapeStringForDatabase(statement.getRemarks())+"' ";
        }

        // The CREATE TABLE must come first, before any additional statements.
        additionalSql.add(0, new UnparsedSql(sql, getAffectedTable(statement)));

        return additionalSql.toArray(new Sql[additionalSql.size()]);
    }

    /**
     * Describes the table affected by the statement, for change tracking.
     */
    protected Relation getAffectedTable(CreateTableStatement statement) {
        return new Table().setName(statement.getTableName()).setSchema(new Schema(statement.getCatalogName(), statement.getSchemaName()));
    }

    // Informix places the constraint name after the UNIQUE clause.
    private boolean constraintNameAfterUnique(Database database) {
        return database instanceof InformixDatabase;
    }
}
{ "content_hash": "d97644f125cddb2bc0713a6355804b24", "timestamp": "", "source": "github", "line_count": 317, "max_line_length": 336, "avg_line_length": 50.44479495268139, "alnum_prop": 0.61484585079107, "repo_name": "CoderPaulK/liquibase", "id": "6c5441d04088a055bcea51416b00ab624a586466", "size": "15991", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "liquibase-core/src/main/java/liquibase/sqlgenerator/core/CreateTableGenerator.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1847" }, { "name": "CSS", "bytes": "1202" }, { "name": "Groff", "bytes": "3153" }, { "name": "Groovy", "bytes": "576482" }, { "name": "HTML", "bytes": "2223" }, { "name": "Inno Setup", "bytes": "2522" }, { "name": "Java", "bytes": "4146437" }, { "name": "PLSQL", "bytes": "5072" }, { "name": "PLpgSQL", "bytes": "502" }, { "name": "Puppet", "bytes": "5196" }, { "name": "Ruby", "bytes": "5624" }, { "name": "SQLPL", "bytes": "1791" }, { "name": "Shell", "bytes": "7365" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <repositories> <repository> <id>atlassian-public</id> <url>https://maven.atlassian.com/repository/public</url> <snapshots> <enabled>true</enabled> <updatePolicy>never</updatePolicy> <checksumPolicy>warn</checksumPolicy> </snapshots> <releases> <enabled>true</enabled> <checksumPolicy>warn</checksumPolicy> </releases> </repository> </repositories> <modelVersion>4.0.0</modelVersion> <groupId>com.schubergphilis.confluence.plugins</groupId> <artifactId>tableau-plugin</artifactId> <version>0.93</version> <organization> <name>Schuberg Philis</name> <url>http://www.schubergphilis.com/</url> </organization> <name>Schuberg Philis Tableau Plugin</name> <description>This plugin renders a tableau report</description> <dependencies> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.6</version> <scope>test</scope> </dependency> <dependency> <groupId>com.atlassian.confluence</groupId> <artifactId>confluence</artifactId> <version>${confluence.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>com.atlassian.confluence.plugin</groupId> <artifactId>func-test</artifactId> <version>2.3</version> <scope>test</scope> </dependency> <dependency> <groupId>net.sourceforge.jwebunit</groupId> <artifactId>jwebunit-htmlunit-plugin</artifactId> <version>2.2</version> <scope>test</scope> </dependency> <dependency> <groupId>net.sourceforge.nekohtml</groupId> <artifactId>nekohtml</artifactId> <version>1.9.12</version> <scope>test</scope> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>1.9.0-rc1</version> <scope>test</scope> </dependency> </dependencies> <build> <plugins> <plugin> <groupId>com.atlassian.maven.plugins</groupId> 
<artifactId>maven-confluence-plugin</artifactId> <version>${amps.version}</version> <extensions>true</extensions> <configuration> <productVersion>${confluence.version}</productVersion> <productDataVersion>${confluence.data.version}</productDataVersion> </configuration> </plugin> <plugin> <artifactId>maven-compiler-plugin</artifactId> <configuration> <source>1.8</source> <target>1.8</target> </configuration> </plugin> </plugins> </build> <properties> <confluence.version>5.10.0</confluence.version> <confluence.data.version>5.10.0</confluence.data.version> <amps.version>6.2.1</amps.version> </properties> <developers> <developer> <name>Roel Gerrits</name> <organization>Schuberg Philis</organization> <organizationUrl>www.schubergphilis.com</organizationUrl> </developer> </developers> </project>
{ "content_hash": "690c271d0db6fa3e8ef8ddd148c15866", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 201, "avg_line_length": 37.8, "alnum_prop": 0.5648148148148148, "repo_name": "schubergphilis/tableau-confluence-plugin", "id": "8496760370d4a5d473ff02ac636a7b192453ffc7", "size": "3780", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "70010" }, { "name": "JavaScript", "bytes": "4089" } ], "symlink_target": "" }
<Global.Microsoft.VisualBasic.CompilerServices.DesignerGenerated()> _ Partial Class Pair Inherits System.Windows.Forms.Form 'Form remplace la méthode Dispose pour nettoyer la liste des composants. <System.Diagnostics.DebuggerNonUserCode()> _ Protected Overrides Sub Dispose(ByVal disposing As Boolean) Try If disposing AndAlso components IsNot Nothing Then components.Dispose() End If Finally MyBase.Dispose(disposing) End Try End Sub 'Requise par le Concepteur Windows Form Private components As System.ComponentModel.IContainer 'REMARQUE : la procédure suivante est requise par le Concepteur Windows Form 'Elle peut être modifiée à l'aide du Concepteur Windows Form. 'Ne la modifiez pas à l'aide de l'éditeur de code. <System.Diagnostics.DebuggerStepThrough()> _ Private Sub InitializeComponent() Me.components = New System.ComponentModel.Container() Dim resources As System.ComponentModel.ComponentResourceManager = New System.ComponentModel.ComponentResourceManager(GetType(Pair)) Me.Button1 = New System.Windows.Forms.Button() Me.Button2 = New System.Windows.Forms.Button() Me.Button3 = New System.Windows.Forms.Button() Me.Button4 = New System.Windows.Forms.Button() Me.PictureBox1 = New System.Windows.Forms.PictureBox() Me.Check = New System.Windows.Forms.Timer(Me.components) Me.Label1 = New System.Windows.Forms.Label() Me.Button5 = New System.Windows.Forms.Button() Me.Button6 = New System.Windows.Forms.Button() Me.Button7 = New System.Windows.Forms.Button() Me.Button8 = New System.Windows.Forms.Button() Me.Button9 = New System.Windows.Forms.Button() Me.Button10 = New System.Windows.Forms.Button() Me.Button11 = New System.Windows.Forms.Button() Me.Button12 = New System.Windows.Forms.Button() Me.Button13 = New System.Windows.Forms.Button() Me.Button14 = New System.Windows.Forms.Button() Me.Button15 = New System.Windows.Forms.Button() Me.Button16 = New System.Windows.Forms.Button() Me.Button17 = New System.Windows.Forms.Button() Me.Button18 = New System.Windows.Forms.Button() 
Me.Button19 = New System.Windows.Forms.Button() Me.Button20 = New System.Windows.Forms.Button() Me.Button21 = New System.Windows.Forms.Button() Me.Button22 = New System.Windows.Forms.Button() Me.Button23 = New System.Windows.Forms.Button() Me.Button24 = New System.Windows.Forms.Button() Me.Restart = New System.Windows.Forms.Button() Me.Temps = New System.Windows.Forms.Timer(Me.components) Me.Label2 = New System.Windows.Forms.Label() Me.PictureBox2 = New System.Windows.Forms.PictureBox() CType(Me.PictureBox1, System.ComponentModel.ISupportInitialize).BeginInit() CType(Me.PictureBox2, System.ComponentModel.ISupportInitialize).BeginInit() Me.SuspendLayout() ' 'Button1 ' Me.Button1.Location = New System.Drawing.Point(274, 216) Me.Button1.Name = "Button1" Me.Button1.Size = New System.Drawing.Size(75, 23) Me.Button1.TabIndex = 0 Me.Button1.UseVisualStyleBackColor = True ' 'Button2 ' Me.Button2.Location = New System.Drawing.Point(108, 28) Me.Button2.Name = "Button2" Me.Button2.Size = New System.Drawing.Size(75, 23) Me.Button2.TabIndex = 1 Me.Button2.UseVisualStyleBackColor = True ' 'Button3 ' Me.Button3.Location = New System.Drawing.Point(27, 261) Me.Button3.Name = "Button3" Me.Button3.Size = New System.Drawing.Size(75, 23) Me.Button3.TabIndex = 2 Me.Button3.UseVisualStyleBackColor = True ' 'Button4 ' Me.Button4.Location = New System.Drawing.Point(274, 28) Me.Button4.Name = "Button4" Me.Button4.Size = New System.Drawing.Size(75, 23) Me.Button4.TabIndex = 3 Me.Button4.UseVisualStyleBackColor = True ' 'PictureBox1 ' Me.PictureBox1.BackColor = System.Drawing.Color.FromArgb(CType(CType(128, Byte), Integer), CType(CType(255, Byte), Integer), CType(CType(128, Byte), Integer)) Me.PictureBox1.Location = New System.Drawing.Point(27, 319) Me.PictureBox1.Name = "PictureBox1" Me.PictureBox1.Size = New System.Drawing.Size(322, 242) Me.PictureBox1.TabIndex = 4 Me.PictureBox1.TabStop = False ' 'Check ' ' 'Label1 ' Me.Label1.AutoSize = True Me.Label1.Font = New 
System.Drawing.Font("Microsoft Sans Serif", 15.25!) Me.Label1.Location = New System.Drawing.Point(446, 26) Me.Label1.Name = "Label1" Me.Label1.Size = New System.Drawing.Size(149, 25) Me.Label1.TabIndex = 5 Me.Label1.Text = "Vous avez /12" ' 'Button5 ' Me.Button5.Location = New System.Drawing.Point(274, 261) Me.Button5.Name = "Button5" Me.Button5.Size = New System.Drawing.Size(75, 23) Me.Button5.TabIndex = 6 Me.Button5.UseVisualStyleBackColor = True ' 'Button6 ' Me.Button6.Location = New System.Drawing.Point(108, 75) Me.Button6.Name = "Button6" Me.Button6.Size = New System.Drawing.Size(75, 23) Me.Button6.TabIndex = 7 Me.Button6.UseVisualStyleBackColor = True ' 'Button7 ' Me.Button7.Location = New System.Drawing.Point(108, 120) Me.Button7.Name = "Button7" Me.Button7.Size = New System.Drawing.Size(75, 23) Me.Button7.TabIndex = 8 Me.Button7.UseVisualStyleBackColor = True ' 'Button8 ' Me.Button8.Location = New System.Drawing.Point(274, 75) Me.Button8.Name = "Button8" Me.Button8.Size = New System.Drawing.Size(75, 23) Me.Button8.TabIndex = 9 Me.Button8.UseVisualStyleBackColor = True ' 'Button9 ' Me.Button9.Location = New System.Drawing.Point(108, 216) Me.Button9.Name = "Button9" Me.Button9.Size = New System.Drawing.Size(75, 23) Me.Button9.TabIndex = 13 Me.Button9.UseVisualStyleBackColor = True ' 'Button10 ' Me.Button10.Location = New System.Drawing.Point(27, 169) Me.Button10.Name = "Button10" Me.Button10.Size = New System.Drawing.Size(75, 23) Me.Button10.TabIndex = 12 Me.Button10.UseVisualStyleBackColor = True ' 'Button11 ' Me.Button11.Location = New System.Drawing.Point(274, 120) Me.Button11.Name = "Button11" Me.Button11.Size = New System.Drawing.Size(75, 23) Me.Button11.TabIndex = 11 Me.Button11.UseVisualStyleBackColor = True ' 'Button12 ' Me.Button12.Location = New System.Drawing.Point(27, 120) Me.Button12.Name = "Button12" Me.Button12.Size = New System.Drawing.Size(75, 23) Me.Button12.TabIndex = 10 Me.Button12.UseVisualStyleBackColor = True ' 'Button13 ' 
Me.Button13.Location = New System.Drawing.Point(27, 75) Me.Button13.Name = "Button13" Me.Button13.Size = New System.Drawing.Size(75, 23) Me.Button13.TabIndex = 25 Me.Button13.UseVisualStyleBackColor = True ' 'Button14 ' Me.Button14.Location = New System.Drawing.Point(193, 261) Me.Button14.Name = "Button14" Me.Button14.Size = New System.Drawing.Size(75, 23) Me.Button14.TabIndex = 24 Me.Button14.UseVisualStyleBackColor = True ' 'Button15 ' Me.Button15.Location = New System.Drawing.Point(108, 261) Me.Button15.Name = "Button15" Me.Button15.Size = New System.Drawing.Size(75, 23) Me.Button15.TabIndex = 23 Me.Button15.UseVisualStyleBackColor = True ' 'Button16 ' Me.Button16.Location = New System.Drawing.Point(193, 28) Me.Button16.Name = "Button16" Me.Button16.Size = New System.Drawing.Size(75, 23) Me.Button16.TabIndex = 22 Me.Button16.UseVisualStyleBackColor = True ' 'Button17 ' Me.Button17.Location = New System.Drawing.Point(27, 28) Me.Button17.Name = "Button17" Me.Button17.Size = New System.Drawing.Size(75, 23) Me.Button17.TabIndex = 21 Me.Button17.UseVisualStyleBackColor = True ' 'Button18 ' Me.Button18.Location = New System.Drawing.Point(193, 216) Me.Button18.Name = "Button18" Me.Button18.Size = New System.Drawing.Size(75, 23) Me.Button18.TabIndex = 20 Me.Button18.UseVisualStyleBackColor = True ' 'Button19 ' Me.Button19.Location = New System.Drawing.Point(193, 75) Me.Button19.Name = "Button19" Me.Button19.Size = New System.Drawing.Size(75, 23) Me.Button19.TabIndex = 19 Me.Button19.UseVisualStyleBackColor = True ' 'Button20 ' Me.Button20.Location = New System.Drawing.Point(27, 216) Me.Button20.Name = "Button20" Me.Button20.Size = New System.Drawing.Size(75, 23) Me.Button20.TabIndex = 18 Me.Button20.UseVisualStyleBackColor = True ' 'Button21 ' Me.Button21.Location = New System.Drawing.Point(274, 169) Me.Button21.Name = "Button21" Me.Button21.Size = New System.Drawing.Size(75, 23) Me.Button21.TabIndex = 17 Me.Button21.UseVisualStyleBackColor = True ' 'Button22 ' 
Me.Button22.Location = New System.Drawing.Point(193, 169) Me.Button22.Name = "Button22" Me.Button22.Size = New System.Drawing.Size(75, 23) Me.Button22.TabIndex = 16 Me.Button22.UseVisualStyleBackColor = True ' 'Button23 ' Me.Button23.Location = New System.Drawing.Point(108, 169) Me.Button23.Name = "Button23" Me.Button23.Size = New System.Drawing.Size(75, 23) Me.Button23.TabIndex = 15 Me.Button23.UseVisualStyleBackColor = True ' 'Button24 ' Me.Button24.Location = New System.Drawing.Point(193, 120) Me.Button24.Name = "Button24" Me.Button24.Size = New System.Drawing.Size(75, 23) Me.Button24.TabIndex = 14 Me.Button24.UseVisualStyleBackColor = True ' 'Restart ' Me.Restart.Font = New System.Drawing.Font("Microsoft Sans Serif", 15.25!) Me.Restart.Location = New System.Drawing.Point(420, 523) Me.Restart.Name = "Restart" Me.Restart.Size = New System.Drawing.Size(175, 38) Me.Restart.TabIndex = 26 Me.Restart.Text = "Commencer" Me.Restart.UseVisualStyleBackColor = True ' 'Temps ' Me.Temps.Interval = 1000 ' 'Label2 ' Me.Label2.AutoSize = True Me.Label2.Font = New System.Drawing.Font("Microsoft Sans Serif", 15.25!) Me.Label2.Location = New System.Drawing.Point(446, 51) Me.Label2.Name = "Label2" Me.Label2.Size = New System.Drawing.Size(89, 25) Me.Label2.TabIndex = 27 Me.Label2.Text = "Temps :" ' 'PictureBox2 ' Me.PictureBox2.Image = CType(resources.GetObject("PictureBox2.Image"), System.Drawing.Image) Me.PictureBox2.Location = New System.Drawing.Point(358, 511) Me.PictureBox2.Name = "PictureBox2" Me.PictureBox2.Size = New System.Drawing.Size(56, 50) Me.PictureBox2.SizeMode = System.Windows.Forms.PictureBoxSizeMode.StretchImage Me.PictureBox2.TabIndex = 28 Me.PictureBox2.TabStop = False ' 'Pair ' Me.AutoScaleDimensions = New System.Drawing.SizeF(6.0!, 13.0!) 
Me.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font Me.BackColor = System.Drawing.Color.Snow Me.ClientSize = New System.Drawing.Size(607, 573) Me.Controls.Add(Me.PictureBox2) Me.Controls.Add(Me.Label2) Me.Controls.Add(Me.Restart) Me.Controls.Add(Me.Button13) Me.Controls.Add(Me.Button14) Me.Controls.Add(Me.Button15) Me.Controls.Add(Me.Button16) Me.Controls.Add(Me.Button17) Me.Controls.Add(Me.Button18) Me.Controls.Add(Me.Button19) Me.Controls.Add(Me.Button20) Me.Controls.Add(Me.Button21) Me.Controls.Add(Me.Button22) Me.Controls.Add(Me.Button23) Me.Controls.Add(Me.Button24) Me.Controls.Add(Me.Button9) Me.Controls.Add(Me.Button10) Me.Controls.Add(Me.Button11) Me.Controls.Add(Me.Button12) Me.Controls.Add(Me.Button8) Me.Controls.Add(Me.Button7) Me.Controls.Add(Me.Button6) Me.Controls.Add(Me.Button5) Me.Controls.Add(Me.Label1) Me.Controls.Add(Me.PictureBox1) Me.Controls.Add(Me.Button4) Me.Controls.Add(Me.Button3) Me.Controls.Add(Me.Button2) Me.Controls.Add(Me.Button1) Me.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedToolWindow Me.Icon = CType(resources.GetObject("$this.Icon"), System.Drawing.Icon) Me.Name = "Pair" Me.Text = "Pair It" CType(Me.PictureBox1, System.ComponentModel.ISupportInitialize).EndInit() CType(Me.PictureBox2, System.ComponentModel.ISupportInitialize).EndInit() Me.ResumeLayout(False) Me.PerformLayout() End Sub Friend WithEvents Button1 As System.Windows.Forms.Button Friend WithEvents Button2 As System.Windows.Forms.Button Friend WithEvents Button3 As System.Windows.Forms.Button Friend WithEvents Button4 As System.Windows.Forms.Button Friend WithEvents PictureBox1 As System.Windows.Forms.PictureBox Friend WithEvents Check As System.Windows.Forms.Timer Friend WithEvents Label1 As System.Windows.Forms.Label Friend WithEvents Button5 As System.Windows.Forms.Button Friend WithEvents Button6 As System.Windows.Forms.Button Friend WithEvents Button7 As System.Windows.Forms.Button Friend WithEvents Button8 As System.Windows.Forms.Button 
Friend WithEvents Button9 As System.Windows.Forms.Button Friend WithEvents Button10 As System.Windows.Forms.Button Friend WithEvents Button11 As System.Windows.Forms.Button Friend WithEvents Button12 As System.Windows.Forms.Button Friend WithEvents Button13 As System.Windows.Forms.Button Friend WithEvents Button14 As System.Windows.Forms.Button Friend WithEvents Button15 As System.Windows.Forms.Button Friend WithEvents Button16 As System.Windows.Forms.Button Friend WithEvents Button17 As System.Windows.Forms.Button Friend WithEvents Button18 As System.Windows.Forms.Button Friend WithEvents Button19 As System.Windows.Forms.Button Friend WithEvents Button20 As System.Windows.Forms.Button Friend WithEvents Button21 As System.Windows.Forms.Button Friend WithEvents Button22 As System.Windows.Forms.Button Friend WithEvents Button23 As System.Windows.Forms.Button Friend WithEvents Button24 As System.Windows.Forms.Button Friend WithEvents Restart As System.Windows.Forms.Button Friend WithEvents Temps As System.Windows.Forms.Timer Friend WithEvents Label2 As System.Windows.Forms.Label Friend WithEvents PictureBox2 As System.Windows.Forms.PictureBox End Class
{ "content_hash": "888b4c4248dfd5526b2702c02e40ff6d", "timestamp": "", "source": "github", "line_count": 387, "max_line_length": 166, "avg_line_length": 40.7390180878553, "alnum_prop": 0.6417607509831282, "repo_name": "cedced19/iced", "id": "a474598159fc0d5a07da15482f3635c1758c1b29", "size": "15776", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ICed/Pair.Designer.vb", "mode": "33188", "license": "mit", "language": [ { "name": "Visual Basic", "bytes": "211734" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>hammer: Not compatible</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.13.2 / hammer - 1.3+8.12</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> hammer <small> 1.3+8.12 <span class="label label-info">Not compatible</span> </small> </h1> <p><em><script>document.write(moment("2021-11-05 09:57:39 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2021-11-05 09:57:39 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 3 Virtual package relying on a GMP lib system installation coq 8.13.2 Formal proof management system num 1.4 
The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.12.0 The OCaml compiler (virtual package) ocaml-base-compiler 4.12.0 Official release 4.12.0 ocaml-config 2 OCaml Switch Configuration ocaml-options-vanilla 1 Ensure that OCaml is compiled with no special options enabled ocamlfind 1.9.1 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;https://github.com/lukaszcz/coqhammer&quot; dev-repo: &quot;git+https://github.com/lukaszcz/coqhammer.git&quot; bug-reports: &quot;https://github.com/lukaszcz/coqhammer/issues&quot; license: &quot;LGPL-2.1-only&quot; synopsis: &quot;General-purpose automated reasoning hammer tool for Coq&quot; description: &quot;&quot;&quot; A general-purpose automated reasoning hammer tool for Coq that combines learning from previous proofs with the translation of problems to the logics of automated systems and the reconstruction of successfully found proofs. 
&quot;&quot;&quot; build: [make &quot;-j%{jobs}%&quot; {ocaml:version &gt;= &quot;4.06&quot;} &quot;plugin&quot;] install: [ [make &quot;install-plugin&quot;] [make &quot;test-plugin&quot;] {with-test} ] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.12&quot; &amp; &lt; &quot;8.13~&quot;} (&quot;conf-g++&quot; {build} | &quot;conf-clang&quot; {build}) &quot;coq-hammer-tactics&quot; {= version} ] tags: [ &quot;category:Miscellaneous/Coq Extensions&quot; &quot;keyword:automation&quot; &quot;keyword:hammer&quot; &quot;logpath:Hammer.Plugin&quot; &quot;date:2020-07-28&quot; ] authors: [ &quot;Lukasz Czajka &lt;[email protected]&gt;&quot; &quot;Cezary Kaliszyk &lt;[email protected]&gt;&quot; ] url { src: &quot;https://github.com/lukaszcz/coqhammer/archive/v1.3-coq8.12.tar.gz&quot; checksum: &quot;sha512=666ea825c122319e398efb7287f429ebfb5d35611b4cabe4b88732ffb5c265ef348b53d5046c958831ac0b7a759b44ce1ca04220ca68b1915accfd23435b479c&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-hammer.1.3+8.12 coq.8.13.2</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.13.2). 
The following dependencies couldn&#39;t be met: - coq-hammer -&gt; coq &lt; 8.13~ -&gt; ocaml &lt; 4.12 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-hammer.1.3+8.12</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> <small>Sources are on <a href="https://github.com/coq-bench">GitHub</a>. © Guillaume Claret.</small> </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "e0d0c8ee64e29bc10ba1f9b058f601b4", "timestamp": "", "source": "github", "line_count": 179, "max_line_length": 159, "avg_line_length": 41.87150837988827, "alnum_prop": 0.5585056704469646, "repo_name": "coq-bench/coq-bench.github.io", "id": "c6a1230943ac0e41de56b3b6d7eb717f58fe722b", "size": "7497", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.12.0-2.0.8/released/8.13.2/hammer/1.3+8.12.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
import axios from "axios"; import { API } from "../api"; import { success, info } from "../toast/toast"; import { history } from "../history"; import { Actions } from "../constants"; import { destroy, initSave, initSaveGetId } from "../api/crud"; import { unpackUUID } from "../util"; import { isString } from "lodash"; import { TaggedSavedGarden, TaggedPlantTemplate } from "farmbot"; import { t } from "../i18next_wrapper"; import { stopTracking } from "../connectivity/data_consistency"; /** Save all Plant to PlantTemplates in a new SavedGarden. */ export const snapshotGarden = (gardenName?: string | undefined) => axios.post<void>(API.current.snapshotPath, gardenName ? { name: gardenName } : {}) .then(() => { success(t("Garden Saved.")); history.push("/app/designer/gardens"); }); export const unselectSavedGarden = { type: Actions.CHOOSE_SAVED_GARDEN, payload: undefined }; /** Save a SavedGarden's PlantTemplates as Plants. */ export const applyGarden = (gardenId: number) => (dispatch: Function) => axios .patch<void>(API.current.applyGardenPath(gardenId)) .then(data => { stopTracking(data.headers["x-farmbot-rpc-id"]); history.push("/app/designer/plants"); dispatch(unselectSavedGarden); const busyToastTitle = t("Please wait"); info(t("while your garden is applied."), { title: busyToastTitle }); }); export const destroySavedGarden = (uuid: string) => (dispatch: Function) => { dispatch(unselectSavedGarden); history.push("/app/designer/gardens"); dispatch(destroy(uuid)); }; export const closeSavedGarden = () => { history.push("/app/designer/gardens"); return (dispatch: Function) => dispatch(unselectSavedGarden); }; export const openSavedGarden = (savedGarden: string) => { history.push("/app/designer/gardens/" + unpackUUID(savedGarden).remoteId); return (dispatch: Function) => dispatch({ type: Actions.CHOOSE_SAVED_GARDEN, payload: savedGarden }); }; /** Open a SavedGarden if it is closed, otherwise close it. 
*/ export const openOrCloseGarden = (props: { savedGarden: string | undefined, gardenIsOpen: boolean, dispatch: Function }) => () => !props.gardenIsOpen && isString(props.savedGarden) ? props.dispatch(openSavedGarden(props.savedGarden)) : props.dispatch(closeSavedGarden()); /** Create a new SavedGarden with the chosen name. */ export const newSavedGarden = (gardenName: string) => (dispatch: Function) => { dispatch(initSave("SavedGarden", { name: gardenName || "Untitled Garden" })) .then(() => { success(t("Garden Saved.")); history.push("/app/designer/gardens"); }); }; /** Create a copy of a PlantTemplate body and assign it a new SavedGarden. */ const newPTBody = (source: TaggedPlantTemplate, newSGId: number): TaggedPlantTemplate["body"] => ({ name: source.body.name, openfarm_slug: source.body.openfarm_slug, saved_garden_id: newSGId, radius: source.body.radius, x: source.body.x, y: source.body.y, z: source.body.z, }); /** Copy a SavedGarden and all of its PlantTemplates. */ export const copySavedGarden = ({ newSGName, savedGarden, plantTemplates }: { newSGName: string, savedGarden: TaggedSavedGarden, plantTemplates: TaggedPlantTemplate[] }) => (dispatch: Function) => { const sourceSavedGardenId = savedGarden.body.id; const gardenName = newSGName || `${savedGarden.body.name} (${t("copy")})`; dispatch(initSaveGetId(savedGarden.kind, { name: gardenName })) .then((newSGId: number) => { plantTemplates .filter(x => x.body.saved_garden_id === sourceSavedGardenId) .map(x => dispatch(initSave(x.kind, newPTBody(x, newSGId)))); success(t("Garden Saved.")); history.push("/app/designer/gardens"); }); };
{ "content_hash": "e1daa97aa27a6d6cdbe17660b1eb48db", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 80, "avg_line_length": 35.48148148148148, "alnum_prop": 0.6672755741127349, "repo_name": "FarmBot/Farmbot-Web-API", "id": "ee1e1861ada45564863558ad7a24e7b30083bd62", "size": "3832", "binary": false, "copies": "2", "ref": "refs/heads/soil_height", "path": "frontend/saved_gardens/actions.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "62380" }, { "name": "HTML", "bytes": "28417" }, { "name": "JavaScript", "bytes": "101562" }, { "name": "Ruby", "bytes": "268353" }, { "name": "Shell", "bytes": "610" }, { "name": "TypeScript", "bytes": "604078" } ], "symlink_target": "" }
<?php // Check for empty fields if(empty($_POST['name']) || empty($_POST['email']) || empty($_POST['phone']) || empty($_POST['message']) || !filter_var($_POST['email'],FILTER_VALIDATE_EMAIL)) { echo "No arguments Provided!"; return false; } $name = strip_tags(htmlspecialchars($_POST['name'])); $email_address = strip_tags(htmlspecialchars($_POST['email'])); $phone = strip_tags(htmlspecialchars($_POST['phone'])); $message = strip_tags(htmlspecialchars($_POST['message'])); // Create the email and send the message $to = '[email protected]'; // Add your email address inbetween the '' replacing [email protected] - This is where the form will send a message to. $email_subject = "Website Contact Form: $name"; $email_body = "You have received a new message from your website contact form.\n\n"."Here are the details:\n\nName: $name\n\nEmail: $email_address\n\nPhone: $phone\n\nMessage:\n$message"; $headers = "From: [email protected]\n"; // This is the email address the generated message will be from. We recommend using something like [email protected]. $headers .= "Reply-To: $email_address"; mail($to,$email_subject,$email_body,$headers); return true; ?>
{ "content_hash": "4952c90a605eed711952c569d780b392", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 187, "avg_line_length": 46.69230769230769, "alnum_prop": 0.6894563426688632, "repo_name": "jlopez08/jlopez08.github.io", "id": "150b9c76e777f3823ffbe882dfb75b0ede25bcda", "size": "1214", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mail/contact_me.php", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "21496" }, { "name": "HTML", "bytes": "28201" }, { "name": "JavaScript", "bytes": "44197" }, { "name": "PHP", "bytes": "1214" } ], "symlink_target": "" }
export function parseDirective(statement: any, context: any, type: any): void; export function clear(): void; export function setAxisFormat(txt: any): void; export function getAxisFormat(): string; export function setTodayMarker(txt: any): void; export function getTodayMarker(): string; export function setDateFormat(txt: any): void; export function enableInclusiveEndDates(): void; export function endDatesAreInclusive(): boolean; export function enableTopAxis(): void; export function topAxisEnabled(): boolean; export function getDateFormat(): string; export function setIncludes(txt: any): void; export function getIncludes(): any[]; export function setExcludes(txt: any): void; export function getExcludes(): any[]; export function getLinks(): {}; export function addSection(txt: any): void; export function getSections(): any[]; export function getTasks(): any[]; export function isInvalidDate(date: any, dateFormat: any, excludes: any, includes: any): boolean; export function addTask(descr: any, data: any): void; export function findTaskById(id: any): any; export function addTaskOrg(descr: any, data: any): void; export function setLink(ids: any, _linkStr: any): void; export function setClass(ids: any, className: any): void; export function setClickEvent(ids: any, functionName: any, functionArgs: any): void; export function bindFunctions(element: any): void; declare namespace _default { export { parseDirective }; export function getConfig(): import("../../config.type").GanttDiagramConfig | undefined; export { clear }; export { setDateFormat }; export { getDateFormat }; export { enableInclusiveEndDates }; export { endDatesAreInclusive }; export { enableTopAxis }; export { topAxisEnabled }; export { setAxisFormat }; export { getAxisFormat }; export { setTodayMarker }; export { getTodayMarker }; export { setAccTitle }; export { getAccTitle }; export { setDiagramTitle }; export { getDiagramTitle }; export { setAccDescription }; export { getAccDescription }; export { 
addSection }; export { getSections }; export { getTasks }; export { addTask }; export { findTaskById }; export { addTaskOrg }; export { setIncludes }; export { getIncludes }; export { setExcludes }; export { getExcludes }; export { setClickEvent }; export { setLink }; export { getLinks }; export { bindFunctions }; export { parseDuration }; export { isInvalidDate }; } export default _default; import { setAccTitle } from "../../commonDb"; import { getAccTitle } from "../../commonDb"; import { setDiagramTitle } from "../../commonDb"; import { getDiagramTitle } from "../../commonDb"; import { setAccDescription } from "../../commonDb"; import { getAccDescription } from "../../commonDb"; /** * Parse a string as a moment duration. * * The string have to be compound by a value and a shorthand duration unit. For example `5d` * representes 5 days. * * Shorthand unit supported are: * * - `y` for years * - `M` for months * - `w` for weeks * - `d` for days * - `h` for hours * - `s` for seconds * - `ms` for milliseconds * * @param {string} str - A string representing the duration. * @returns {moment.Duration} A moment duration, including an invalid moment for invalid input string. */ declare function parseDuration(str: string): moment.Duration; import moment from "moment-mini";
{ "content_hash": "a0ede8d521ee04f94d5510244e46d43b", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 102, "avg_line_length": 36.913978494623656, "alnum_prop": 0.7023011942907078, "repo_name": "cdnjs/cdnjs", "id": "b7d3f6669738c5d4f4ff4912a5640db3c8e3aa7f", "size": "3433", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ajax/libs/mermaid/9.2.0-rc1/diagrams/gantt/ganttDb.d.ts", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
namespace ssfs { namespace http { class HttpStatusLine { public: HttpStatusLine() = default; HttpStatusLine(const std::string& protocol, int status, const std::string& reason); std::string protocol_version; int status_code; std::string reason_phrase; std::string format() const { std::stringstream ss; ss << protocol_version << " " << status_code << " " << reason_phrase; return ss.str(); } }; class HttpResponse { public: HttpResponse() = default; // OK Response HttpResponse(const std::string& message, const std::string& content_type); // Error Response HttpResponse(int status, const std::string& reason, const std::string& message); HttpStatusLine status_line; std::string message; std::unordered_map<std::string, std::string> header; std::string format() const; }; } // namespace http } // namespace ssfs
{ "content_hash": "a8303254de6b297b9764098cb02408cb", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 55, "avg_line_length": 23.7, "alnum_prop": 0.6244725738396625, "repo_name": "boryas/ssfs", "id": "acb3b50d61ffedeb39340e98a059b3b97cc62545", "size": "1063", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cpp-old/lib/http/response.h", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "50859" }, { "name": "Makefile", "bytes": "7103" }, { "name": "Shell", "bytes": "45" } ], "symlink_target": "" }
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    """Schema migration: add ``code`` and ``municipality`` to ``streetlighting``.

    Auto-generated (0002_auto_*) — presumably by ``makemigrations``; do not edit
    the operations by hand, as Django compares them against recorded state.
    """

    # Must be applied after the initial schema for this app.
    dependencies = [
        ('myapp', '0001_initial'),
    ]

    operations = [
        # New integer column; existing rows are backfilled with 0.
        migrations.AddField(
            model_name='streetlighting',
            name='code',
            field=models.IntegerField(default=0),
            preserve_default=True,
        ),
        # New char column; note the default is the *string* '7', not the int 7.
        migrations.AddField(
            model_name='streetlighting',
            name='municipality',
            field=models.CharField(default='7', max_length=200),
            preserve_default=True,
        ),
    ]
{ "content_hash": "88a3652d709806039baefa76f2c07154", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 64, "avg_line_length": 24.68, "alnum_prop": 0.5688816855753647, "repo_name": "mpetyx/energagement", "id": "84136729378cb754160d92f2e5d4ba1ed3f56cb2", "size": "641", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "energagement/myapp/migrations/0002_auto_20150622_1146.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "22435" }, { "name": "HTML", "bytes": "234304" }, { "name": "JavaScript", "bytes": "58711" }, { "name": "Python", "bytes": "82307" } ], "symlink_target": "" }
title: L.esri.IdentifyImage
layout: documentation.hbs
---

# {{page.data.title}}

Extends [`L.esri.Task`]({{assets}}api-reference/tasks/task.html)

`L.esri.IdentifyImage` is an abstraction for the Identify API found in Image Services. It provides a chainable API for building request parameters and executing the request.

### Constructor

<table>
    <thead>
        <tr>
            <th>Constructor</th>
            <th>Description</th>
        </tr>
    </thead>
    <tbody>
        <tr>
            <td>
                <code>L.esri.identifyImage({{{param 'ImageService' 'endpoint' '../../api-reference/services/image-service.html'}}})</code><br><br>
                <code>L.esri.identifyImage({{{param 'Object' 'options'}}})</code><br></td>
            <td>Accepts either an `options` object or an instance of <a href="{{assets}}/api-reference/services/image-service.html">ImageService</a>.</td>
        </tr>
    </tbody>
</table>

### Options

| Option | Type | Default | Description |
| --- | --- | --- | --- |
| `url` | `String` | `''` | URL of the ArcGIS service you would like to consume. |
| `proxy` | `String` | `false` | URL of an [ArcGIS API for JavaScript proxy](https://developers.arcgis.com/javascript/jshelp/ags_proxy.html) or [ArcGIS Resource Proxy](https://github.com/Esri/resource-proxy) to use for proxying POST requests. |
| `useCors` | `Boolean` | `true` | If this task should use CORS when making GET requests.
|

### Methods

<table>
    <thead>
        <tr>
            <th>Method</th>
            <th>Returns</th>
            <th>Description</th>
        </tr>
    </thead>
    <tbody>
        <tr>
            <td><code>at({{{param 'LatLng' 'latlng' 'https://leafletjs.com/reference.html#latlng'}}})</code></td>
            <td><code>this</code></td>
            <td>Identifies the pixel value at a given [LatLng](https://leafletjs.com/reference.html#latlng)</td>
        </tr>
        <tr>
            <td><code>between({{{param 'Date' 'from'}}}, {{{param 'Date' 'to'}}})</code></td>
            <td><code>this</code></td>
            <td>Identifies pixel values within a given time range.</td>
        </tr>
        <tr>
            <td><code>getRenderingRule()</code></td>
            <td><code>Object</code></td>
            <td>Returns the current rendering rule of the task.</td>
        </tr>
        <tr>
            <td><code>setRenderingRule({{{param 'Object' 'renderingRule'}}})</code></td>
            <td><code>this</code></td>
            <td>Sets the rendering rule to apply when getting a pixel value.</td>
        </tr>
        <tr>
            <td><code>getMosaicRule()</code></td>
            <td><code>Object</code></td>
            <td>Returns the current mosaic rule of the task.</td>
        </tr>
        <tr>
            <td><code>setMosaicRule({{{param 'Object' 'mosaicRule'}}})</code></td>
            <td><code>this</code></td>
            <td>Sets the mosaic rule to apply when getting a pixel value.</td>
        </tr>
        <tr>
            <td><code>setPixelSize({{{param 'Array' 'pixelSize'}}} or {{{param 'String' 'pixelSize'}}})</code></td>
            <td><code>this</code></td>
            <td>Sets the pixel size to use when getting a pixel value. Either an array (<code>[x,y]</code>) or string (<code>'x,y'</code>). If not set, it will use the pixel size defined by the service.</td>
        </tr>
        <tr>
            <td><code>getPixelSize()</code></td>
            <td><code>Object</code></td>
            <td>Returns the current pixel size of the task.</td>
        </tr>
        <tr>
            <td><code>returnCatalogItems({{{param 'Boolean' 'returnCatalogItems'}}})</code></td>
            <td><code>this</code></td>
            <td>Indicates whether or not to return raster catalog items. Set it to `false` when catalog items are not needed to improve the identify operation's performance significantly.
When set to `false`, neither the geometry nor attributes of catalog items will be returned. Default is `false`.</td> </tr> <tr> <td><code>returnGeometry({{{param 'Boolean' 'returnGeometry'}}})</code></td> <td><code>this</code></td> <td>Return catalog footprints (geometry) with catalog item results. Default is `false`.</td> </tr> <tr> <td><code>token({{{param 'String' 'token'}}})</code></td> <td><code>this</code></td> <td>Adds a token to this request if the service requires authentication. Will be added automatically if used with a service.</td> </tr> <tr> <td><code>run({{{param 'Function' 'callback'}}}, {{{param 'Object' 'context'}}})</code></td> <td><code>this</code></td> <td>Executes the identify request with the current parameters, identified pixel value will be passed to <code>callback</code> as a <a href="https://tools.ietf.org/html/rfc7946#appendix-A.1">GeoJSON Point</a>. Accepts an optional function context</td> </tr> </tbody> </table> ### Example ```js L.map('map').setView([36.230577, -118.253147], 10); L.esri.identifyImage({ url: 'https://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Earthquakes/CaliforniaDEM/ImageServer' }) .at([36.230577, -118.253147]) .pixelSize([30, 30]) .run(function (error, identifyImageResponse, rawResponse) { if (error) { console.log(error); return; } console.log(identifyImageResponse.pixel.properties.value); }); ```
{ "content_hash": "9f0136328b8925735b469ec9f0ecc49d", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 304, "avg_line_length": 42.333333333333336, "alnum_prop": 0.5797472990294817, "repo_name": "Esri/esri-leaflet-doc", "id": "6c0084fe5dc21f665d0dd1b32c1db1b5171ec747", "size": "5465", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/pages/api-reference/tasks/identify-image.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Handlebars", "bytes": "135885" }, { "name": "JavaScript", "bytes": "12409" }, { "name": "SCSS", "bytes": "18022" } ], "symlink_target": "" }
import logging

import openpyxl

logger = logging.getLogger(__name__)


def write_xls(sheet_name, head, data_list):
    """Write tabular data into a new Excel workbook.

    Args:
        sheet_name: title given to the (single) worksheet.
        head: sequence of column headers written to the first row.
        data_list: iterable of rows; each row is a sequence of cell values.

    Returns:
        The populated ``openpyxl.Workbook``, or ``None`` if the workbook
        could not be created (the error is logged).
    """
    try:
        wb = openpyxl.Workbook()
        # ``Workbook.active`` replaces the deprecated ``get_active_sheet()``.
        ws = wb.active
    except Exception as e:
        logger.error(e)
        return None

    ws.title = sheet_name

    # openpyxl cells are 1-indexed: the header occupies row 1.
    # (enumerate replaces the Python-2-only ``xrange`` used previously.)
    for col_num, value in enumerate(head, start=1):
        ws.cell(row=1, column=col_num).value = value

    # Data rows follow immediately below the header, starting at row 2.
    for row_num, row in enumerate(data_list, start=2):
        for col_num, value in enumerate(row, start=1):
            ws.cell(row=row_num, column=col_num).value = value

    return wb
{ "content_hash": "e2607e5768c0fe312c9611862ce52015", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 64, "avg_line_length": 22.21212121212121, "alnum_prop": 0.5566166439290586, "repo_name": "saukrIppl/seahub", "id": "ce0b7c6d818f5f7d487209917a492e6d631484b1", "size": "733", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "seahub/utils/ms_excel.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "329387" }, { "name": "CoffeeScript", "bytes": "21" }, { "name": "HTML", "bytes": "722728" }, { "name": "Java", "bytes": "307193" }, { "name": "JavaScript", "bytes": "7293422" }, { "name": "Makefile", "bytes": "1097" }, { "name": "PLpgSQL", "bytes": "19598" }, { "name": "Python", "bytes": "9050702" }, { "name": "Shell", "bytes": "9695" } ], "symlink_target": "" }