text
stringlengths 4
5.48M
| meta
stringlengths 14
6.54k
|
---|---|
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the djangoratings app.

    Creates the four core tables (IgnoredObject, Score, SimilarUser, Vote)
    and their uniqueness constraints.  This is a *historical* migration:
    Django replays it verbatim to rebuild the original schema, so the field
    definitions below must not be edited after it has been applied anywhere.
    """

    dependencies = [
        # The user foreign keys below target whatever AUTH_USER_MODEL resolves to.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        # Generic relations (content_type + object_id pairs) need ContentType first.
        ('contenttypes', '0001_initial'),
    ]

    operations = [
        # Objects a user has explicitly excluded from rating/recommendation.
        migrations.CreateModel(
            name='IgnoredObject',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('object_id', models.PositiveIntegerField()),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType', on_delete=django.db.models.deletion.CASCADE)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Aggregated rating per (content object, key): running total and vote count.
        migrations.CreateModel(
            name='Score',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('object_id', models.PositiveIntegerField()),
                ('key', models.CharField(max_length=32)),
                ('score', models.IntegerField()),
                ('votes', models.PositiveIntegerField()),
                ('content_type', models.ForeignKey(to='contenttypes.ContentType', on_delete=django.db.models.deletion.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Pairwise agreement stats between users, used for similarity/recommendations.
        migrations.CreateModel(
            name='SimilarUser',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('agrees', models.PositiveIntegerField(default=0)),
                ('disagrees', models.PositiveIntegerField(default=0)),
                ('exclude', models.BooleanField(default=False)),
                ('from_user', models.ForeignKey(related_name='similar_users', to=settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE)),
                ('to_user', models.ForeignKey(related_name='similar_users_from', to=settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # One vote per rater per (content object, key); anonymous raters are
        # identified by ip_address/cookie, hence the nullable user FK.
        migrations.CreateModel(
            name='Vote',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('object_id', models.PositiveIntegerField()),
                ('key', models.CharField(max_length=32)),
                ('score', models.IntegerField()),
                # NOTE(review): models.IPAddressField was removed in Django 1.9;
                # running this historical migration on a modern Django requires
                # squashing or editing it -- confirm the supported Django range.
                ('ip_address', models.IPAddressField()),
                ('cookie', models.CharField(max_length=32, null=True, blank=True)),
                ('date_added', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('date_changed', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
                ('content_type', models.ForeignKey(related_name='votes', to='contenttypes.ContentType', on_delete=django.db.models.deletion.CASCADE)),
                ('user', models.ForeignKey(related_name='votes', blank=True, to=settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE, null=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # A rater (user or anonymous ip/cookie identity) may vote once per object+key.
        migrations.AlterUniqueTogether(
            name='vote',
            unique_together=set([('content_type', 'object_id', 'key', 'user', 'ip_address', 'cookie')]),
        ),
        # One similarity row per ordered user pair.
        migrations.AlterUniqueTogether(
            name='similaruser',
            unique_together=set([('from_user', 'to_user')]),
        ),
        # One aggregate score row per object+key.
        migrations.AlterUniqueTogether(
            name='score',
            unique_together=set([('content_type', 'object_id', 'key')]),
        ),
        # An object can be ignored at most once per user? -- constraint is per
        # (content_type, object_id) only, as declared below.
        migrations.AlterUniqueTogether(
            name='ignoredobject',
            unique_together=set([('content_type', 'object_id')]),
        ),
    ]
| {'content_hash': 'f903cd504db57ed3975231cf8dd4b77d', 'timestamp': '', 'source': 'github', 'line_count': 91, 'max_line_length': 163, 'avg_line_length': 46.24175824175824, 'alnum_prop': 0.5734315589353612, 'repo_name': 'hovel/django-ratings', 'id': '613e38645beb2ded5771b3a0e52782c2beb9dddc', 'size': '4232', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'djangoratings/migrations/0001_initial.py', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'Python', 'bytes': '54217'}]} |
<!DOCTYPE html>
<!-- Collectl disk-usage report page: renders one Chartkick/Highcharts line
     chart per monitored host, from the server-side `hosts` template variable. -->
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Collectl Monitoring Graph</title>
    <!--<script src="http://www.google.com/jsapi"></script>-->
    <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js"></script>
    <script src="http://code.highcharts.com/highcharts.js"></script>
    <script src="js/chartkick.js"></script>
    <!-- Loading Bootstrap -->
    <link href="css/vendor/bootstrap/css/bootstrap.min.css" rel="stylesheet">
    <!-- Loading Flat UI -->
    <link href="css/flat-ui.min.css" rel="stylesheet">
    <!-- jQuery (necessary for Flat UI's JavaScript plugins) -->
    <!-- NOTE(review): jQuery is loaded twice (CDN 1.8.3 above and the local
         vendor copy below) -- confirm which one Flat UI actually needs. -->
    <script src="js/vendor/jquery.min.js"></script>
    <!-- Include all compiled plugins (below), or include individual files as needed -->
    <script src="js/flat-ui.min.js"></script>
    <script src="js/themes/dark-unica.js"></script>
</head>
<body>
<div class="container">
    <br>
    <!-- Report tab bar: this page is the active "Disk Usage" tab. -->
    <ul class="nav nav-tabs">
        <li><a href="report_cpu.html">CPU</a></li>
        <li><a href="report_memory.html">Memory</a></li>
        <li class="active"><a href="#">Disk Usage</a></li>
        <li><a href="report_network.html">Network Usage</a></li>
    </ul>
    <h3>Disk Read (MB)</h3>
    <!-- One full-width read chart per host, titled with the host name. -->
    {% for host in hosts %}
    <div class="row">
        <div class="col-lg-12">
            {% line_chart host.disk_read with library={"title":{"text":host.name}} %}
            <br>
        </div>
    </div>
    {% endfor %}
    <br>
    <h3>Disk Write (MB)</h3>
    <!-- Same layout for write throughput. -->
    {% for host in hosts %}
    <div class="row">
        <div class="col-lg-12">
            {% line_chart host.disk_write with library={"title":{"text":host.name}} %}
            <br>
        </div>
    </div>
    {% endfor %}
</div>
</body>
</html>
</html> | {'content_hash': '56704ea39592b5c54484efd73cc482a7', 'timestamp': '', 'source': 'github', 'line_count': 61, 'max_line_length': 91, 'avg_line_length': 29.524590163934427, 'alnum_prop': 0.5513603553581343, 'repo_name': 'shelan/collectl-monitoring', 'id': '3eb9801d73026f6f6116e8d9e74712e3b21867b4', 'size': '1801', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'templates/disk_template.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '306850'}, {'name': 'HTML', 'bytes': '528076'}, {'name': 'JavaScript', 'bytes': '1001100'}, {'name': 'Python', 'bytes': '3847'}]} |
package org.jboss.weld.tests.proxy.weld1766;

import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.ElementType.TYPE;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import javax.inject.Qualifier;

/**
 * CDI qualifier used by the WELD-1766 proxy reproducer tests to
 * disambiguate bean types at injection points.  Retained at runtime (as
 * required for CDI resolution) and applicable to methods, fields,
 * parameters and types.
 */
@Qualifier
@Retention(RetentionPolicy.RUNTIME)
@Target({ METHOD, FIELD, PARAMETER, TYPE })
public @interface Qualifier2 {
}
| {'content_hash': '44918b45cf7de77bdf61cce04b0af9ef', 'timestamp': '', 'source': 'github', 'line_count': 19, 'max_line_length': 57, 'avg_line_length': 28.68421052631579, 'alnum_prop': 0.8220183486238533, 'repo_name': 'antoinesd/weld-core', 'id': 'fe9d441393976993a88540d8f1d9a188d5ed205f', 'size': '1320', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'tests-arquillian/src/test/java/org/jboss/weld/tests/proxy/weld1766/Qualifier2.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '5226'}, {'name': 'Groovy', 'bytes': '7775'}, {'name': 'HTML', 'bytes': '97201'}, {'name': 'Java', 'bytes': '9987377'}, {'name': 'JavaScript', 'bytes': '184606'}]} |
package com.intellij.codeInsight.template.macro;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.codeInsight.template.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import groovy.lang.Binding;
import groovy.lang.GroovyShell;
import groovy.lang.Script;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.util.Arrays;
import java.util.Collection;
/**
 * Live-template macro {@code groovyScript(...)}: evaluates the first
 * parameter as Groovy source (or as a path to a Groovy script file, if such
 * a file exists) and uses the result as the template value / lookup items.
 *
 * @author Maxim.Mossienko
 */
public class GroovyScriptMacro extends Macro {
  @Override
  public String getName() {
    return "groovyScript";
  }

  @Override
  public String getPresentableName() {
    return CodeInsightBundle.message("macro.groovy.script");
  }

  /**
   * Runs the script and converts its result to a single text value.
   * Collections and arrays contribute their first element only; a null
   * script result (or no parameters) yields no macro result.
   */
  @Override
  public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) {
    if (params.length == 0) return null;
    Object o = runIt(params, context);
    if (o instanceof Collection && !((Collection)o).isEmpty()) {
      return new TextResult(toNormalizedString(((Collection)o).iterator().next()));
    }
    if (o instanceof Object[] && ((Object[])o).length > 0) {
      return new TextResult(toNormalizedString(((Object[])o)[0]));
    }
    if (o != null) return new TextResult(toNormalizedString(o));
    return null;
  }

  /**
   * Evaluates the script described by {@code params[0]}.  The remaining
   * parameters are bound as variables {@code _1.._n} (list results become
   * lists of strings), and the current editor is bound as {@code _editor}.
   * On any failure the error text is returned instead of the script result,
   * so the user sees the problem inline in the expanded template.
   */
  private static Object runIt(Expression[] params, ExpressionContext context) {
    try {
      Result result = params[0].calculateResult(context);
      if (result == null) return null;
      String text = result.toString();
      GroovyShell shell = new GroovyShell();
      File possibleFile = new File(text);
      // A parameter that names an existing file is run as that file;
      // otherwise the parameter text itself is the script source.
      Script script = possibleFile.exists() ? shell.parse(possibleFile) : shell.parse(text);
      Binding binding = new Binding();

      for(int i = 1; i < params.length; ++i) {
        Result paramResult = params[i].calculateResult(context);
        Object value = null;
        if (paramResult instanceof ListResult) {
          value = ContainerUtil.map2List(((ListResult)paramResult).getComponents(), result1 -> result1.toString());
        } else if (paramResult != null) {
          value = paramResult.toString();
        }
        binding.setVariable("_"+i, value);
      }
      binding.setVariable("_editor", context.getEditor());

      script.setBinding(binding);
      return script.run();
    } catch (Exception | Error e) {
      // getLocalizedMessage() may be null (e.g. an NPE thrown without a
      // message); fall back to toString() to avoid a secondary NPE here.
      String message = e.getLocalizedMessage();
      return StringUtil.convertLineSeparators(message != null ? message : e.toString());
    }
  }

  @Override
  public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) {
    return calculateResult(params, context);
  }

  /** Offers every element of a collection/array result as a completion item. */
  @Override
  public LookupElement[] calculateLookupItems(@NotNull Expression[] params, ExpressionContext context) {
    Object o = runIt(params, context);
    Collection collection = o instanceof Collection ? (Collection)o :
                            o instanceof Object[] ? Arrays.asList((Object[])o) :
                            ContainerUtil.createMaybeSingletonList(o);
    return ContainerUtil.map2Array(collection, LookupElement.class, item -> LookupElementBuilder.create(toNormalizedString(item)));
  }

  /** Normalizes line separators so results paste cleanly into the editor. */
  private static String toNormalizedString(Object o) {
    return StringUtil.convertLineSeparators(o.toString());
  }
}
| {'content_hash': '786671c14c2ce676b8319db5e00bc0ac', 'timestamp': '', 'source': 'github', 'line_count': 96, 'max_line_length': 131, 'avg_line_length': 34.708333333333336, 'alnum_prop': 0.696578631452581, 'repo_name': 'paplorinc/intellij-community', 'id': '791deafe7415bafd1c3c1f9cccc921abddf31e7f', 'size': '3932', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'platform/lang-impl/src/com/intellij/codeInsight/template/macro/GroovyScriptMacro.java', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
<!-- Blaze template: the user's profile/settings page. Field editability
     (username, introduction, email, password) is gated by the allow*Change
     template helpers; disabled inputs surface the reason via their title. -->
<template name="accountProfile">
    <section class="page-container page-home page-static">
        <header class="fixed-title">
            <div class="header-container">
                <!-- <header id="header" class="page-topbar"> -->
                {{> burger}}
                <div class="pagetitle">
                    <h2>
                        <span class="room-title">{{_ "Profile"}}</span>
                    </h2>
                </div>
            </div>
        </header>
        <div class="container">
            <div class="rocket-form">
                <fieldset>
                    <!-- <div class="input-line">
                        <label for="realname">{{_ "Name"}}</label>
                        <div>
                            <input type="text" name="realname" id="realname" value="{{realname}}" />
                        </div>
                        <div>
                            <small class="settings-description">{{{_ "for_login"}}}</small>
                        </div>
                    </div> -->
                    <!-- Username: read-only unless the server allows changes. -->
                    <div class="input-line">
                        <label for="username">{{_ "Username"}}</label>
                        <div>
                            {{#if allowUsernameChange}}
                            <input type="text" name="username" id="username" value="{{username}}" />
                            {{else}}
                            <input type="text" name="username" id="username" value="{{username}}" readonly="readonly" title="{{usernameChangeDisabled}}" />
                            {{/if}}
                        </div>
                    </div>
                    <div class="input-line">
                        <label for="introduction">{{_ "Introduction"}}</label>
                        <div>
                            {{#if allowUserIntroductionChange}}
                            <input type="text" name="introduction" id="introduction" value="{{introduction}}" />
                            {{else}}
                            <input type="text" name="introduction" id="introduction" value="{{introduction}}" readonly="readonly" title="{{introductionChangeDisabled}}"/>
                            {{/if}}
                        </div>
                    </div>
                    <!-- Email: shows a verified badge, or a resend button when unverified. -->
                    <div class="input-line">
                        <label for="email">{{_ "Email"}}</label>
                        <div>
                            {{#if emailVerified}}
                            <div class="right">
                                <i class="icon-ok green" title="{{_ "Email_verified" }}"></i>
                            </div>
                            {{/if}}
                            {{#if allowEmailChange}}
                            <input type="email" name="email" id="email" value="{{email}}" />
                            {{else}}
                            <input type="email" name="email" id="email" value="{{email}}" readonly="readonly" title="{{emailChangeDisabled}}" />
                            {{/if}}
                            {{#unless emailVerified}}
                            <div class="text-right">
                                <button class="button waves-effect waves-light" id="resend-verification-email">{{_ "Resend_verification_email"}}</button>
                            </div>
                            {{/unless}}
                        </div>
                    </div>
                    <div class="input-line">
                        <label for="password">{{_ "New_password"}}</label>
                        <div>
                            {{#if allowPasswordChange}}
                            <input type="password" name="password" id="password" />
                            {{else}}
                            <input type="password" name="password" id="password" readonly="readonly" title="{{passwordChangeDisabled}}"/>
                            {{/if}}
                        </div>
                    </div>
                </fieldset>
                <div class="submit">
                    <button class="button waves-effect waves-light"><i class="icon-send"></i><span>{{_ "Save_changes"}}</span></button>
                </div>
                <!-- Invalidates every other active login session for this account. -->
                <div class="logoutOthers">
                    <button class="button waves-effect waves-light red">{{_ "Logout_Others"}}</button>
                </div>
                {{#if allowDeleteOwnAccount}}
                <div class="delete-account text-right">
                    <button class="button waves-effect waves-light red"><i class="icon-trash"></i><span>{{_ "Delete_my_account"}}</span></button>
                </div>
                {{/if}}
            </div>
        </div>
    </section>
</template>
| {'content_hash': '71f0638320f53db1b720872fa1637886', 'timestamp': '', 'source': 'github', 'line_count': 91, 'max_line_length': 149, 'avg_line_length': 35.86813186813187, 'alnum_prop': 0.5640318627450981, 'repo_name': 'YY030913/tp', 'id': '37ef3a323afaee49b1ef34b42b1b6daf54d69eba', 'size': '3264', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'packages/caoliao-ui-account/account/accountProfile.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Batchfile', 'bytes': '532'}, {'name': 'C', 'bytes': '61086'}, {'name': 'CSS', 'bytes': '560436'}, {'name': "Cap'n Proto", 'bytes': '3530'}, {'name': 'CoffeeScript', 'bytes': '1123987'}, {'name': 'HTML', 'bytes': '3985077'}, {'name': 'JavaScript', 'bytes': '2634415'}, {'name': 'Objective-C', 'bytes': '374432'}, {'name': 'Perl', 'bytes': '449'}, {'name': 'Ruby', 'bytes': '4237'}, {'name': 'Shell', 'bytes': '6985'}]} |
<!--
====================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<title>Paged Table Page</title>
</head>
<body><ul><li wicket:id="table"><span wicket:id="txt">one</span></li><li wicket:id="table"><span wicket:id="txt">two</span></li></ul>
<span wicket:id="navigator"><wicket:panel>
<a wicket:id="first" class="first" disabled="disabled" title="Go to first page"><<</a>
<a wicket:id="prev" class="prev" disabled="disabled" title="Go to previous page"><</a>
<span wicket:id="navigation" class="goto">
<a wicket:id="pageLink" disabled="disabled" title="Go to page 1"><span wicket:id="pageNumber">1</span></a>
</span>, <span wicket:id="navigation" class="goto">
<a wicket:id="pageLink" href="./org.apache.wicket.markup.html.list.PagedTableNavigatorWithMarginPage?0-1.ILinkListener-navigator-navigation-1-pageLink" title="Go to page 2"><span wicket:id="pageNumber">2</span></a>
</span>, <span wicket:id="navigation" class="goto">
<a wicket:id="pageLink" href="./org.apache.wicket.markup.html.list.PagedTableNavigatorWithMarginPage?0-1.ILinkListener-navigator-navigation-2-pageLink" title="Go to page 3"><span wicket:id="pageNumber">3</span></a>
</span>, <span wicket:id="navigation" class="goto">
<a wicket:id="pageLink" href="./org.apache.wicket.markup.html.list.PagedTableNavigatorWithMarginPage?0-1.ILinkListener-navigator-navigation-3-pageLink" title="Go to page 4"><span wicket:id="pageNumber">4</span></a>
</span>, <span wicket:id="navigation" class="goto">
<a wicket:id="pageLink" href="./org.apache.wicket.markup.html.list.PagedTableNavigatorWithMarginPage?0-1.ILinkListener-navigator-navigation-4-pageLink" title="Go to page 5"><span wicket:id="pageNumber">5</span></a>
</span>
<a wicket:id="next" class="next" href="./org.apache.wicket.markup.html.list.PagedTableNavigatorWithMarginPage?0-1.ILinkListener-navigator-next" title="Go to next page">></a>
<a wicket:id="last" class="last" href="./org.apache.wicket.markup.html.list.PagedTableNavigatorWithMarginPage?0-1.ILinkListener-navigator-last" title="Go to last page">>></a>
</wicket:panel></span>
</body>
</html>
| {'content_hash': 'dfdd78de2ab4eb68b506bd9db01be53a', 'timestamp': '', 'source': 'github', 'line_count': 38, 'max_line_length': 216, 'avg_line_length': 70.94736842105263, 'alnum_prop': 0.7099406528189911, 'repo_name': 'astrapi69/wicket', 'id': 'e79c549dd7565aa7aeb90a85ded77b6aed48c3f4', 'size': '2696', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'wicket-core/src/test/java/org/apache/wicket/markup/html/list/PagedTableNavigatorWithMarginExpectedResult_1.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '71908'}, {'name': 'Java', 'bytes': '11808347'}, {'name': 'JavaScript', 'bytes': '488289'}, {'name': 'Logos', 'bytes': '14214'}, {'name': 'Python', 'bytes': '1557'}, {'name': 'Shell', 'bytes': '47630'}, {'name': 'XSLT', 'bytes': '2162'}]} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>ext-lib: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.7.2 / ext-lib - 0.11.6</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
ext-lib
<small>
0.11.6
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-10-30 16:29:12 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-30 16:29:12 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-ocamlbuild base OCamlbuild binary and libraries distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.7.2 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.02.3 The OCaml compiler (virtual package)
ocaml-base-compiler 4.02.3 Official 4.02.3 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
# opam file:
opam-version: "2.0"
synopsis: "A library of Coq definitions, theorems, and tactics"
description:
"A collection of theories and plugins that may be useful in other Coq developments."
maintainer: "[email protected]"
authors: "Gregory Malecha"
license: "BSD-2-Clause"
tags: "logpath:ExtLib"
homepage: "https://github.com/coq-community/coq-ext-lib"
bug-reports: "https://github.com/coq-community/coq-ext-lib/issues"
depends: [
"ocaml"
"coq" {>= "8.8"}
]
build: [make "-j%{jobs}%" "theories"]
run-test: [make "-j%{jobs}%" "examples"]
install: [make "install"]
dev-repo: "git+https://github.com/coq-community/coq-ext-lib.git"
url {
src: "https://github.com/coq-community/coq-ext-lib/archive/v0.11.6.tar.gz"
checksum: [
"md5=442577afb6ff3a02043478690057cc21"
"sha512=5e429e291439885c57ad537d2bf644345973740e29cff42e4045367f9f1e22a406b15e17af415e8d301eb0caf179495a506ea04c1ce39f94875366a49aa2db80"
]
}</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-ext-lib.0.11.6 coq.8.7.2</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.7.2).
The following dependencies couldn't be met:
- coq-ext-lib -> coq >= 8.8 -> ocaml >= 4.05.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-ext-lib.0.11.6</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {'content_hash': 'e846dbc078e4d1ade297e7d74db1a59a', 'timestamp': '', 'source': 'github', 'line_count': 168, 'max_line_length': 159, 'avg_line_length': 41.98809523809524, 'alnum_prop': 0.5467819676779132, 'repo_name': 'coq-bench/coq-bench.github.io', 'id': 'd781f6cce02538be473c21730daed0fc5dbc7091', 'size': '7079', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'clean/Linux-x86_64-4.02.3-2.0.6/released/8.7.2/ext-lib/0.11.6.html', 'mode': '33188', 'license': 'mit', 'language': []} |
<!-- Shadow-DOM template for the <oi-tasklist> custom element (see the script
     below in this file).  Selectors are duplicated with and without
     ::content because the element's light-DOM <oi-task> children are
     distributed through <content> and must be styled separately. -->
<template id="task-head">
    <style scoped>
        *{
            box-sizing: border-box;
        }
        h1, h3, a {
            color: #CCC;
        }
        div {
            border-top: solid 1px #fff;
            padding-left: 0px;
            width:100%;
        }
        /* Shared cell chrome for every column of the task table. */
        .id, .state, .title, .author, .target, .comment {
            padding: 5px 5px;
            border-right: solid 1px #fff;
        }
        .head{
            background-color: #fc0;
            color: #fff;
        }
        /* Filter inputs embedded in the header row. */
        .head input{
            width : 100%;
            background-color: #fd8;
            margin-top:1px;
            border:none;
        }
        .head span{
            display:inline-block;
        }
        oi-task{
            display:inline-table;
            width:100%;
            background-color: #eee;
            color: #000;
            padding: 0;
        }
        ::content oi-task{
            display:inline-table;
            width:100%;
            background-color: #eee;
            color: #000;
            padding: 0;
        }
        oi-task span{
            display:inline-block;
            background-color: #eee;
            margin:0;
            overflow: hidden;
        }
        ::content oi-task span{
            display:inline-block;
            background-color: #eee;
            margin:0;
            overflow: hidden;
        }
        /* Column widths: id/state/target 5%, author/comment 15%, title 55%. */
        .id, .state, .target {
            width:5%;
        }
        ::content .id, ::content .state, ::content .target {
            width:5%;
        }
        .author, .comment{
            width:15%;
        }
        ::content .author, ::content .comment{
            width:15%;
        }
        .title {
            width:55%;
        }
        ::content .title {
            width:55%;
        }
        .titleForm {
            width:100px;
        }
        ::content .titleForm {
            width:400px;
        }
        /* The add-task form is hidden until the "+" icon is clicked. */
        .formAddTask{
            display:none;
        }
        .titleForm, .targetForm, .stateForm{
            width:500px;
        }
    </style>
    <h1>Tableau des tâches du projet</h1>
    <h3 class="stitre"></h3>
    <img src="../../images/add.svg" class="show-form-task" >
    <!-- Inline form for creating a task; the target <select> is populated at
         runtime from the project's open releases. -->
    <div class="formAddTask">
        <label>Tâche<br><input type="text" name="title" class="titleForm"></label><br>
        <input type="hidden" name="author" value="http://pp.projects.openinitiative.com/user/ldpcontainer/601">
        <label>lot<br>
            <select name="target" class="targetForm">
            </select>
        </label><br>
        <label>Etat<br>
            <select name="state" class="stateForm">
                <option value="0">Proposé</option>
                <option value="1">Accepté</option>
                <option value="2">Démarré</option>
                <option value="3">Livré</option>
                <option value="4">Validé</option>
            </select>
        </label>
        <input type="image" name="submit" src="../../images/ok.svg" border="0" alt="Submit" class="submitForm">
        <input type="image" src="../../images/cancel.svg" border="0" alt="reset" class="resetForm"><br>
    </div>
    <a href="./kanban.html">Accès au kanban</a><br>
    <!-- Header row (column titles + filter inputs), filled in by the script. -->
    <div class="head"></div>
    <!-- Light-DOM <oi-task> children are distributed here. -->
    <content></content>
</template>
<script type="text/javascript" src="http://cdn.happy-dev.fr/LDP-framework/ldpframework.js"></script>
<script>
(function(){
var localDoc = document._currentScript.ownerDocument;
var store = new MyStore({context: "http://owl.openinitiative.com/oicontext.jsonld", defaultSerializer: 'application/ld+json'});
var taskListPrototype = Object.create(HTMLElement.prototype, {
createdCallback: {
value: function() {
var clone = document.importNode(localDoc.querySelector('#task-head').content, true);
this.createShadowRoot().appendChild(clone);
store.get(this.dataset.src).then(project=>{
var content= '';
//reading of targets in this project
project.release_set.forEach(function(target, index){
target.name = (target.name==undefined)?project.target.name:target.name;
if(!target.done){content +=`<option value="${target['@id']}">${target.name}</option>`;}
}.bind(this));
this.shadowRoot.children[4].querySelector('.targetForm').innerHTML = content;
project.descendants.forEach(function(task, index){
var newTask = document.createElement('oi-task');
newTask.setAttribute('data-src', task['@id']);
this.appendChild(newTask);
}.bind(this));
//creation of sub-title
this.shadowRoot.querySelector('.stitre').innerHTML = `Project : ${project.title} , Url : ${project['@id']}`;
//creation of header of table
var content = "";
[{n:'Id',s:'id'}, {n:'Etat',s:'state'}, {n:'Titre',s:'title'}, {n:'Lot',s:'target'}, {n:'Auteur',s:'author'}, {n:'Commentaire(s)',s:'comment'}].forEach((col,i) => content +=
`<span class=${col.s}>${col.n}<br><input id="${col.s}"></input></span>`);
var tableHeader= this.shadowRoot.querySelector('.head');
tableHeader.innerHTML = content;
// click to show the form to append a task
this.shadowRoot.querySelector('.show-form-task').addEventListener('click', c => {
this.shadowRoot.querySelector('.formAddTask').style.display = (this.shadowRoot.querySelector('.formAddTask').style.display == 'block')?'none':'block';
});
// click to append a task
this.shadowRoot.querySelector('.submitForm').addEventListener('click', c => {
event.preventDefault(); //bloc function submit as HTML
var newProject = {"@context" : "http://owl.openinitiative.com/oicontext.jsonld" , 'target' : {'@id' : ''}}
newProject.title = this.shadowRoot.querySelector('.titleForm').value;
newProject.target['@id'] = this.shadowRoot.querySelector('.targetForm').value;
newProject.state = this.shadowRoot.querySelector('.stateForm').value;
store.save(newProject).then(project=>{
// console.log(project['@graph'][0]['@id']);
var newTask = document.createElement('oi-task');
newTask.setAttribute('data-src', "http://pp.projects.openinitiative.com/project/ldpcontainer/" + project['@graph'][0]['@id']);
this.appendChild(newTask);
})
this.shadowRoot.querySelector('.titleForm').value = '';
});
// click to hide the form to append a task
this.shadowRoot.querySelector('.resetForm').addEventListener('click', c => {
this.shadowRoot.querySelector('.formAddTask').style.display = 'none';
});
//detection of mouseUp in a field of the header to change the filter
tableHeader.addEventListener("keyup",function(){
// show every tasks
var razFilter = Array.prototype.map.call(this.querySelectorAll('oi-task'), elt => elt.style.display='inline-table');
//hide the tasks which not match
// chxxx contient the string to search in the xxx column
var chid = tableHeader.querySelector('#id').value;
var chstate =tableHeader.querySelector('#state').value;
var chtitle = tableHeader.querySelector('#title').value.toUpperCase();
var chauthor = tableHeader.querySelector('#author').value.toUpperCase();
var chTarget = tableHeader.querySelector('#target').value.toUpperCase();
var chcomment = tableHeader.querySelector('#comment').value.toUpperCase();
// basexxx contient all the <span class="xxx">
var idFilter = Array.prototype.map.call(this.querySelectorAll('oi-task .id'), elt =>{if(elt.innerHTML.indexOf(chid)==-1){elt.parentNode.style.display='none';}});
var stateFilter = Array.prototype.map.call(this.querySelectorAll('oi-task .state'), elt =>{if(elt.innerHTML.indexOf(chstate)==-1){elt.parentNode.style.display='none';}});
var titleFilter = Array.prototype.map.call(this.querySelectorAll('oi-task .title'), elt =>{if(elt.innerHTML.toUpperCase().indexOf(chtitle)==-1){elt.parentNode.style.display='none';}});
var authorFilter = Array.prototype.map.call(this.querySelectorAll('oi-task .author'), elt =>{if(elt.innerHTML.toUpperCase().indexOf(chauthor)==-1){elt.parentNode.style.display='none';}});
var targetFilter = Array.prototype.map.call(this.querySelectorAll('oi-task .target'), elt =>{if(elt.innerHTML.toUpperCase().indexOf(chTarget)==-1){elt.parentNode.style.display='none';}});
var commentFilter = Array.prototype.map.call(this.querySelectorAll('oi-task .comment'), elt =>{if(elt.innerHTML.toUpperCase().indexOf(chcomment)==-1){elt.parentNode.style.display='none';}});
}.bind(this));
});
}
}
});
document.registerElement('oi-tasklist',{prototype: taskListPrototype});
})();
</script>
| {'content_hash': '5f8a230834de89305b9a786bf796ef99', 'timestamp': '', 'source': 'github', 'line_count': 210, 'max_line_length': 218, 'avg_line_length': 47.7047619047619, 'alnum_prop': 0.5093831104012777, 'repo_name': 'Open-Initiative/ldp-prj-mgt', 'id': 'aafae401cded806fcce71359818c70144b5ed416', 'size': '10027', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'library/components/oi-tasklist.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '1750'}, {'name': 'HTML', 'bytes': '59201'}, {'name': 'JavaScript', 'bytes': '255960'}]} |
require 'open-uri'
require 'eventmachine'
module Rack
  # Rack middleware that flags requests originating from Tor exit nodes.
  #
  # On boot it downloads the Tor Project's exit list (tailored to this
  # service's external_ip/external_port when provided), then sets
  # env['tor_exit_user'] to true/false on every request.  The list is
  # refreshed in a background EventMachine timer every update_frequency
  # seconds (0 disables refreshing).
  class DetectTor
    def initialize(app, options={})
      @app = app
      @options = {
        'external_ip' => nil,
        'external_port' => nil,
        'update_frequency' => 60*60
      }.merge(options)

      # "ip/port" tag used to label log lines; '*' stands in for an
      # unspecified component.
      @identifier = Hash[@options.select{|k,v| k =~ /^external_/}.
        sort_by{|k,v| k}].values.map{|v| v.to_s == '' ? '*' : v}.join('/')

      log_message 'Fetching initial list of tor exits...'
      # Fall back to an empty Array (not a Hash) so the type matches what
      # fetch_tor_exits returns and what the update timer assigns later.
      @tor_exits = fetch_tor_exits || []

      start_update_timer unless @options['update_frequency'].to_i == 0
    end

    # Rack entry point: annotates env and delegates downstream.
    def call(env)
      env['tor_exit_user'] = @tor_exits.include? Rack::Request.new(env).ip unless env['tor_exit_user'] == true
      @app.call(env)
    end

    private

    # Downloads the current exit list.  Returns an Array of IP strings, or
    # nil on HTTP failure (callers keep their previous list in that case).
    def fetch_tor_exits
      begin
        if @options.select{|k,v| k =~ /^external_/}.values.map{|v| v.to_s}.include? ''
          # Without ip/port we can only use the unfiltered list of all exits.
          log_message "WARNING: external_ip/external_port not specified. " +
            "Using list of ALL exits. Results will NOT be accurate"
          tor_exits = open('https://check.torproject.org/exit-addresses').read.
            split("\n").select{|i| i =~ /^ExitAddress/}.map{|j| j.split(' ')[1]}
        else
          # Bulk list of exits that can reach this specific ip:port.
          check_url = "https://check.torproject.org/cgi-bin/TorBulkExitList.py?" +
            "ip=#{@options['external_ip']}&port=#{@options['external_port']}"
          tor_exits = open(check_url).read.split("\n").select{|i| !(i =~ /^\#/)}
        end
      rescue OpenURI::HTTPError => e
        log_error "Error fetching list of tor exits (#{e})."
        return nil
      end

      log_message "Found #{tor_exits.count} exits."
      return tor_exits
    end

    # Periodically refreshes @tor_exits on a background EventMachine reactor
    # thread; a failed fetch keeps the previous list.
    def start_update_timer
      log_message "Starting update timer... (updating every #{@options['update_frequency']} seconds)"

      Thread.new do
        EventMachine.run do
          @update_timer = EventMachine::PeriodicTimer.new(@options['update_frequency']) do
            log_message 'Updating list of tor exits...'
            @tor_exits = fetch_tor_exits || @tor_exits
          end
        end
      end
    end

    def log_message(message)
      $stdout.puts "Rack::DetectTor [#{@identifier}]: #{message}"
    end

    def log_error(message)
      $stderr.puts "Rack::DetectTor [#{@identifier}]: ERROR: #{message}"
    end
  end
end
//// METADATA excludeEnvs="test%schema"
-- excluding these from schema-based envs as these object types are not supported with schemas
-- Create the user-defined type Boolean2 as a non-nullable alias of tinyint.
sp_addtype N'Boolean2', N'tinyint', N'not null'
GO
-- Bind booleanRule2 so columns of type Boolean2 are validated by that rule.
sp_bindrule booleanRule2, Boolean2
GO
| {'content_hash': 'd350ca5dbd8243ef9c4999138ec7986a', 'timestamp': '', 'source': 'github', 'line_count': 7, 'max_line_length': 94, 'avg_line_length': 33.142857142857146, 'alnum_prop': 0.75, 'repo_name': 'shantstepanian/obevo', 'id': '71f3adc7849d2003feaacc584677cd39755f2a38', 'size': '841', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'obevo-db-impls/obevo-db-sybase-ase/src/test/resources/platforms/sybasease/step2/oats/usertype/Boolean2.sql', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '2302'}, {'name': 'FreeMarker', 'bytes': '9115'}, {'name': 'Java', 'bytes': '2677165'}, {'name': 'JavaScript', 'bytes': '3135'}, {'name': 'Kotlin', 'bytes': '232754'}, {'name': 'PLSQL', 'bytes': '19595'}, {'name': 'PLpgSQL', 'bytes': '24497'}, {'name': 'PowerShell', 'bytes': '3558'}, {'name': 'SQLPL', 'bytes': '29981'}, {'name': 'Shell', 'bytes': '11626'}]} |
<?php
/**
* @author [email protected]
* (c) ABOUT YOU GmbH
*/
namespace AboutYou\SDK\Model\ProductSearchResult;
/**
 * Base class for facet term counts returned by a product search.
 *
 * Holds how many products match in total, how many also carry another
 * facet, and how many carry no facet at all.
 */
abstract class TermsCounts
{
    /** @var integer total number of matching products */
    protected $productCountTotal;

    /** @var integer number of matching products that also carry another facet */
    protected $productCountWithOtherFacet;

    /** @var integer number of matching products that carry no facet at all */
    protected $productCountWithoutAnyFacet;

    /**
     * @param integer $productCountTotal
     * @param integer $productCountWithOtherFacet
     * @param integer $productCountWithoutAnyFacet
     */
    protected function __construct($productCountTotal, $productCountWithOtherFacet, $productCountWithoutAnyFacet)
    {
        $this->productCountTotal = $productCountTotal;
        $this->productCountWithOtherFacet = $productCountWithOtherFacet;
        $this->productCountWithoutAnyFacet = $productCountWithoutAnyFacet;
    }

    /**
     * @return integer
     */
    public function getProductCountTotal()
    {
        return $this->productCountTotal;
    }

    /**
     * Correctly named accessor for the with-other-facet count.
     *
     * @return integer
     */
    public function getProductCountWithOtherFacet()
    {
        return $this->productCountWithOtherFacet;
    }

    /**
     * Misnamed legacy accessor kept for backward compatibility: the value is
     * a product count, not an id. Prefer getProductCountWithOtherFacet().
     *
     * @return integer
     */
    public function getProductCountWithOtherFacetId()
    {
        return $this->productCountWithOtherFacet;
    }

    /**
     * @return integer
     */
    public function getProductCountWithoutAnyFacet()
    {
        return $this->productCountWithoutAnyFacet;
    }
}
<?xml version="1.0"?>
<!--
The MIT License
Copyright (c) 2014-2016 Ilkka Seppälä
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-->
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.iluwatar</groupId>
<artifactId>java-design-patterns</artifactId>
<version>1.17.0-SNAPSHOT</version>
</parent>
<artifactId>model-view-presenter</artifactId>
<name>model-view-presenter</name>
<url>http://maven.apache.org</url>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>net.java.dev.swing-layout</groupId>
<artifactId>swing-layout</artifactId>
<version>1.0.2</version>
</dependency>
</dependencies>
</project>
| {'content_hash': '85df116679545f44cf9d958c6cd93f3e', 'timestamp': '', 'source': 'github', 'line_count': 49, 'max_line_length': 149, 'avg_line_length': 41.30612244897959, 'alnum_prop': 0.7208498023715415, 'repo_name': 'inbreak/java-design-patterns', 'id': 'ec6f570ee7dcabaf4c4b41cdde8428e487304397', 'size': '2026', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'model-view-presenter/pom.xml', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '5808'}, {'name': 'Gherkin', 'bytes': '1078'}, {'name': 'HTML', 'bytes': '19979'}, {'name': 'Java', 'bytes': '2243524'}, {'name': 'JavaScript', 'bytes': '1191'}, {'name': 'Python', 'bytes': '2865'}, {'name': 'Shell', 'bytes': '1718'}]} |
flyPanels - responsive off canvas menu panels
## Features
FlyPanels is a responsive off canvas menu plugin for websites or web apps. It supports all modern browsers from IE11. This new version is written in vanilla JavaScript and has no other dependencies. The old jQuery version can still be accessed on a [separate branch called jQuery](https://github.com/SubZane/flyPanels/tree/jQuery). Take note that the old jQuery version will not receive as much attention from me as I'm moving away from jQuery.
Compared to many other off canvas menu plugins out there this one is more solid and behaves more like a native solution. Try it!
### [View demo](http://www.andreasnorman.com/flypanels)
## Browser Support
- Google Chrome
- Firefox 40+
- Safari 14+
- Mobile Safari iOS 12+
## Installation
```
yarn add flyPanels
```
### Setup
```html
<!-- You'll need to include flyPanels of course! -->
<script src="flyPanels.js"></script>
<!-- Some basic CSS is required of course -->
<link rel="stylesheet" href="css/flyPanels.css" />
```
## Usage
```javascript
document.addEventListener('DOMContentLoaded', function (event) {
flyPanels.init();
});
```
### Settings and Defaults
```javascript
options: {
treeMenu: {
init: false,
expandHandler: 'span.expand',
UseJSON: false,
OnExpandOpen: function () {},
OnExpandClose: function () {},
OnJSONLoaded: function () {},
JSONLoadError: function () {}
},
search: {
init: false,
saveQueryCookie: false
},
onInit: function () {},
onInitTreeMenu: function () {},
onOpen: function () {},
onClose: function () {},
onCloseAll: function () {},
afterWindowResize: function () {},
OnAttachEvents: function () {},
onWindowResize: function () {},
onEmptySearchResult: function () {},
onSearchError: function () {},
onSearchSuccess: function () {},
onInitSearch: function () {},
onDestroy: function () {}
};
```
- `treeMenu`:
- `init`: Boolean - If it should look for and init the expanding treemenu.
- `expandHandler`: String - The element that should have the click event to open/close submenu (expand/contract)
- `UseJSON`: Boolean - The treemenu can generate HTML markup from a JSON file if specified.
- `OnExpandOpen`: What to do just after a node has expanded/opened.
- `OnExpandClose`: What to do just after a node has closed.
- `OnJSONLoaded`: What to do just after the JSON has been loaded.
- `JSONLoadError`: What to do if an error occurred during the loading of the JSON.
- `search`:
- `init`: Boolean - If it should look for and init the search component.
- `saveQueryCookie`: Boolean - If the search query should be stored in a session cookie to remember the last search.
- `onInit`: What to do after the plugin is initialized.
- `onLoad`: What to do after the plugin has loaded.
- `onOpen`: What to do after a panel has opened.
- `onClose`: What to do after a panel has closed.
- `onCloseAll`: What to do after all panels has closed.
- `afterWindowResize`: What to do just after a window resize.
- `OnAttachEvents`: What to do just after events has been attached.
- `onWindowResize`: What to do just on window resize.
- `onEmptySearchResult`: What to do if search result is empty.
- `onSearchError`: What to do just if search returns an error.
- `onSearchSuccess`: What to do if search is successful.
- `onInitSearch`: What to do just after search is initialized.
- `onInitTreeMenu`: What to do just after tree menu is initialized.
- `onDestroy`: What to do just after plugin is destroyed.
### Typical setup
This could be your typical script setup.
```javascript
document.addEventListener('DOMContentLoaded', function (event) {
flyPanels.init();
});
```
### Html needed for a basic setup
```html
<div class="flypanels-overlay"></div>
<div id="flypanels-menubutton" class="flypanels-button menu" data-target="flypanels-menu"></div>
<div id="flypanels-menu" class="flypanels-panel door-left">
<div class="flypanels-content door-left">
<div class="flypanels-inner">
<p>panel content goes here</p>
</div>
</div>
</div>
```
### Using the expanding treemenu component
If you want to use the treemenu component you'll need to set it to true in the options and you'll need to add the necessary HTML markup.
To customize the appearance of the treemenu you can either modify the LESS files and rebuild or just simply override the default styles.
```javascript
document.addEventListener('DOMContentLoaded', function (event) {
flyPanels.init({
onInit: function () {
fpm_treemenu.init();
},
});
});
```
```html
<div class="flypanels-overlay"></div>
<div id="flypanels-menubutton" class="flypanels-button menu" data-target="flypanels-menu"></div>
<div id="flypanels-menu" class="flypanels-panel door-left">
<div class="flypanels-content door-left">
<div class="flypanels-inner">
<nav class="flypanels-treemenu" role="navigation" aria-label="Main navigation" id="flypanels-treemenu">
<ul>
<li class="haschildren">
<a href="#"
><span class="link">Example menu item</span> <span class="expand">2<i class="fa icon"></i></span
></a>
<ul>
<li class="haschildren">
<a href="#"
><span class="link">Example menu item</span> <span class="expand">2<i class="fa icon"></i></span
></a>
<ul>
<li class="haschildren">
<a href="#"
><span class="link">Example menu item</span> <span class="expand">2<i class="fa icon"></i></span
></a>
<ul>
<li class="haschildren">
<a href="#"
><span class="link">Example menu item</span> <span class="expand">2<i class="fa icon"></i></span
></a>
<ul>
<li>
<a href="#"><span class="link">Example menu item</span></a>
</li>
<li>
<a href="#"><span class="link">Example menu item</span></a>
</li>
</ul>
</li>
<li>
<a href="#"><span class="link">Example menu item</span></a>
</li>
</ul>
</li>
<li>
<a href="#"><span class="link">Example menu item</span></a>
</li>
</ul>
</li>
<li>
<a href="#"><span class="link">Example menu item</span></a>
</li>
</ul>
</li>
</ul>
</nav>
</div>
</div>
</div>
```
### Using the expanding treemenu component with JSON
If you want to use the treemenu component with JSON as data srouce you'll need to set it to true in the options and you'll need to add the necessary HTML markup. You will also need to specify the URL to the JSON file in the data attribute `data-json`.
To customize the appearance of the treemenu you can either modify the LESS files and rebuild or just simply override the default styles.
```javascript
document.addEventListener('DOMContentLoaded', function (event) {
flyPanels.init({
onInit: function () {
fpm_treemenu.init({
UseJSON: true,
});
},
});
});
```
```html
<div class="flypanels-overlay"></div>
<div id="flypanels-menubutton" class="flypanels-button menu" data-target="flypanels-menu"></div>
<div id="flypanels-menu" class="flypanels-panel door-left">
<div class="flypanels-content door-left">
<div class="flypanels-inner">
<nav class="flypanels-treemenu" role="navigation" aria-label="Main navigation" data-json="json/treemenu.json" id="flypanels-treemenu">
<!-- Tree Menu will render here. Please keep template below -->
<ul>
<li class="haschildren" role="treeitem" aria-expanded="false">
<div>
<a href="{url}" class="link">{title}</a
><a aria-label="Expand submenu" href="#" data-aria-label="Expand submenu" data-aria-label-active="Collapse submenu" class="expand"
>{count}<i class="fa icon" aria-hidden="true"></i
></a>
</div>
<ul>
<li class="nochildren">
<div><a href="{url}" class="link">{title}</a></div>
</li>
</ul>
</li>
</ul>
<!-- End: Tree Menu will render here. Please keep template below -->
</nav>
</div>
</div>
</div>
```
### Using the search component
If you want to use the search component you'll need to set it to true in the options and you'll need to add the necessary HTML markup.
To customize the appearance of the search panel and its result you can either modify the LESS files and rebuild or just simply override the default styles.
```javascript
document.addEventListener('DOMContentLoaded', function (event) {
flyPanels.init({
onInit: function () {
fpm_search.init({ saveQueryCookie: true });
},
});
});
```
```html
<div class="flypanels-overlay"></div>
<div id="flypanels-searchbutton" class="flypanels-button search" data-target="flypanels-search"></div>
<div id="flypanels-search" class="flypanels-panel door-right">
<div class="flypanels-content door-right">
<div class="flypanels-inner">
<div class="searchpanel">
<div class="searchbox" data-searchurl="json/searchresult.json?search=true">
<input title="search" type="text" id="flypanels-searchfield" />
<a href="#" aria-label="search" class="searchbutton"></a>
</div>
<div class="resultinfo" aria-live="polite" aria-hidden="true" hidden>
You search for "<span class="query">lorem ipsum</span>" resulted in <span class="num">5</span> hits.
</div>
<div class="errormsg" aria-live="polite" aria-hidden="true" hidden>Something went wrong, please refresh the page and try again.</div>
<div class="loading" aria-hidden="true" hidden>
<div class="loader"></div>
<span>Searching...</span>
</div>
<div class="flypanels-searchresult" aria-hidden="true" aria-live="polite" hidden></div>
</div>
</div>
</div>
</div>
```
## changelog
#### 4.0.1
- Minor fixes and cleanup of code
#### 4.0.0
- New: Re-written HTML markup. The component no longer wraps around your content, making it easier to implement and less cumbersome.
- New: Added some neat animations to the panels. Currently there's only two animations but I will add some more soon enough.
- New: The buttons to open the panels are now separated and can be placed anywhere you like.
- Note: The treemenu and search components still work exactly the same.
#### 3.2.0
- New: Added support for generating a treemenu from a JSON file.
#### 3.1.1
- FIX: Repository bug
#### 3.1.0
- Moved from Bower to Yarn
#### 3.0.0
- New modular approach. Both the search and treemenu are now separate scripts that need to be incluced if needed.
- Sizing adjusted from pixel sizing to viewport sizing for a more responsive approach. Markup and CSS Changed.
- The panel widths are now possible to adjust depending on screen size using media queries. Default it uses the breakpoints defined by Bootstrap.
- Added keyboard navigation TAB for better WCAG support.
- ESC key now closes the currently open panel.
#### 2.0.6
- Fix: Adjustments to animations and environment.
- Fix: Prevents content from "jumping" due to the scrollbar in desktop mode.
#### 2.0.5
- Fix: Bug with the search not removing previous search query and number of hits.
#### 2.0.4
- Fix: Bug with the search not removing previous search results if new query results in zero hits.
#### 2.0.3
- Fix: Bug with the search not handling zero results properly and not hiding the spinner after a search.
#### 2.0.2
- Fix: A case where flyPanels made RoyalSlider to not work after a panel has been opened. It seems that using 'innerHTML' to add elements to the DOM made RoyalSlider to stop working. Rewrote my function to not use innerHTML. Now it works just fine. Who would have known, eh?
#### 2.0.1
- Fix: The CSS contained some too new rules that prevented it from working at all in iOS8. Added CSS prefixes to fix it.
- Change: Added autoprefixer to the build.
#### 2.0.0
- BIG CHANGE: Rewrote the plugin in vanilla JavaScript. jQuery is no longer required. Last version to use jQuery is 0.14.0
- Change: Smoother CSS transitions and changed animation structure for faster Paint and Layout
- Change: Removed legacy support for LESS. flyPanels now only supports SCSS
#### 0.14.0
- FIX: Fixed sidepanels scrolling issue with iOS9 that can occur depending on your meta viewport settings
#### 0.13.0
- Added SASS/SCSS support. flyPanels can now build with SASS or LESS.
#### 0.12.1
- Fixed CSS issue with the tree menu. #3
#### 0.12.0
- Added CSS class for active menu item.
#### 0.11.2
- Renamed all LESS variables. Added prefix `flypanels_` to all.
#### 0.11.1
- The search result shouldn't be a `nav` element. Changed to a `div`. LESS file updated as well.
#### 0.11.0
- Updated the HTML markup for the treemenu component for a more accessible menu. Switched out `span` elements for `a` link elements
- Addressed some issues with horizontal scrollbars caused by scrollbars on units with visible scrollbars. Horizontal overflow in the panels is now set to `overflow-x:hidden` and vertical scroll is now set to auto `overflow-y:auto`. This is maybe not the best solution to address scrollbar width.
#### 0.10.4
- Fixed bug with topbar not being fixed because of `translate3d`.
#### 0.10.3
- Bug fix: Errors in the CSS preventing the panels to work in Firefox.
#### 0.10.2
- Small fix: Removing and adding classes when opening and closing panels wasn't working properly resulting in unwanted scroll.
#### 0.10.0
- Added search panel. This is a panel with a search form that calls a URL with a querystring passing along a keyword, expecting a JSON response. Use this to produce a search result in the panel. Look at the dummy JSON file to understand how the JSON should be formatted.
- Added search settings. Default the search features will not init, just like the tree menu component it must be set to true to init.
#### 0.9.1
- Added a `preload` class to the container which is removed at page load, to prevent objects from animating to their starting positions.
#### 0.9.0
- Removed support for IE9
- Removed the need for jquery.transit. Making the whole script as such smaller.
- Added CSS3 translate3d animations for better and smoother animations.
- Removed the `fadedOpacity` option (The opacity value of the content when a panel is open). This is now a LESS variable you can change in the LESS file
- Please refer to the LESS files for all visual customizations you need.
#### 0.8.0
- Added a very nice and expanding treemenu component supporting up to 6 levels of depth.
#### 0.7.0
First public release.
| {'content_hash': '343363b8f9f3827ad4cf0bbafe37aa30', 'timestamp': '', 'source': 'github', 'line_count': 419, 'max_line_length': 444, 'avg_line_length': 34.627684964200476, 'alnum_prop': 0.6854366255427665, 'repo_name': 'SubZane/flyPanels', 'id': 'b5bafdda36bd8cd4e2709b93a78cd35f2c02274b', 'size': '14529', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '30703'}, {'name': 'SCSS', 'bytes': '14583'}]} |
#ifndef _LUA_REQUEST_H_
#define _LUA_REQUEST_H_

#include "mod_lua.h"
#include "util_varbuf.h"

/* Register the request library on the given Lua state. */
void ap_lua_load_request_lmodule(lua_State *L, apr_pool_t *p);
/* Push Lua wrapper objects for the core httpd structures onto the stack. */
void ap_lua_push_connection(lua_State *L, conn_rec *r);
void ap_lua_push_server(lua_State *L, server_rec *r);
void ap_lua_push_request(lua_State *L, request_rec *r);

/* Tags describing how a req_fun_t.fun entry is exposed to Lua. */
#define APL_REQ_FUNTYPE_STRING 1
#define APL_REQ_FUNTYPE_INT 2
#define APL_REQ_FUNTYPE_TABLE 3
#define APL_REQ_FUNTYPE_LUACFUN 4
#define APL_REQ_FUNTYPE_BOOLEAN 5

/* Pairs a C value or callback with the APL_REQ_FUNTYPE_* tag that says how
 * to marshal it into Lua. */
typedef struct
{
    const void *fun;
    int type;
} req_fun_t;

/* Struct to use as userdata for request_rec tables */
typedef struct
{
    request_rec *r;         /* Request_rec */
    apr_table_t *t;         /* apr_table_t* */
    const char *n;          /* name of table */
} req_table_t;

/* Userdata payload for inter-VM (ivm) values shared between Lua states. */
typedef struct {
    int type;               /* APL_REQ_FUNTYPE_* discriminator */
    size_t size;
    size_t vb_size;
    lua_Number number;
    struct ap_varbuf vb;
} lua_ivm_object;

#endif /* !_LUA_REQUEST_H_ */
| {'content_hash': 'bc6dea86ed97abf523db8a5a1c1b8782', 'timestamp': '', 'source': 'github', 'line_count': 43, 'max_line_length': 62, 'avg_line_length': 22.232558139534884, 'alnum_prop': 0.6464435146443515, 'repo_name': 'ZHYfeng/malicious-code-conceal', 'id': 'a39f4b97b1b1b89994cddf8b89f8117b29b33e84', 'size': '1754', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': '3-2-multi-programmes-big/httpd-8dffc15e54a846e692e17e8761b3350df2d7f254/modules/lua/lua_request.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ASP', 'bytes': '579'}, {'name': 'Assembly', 'bytes': '5001'}, {'name': 'Awk', 'bytes': '36184'}, {'name': 'Batchfile', 'bytes': '60629'}, {'name': 'C', 'bytes': '114808650'}, {'name': 'C++', 'bytes': '10029499'}, {'name': 'CMake', 'bytes': '57202'}, {'name': 'CSS', 'bytes': '41400'}, {'name': 'DTrace', 'bytes': '12419'}, {'name': 'Dockerfile', 'bytes': '4182'}, {'name': 'Forth', 'bytes': '199744'}, {'name': 'Frege', 'bytes': '3519320'}, {'name': 'GDB', 'bytes': '11041'}, {'name': 'HTML', 'bytes': '208126'}, {'name': 'JavaScript', 'bytes': '1822843'}, {'name': 'Lex', 'bytes': '9956'}, {'name': 'Lua', 'bytes': '109203'}, {'name': 'M4', 'bytes': '141034'}, {'name': 'Makefile', 'bytes': '565761'}, {'name': 'PHP', 'bytes': '2728'}, {'name': 'Perl', 'bytes': '22259'}, {'name': 'Perl 6', 'bytes': '11115'}, {'name': 'PowerShell', 'bytes': '2037'}, {'name': 'Python', 'bytes': '114162'}, {'name': 'Roff', 'bytes': '153364'}, {'name': 'Ruby', 'bytes': '11216'}, {'name': 'Shell', 'bytes': '118175'}, {'name': 'SourcePawn', 'bytes': '10547'}, {'name': 'TeX', 'bytes': '2582'}, {'name': 'Visual Basic', 'bytes': '1074'}, {'name': 'XSLT', 'bytes': '331268'}, {'name': 'Yacc', 'bytes': '9085'}]} |
'''Convert traces to/from PIX.
'''
import optparse
import os.path
import subprocess
import platform
import sys
def callProcess(cmd):
    """Execute *cmd* via subprocess, mirroring it to stderr in verbose mode.

    Exits the whole script with the child's status when the child fails;
    otherwise returns the (zero) exit code.
    """
    if options.verbose:
        sys.stderr.write('%s\n' % ' '.join(cmd))
    exitCode = subprocess.call(cmd)
    if exitCode != 0:
        sys.stderr.write('error: %s failed with exit code %u\n'
                         % (os.path.basename(cmd[0]), exitCode))
        sys.exit(exitCode)
    return exitCode
def verifyTrace(outTrace):
    """Check that outTrace was written; optionally replay it to validate.

    Exits the process with status 1 when the trace file is missing.
    With --verify, replays the trace through the retracer as a sanity check.
    """
    if os.path.exists(outTrace):
        sys.stderr.write('info: %s written\n' % outTrace)
        if options.verify:
            callProcess([options.retrace, os.path.abspath(outTrace)])
    else:
        sys.stderr.write('error: %s not written\n' % outTrace)
        sys.exit(1)
def getPixExe():
try:
programFiles = os.environ['ProgramFiles(x86)']
except KeyError:
programFiles = os.environ['ProgramFiles']
try:
dxsdkDir = os.environ['DXSDK_DIR']
except KeyError:
dxsdkDir = os.path.join(programFiles, "Microsoft DirectX SDL (June 2010)")
pixExe = os.path.join(dxsdkDir, "Utilities", "bin", 'x86', 'PIXwin.exe')
return pixExe
def convertToPix(inTrace, outPixrun):
    """Replay an apitrace .trace under PIX, recording a .PIXRun capture.

    Launches PIXwin.exe with the bundled apitrace.PIXExp experiment so that
    PIX drives the retracer on inTrace and writes outPixrun. Exits with a
    non-zero status when the capture was not produced; with --verify, opens
    the resulting capture in PIX.
    """
    pixExe = getPixExe()
    pixExp = os.path.join(os.path.dirname(__file__), 'apitrace.PIXExp')

    # PIX command-line usage references:
    # http://social.msdn.microsoft.com/Forums/sv/devdocs/thread/15addc0c-036d-413a-854a-35637ccbb834
    # http://src.chromium.org/svn/trunk/o3d/tests/test_driver.py
    cmd = [
        getPixExe(),
        pixExp,
        '-start',
        '-runfile', os.path.abspath(outPixrun),
        '-targetpath', os.path.abspath(options.retrace),
        #'-targetstartfolder', ...,
        '-targetargs', os.path.abspath(inTrace),
    ]
    callProcess(cmd)

    if os.path.exists(outPixrun):
        sys.stderr.write('info: %s written\n' % outPixrun)
        if options.verify:
            # No batch verification for PIX: just open the capture in the UI.
            subprocess.call([pixExe, os.path.abspath(outPixrun)])
    else:
        sys.stderr.write('error: %s not written\n' % outPixrun)
        sys.exit(1)
def detectApiFromCsv(inCsv):
    """Detect which Direct3D API a capture uses from a PIX CSV export.

    Scans the event column (third field) of each row for a known API entry
    point and returns the apitrace API name: 'd3d9', 'dxgi', 'd3d10' or
    'd3d11'.

    Raises ValueError when no known entry point is found.
    """
    import csv
    # Fixes over the previous version: leftover debug print() calls removed,
    # the file handle is closed via a context manager, and an explicit
    # ValueError replaces `assert False` (which vanishes under python -O).
    with open(inCsv, 'rt') as stream:
        for row in csv.reader(stream):
            if len(row) < 3:
                continue
            event = row[2]
            if event.startswith("Direct3DCreate9"):
                return "d3d9"
            if event.startswith("CreateDXGIFactory"):
                return "dxgi"
            if event.startswith("D3D10CreateDevice"):
                return "d3d10"
            if event.startswith("D3D11CreateDevice"):
                return "d3d11"
    raise ValueError('could not detect API from %s' % inCsv)
def convertFromPix(inPix, outTrace):
    """Convert a PIXRun capture into an apitrace .trace.

    First asks PIX to export the capture to CSV in order to auto-detect
    which D3D API it uses, then replays the capture under apitrace to
    record outTrace, and finally verifies the result.
    """
    pixExe = getPixExe()

    api = options.api
    if True:
        # Use -exporttocsv option to detect which API to use
        cmd = [
            pixExe,
            inPix,
            '-exporttocsv',
        ]
        # XXX: output filename is ignored
        inPixDir, inPixFileName = os.path.split(inPix)
        inPixName, inPixExt = os.path.splitext(inPixFileName)
        outCsv = os.path.join(inPixDir, inPixName + '.csv')
        # Remove any stale CSV so a failed export is not mistaken for a new one.
        if os.path.exists(outCsv):
            os.remove(outCsv)
        callProcess(cmd)
        if os.path.isfile(outCsv):
            api = detectApiFromCsv(outCsv)

    cmd = [
        options.apitrace,
        'trace',
        '-a', api,
        '-o', outTrace,
        pixExe,
        inPix,
    ]

    # XXX: Autodetect somehow
    if not options.single_frame:
        # Full capture
        cmd += ['-playstandalone']
    else:
        # Single-frame capture
        cmd += ['-autorenderto', 'nul:']

    callProcess(cmd)

    verifyTrace(outTrace)
def getDxcapExe():
    """Locate dxcap.exe, the Visual Studio graphics capture tool.

    On 64-bit Windows (detected via the ProgramFiles(x86) variable) the
    32-bit tool lives under SysWOW64; otherwise under System32.
    """
    sysSubDir = 'SysWOW64' if 'ProgramFiles(x86)' in os.environ else 'System32'
    return os.path.join(os.environ['windir'], sysSubDir, 'dxcap.exe')
def convertToDxcap(inTrace, outDxcaprun):
    """Replay a .trace under dxcap, recording a Visual Studio .vsglog.

    See https://msdn.microsoft.com/en-us/library/vstudio/dn774939.aspx for
    dxcap's command line interface. Exits with status 1 when the capture
    was not produced; with --verify, replays the capture through dxcap.
    """
    dxcapExe = getDxcapExe()
    cmd = [
        getDxcapExe(),
        '-rawmode',
        '-file', os.path.abspath(outDxcaprun),
        '-c',
        options.retrace,
        '-b',
        os.path.abspath(inTrace),
    ]
    callProcess(cmd)

    if os.path.exists(outDxcaprun):
        sys.stderr.write('info: %s written\n' % outDxcaprun)
        if options.verify:
            # -p plays the capture back to confirm it is usable.
            callProcess([dxcapExe, '-p', os.path.abspath(outDxcaprun)])
    else:
        sys.stderr.write('error: %s not written\n' % outDxcaprun)
        sys.exit(1)
def convertFromDxcap(inDxcap, outTrace):
    """Replay a .vsglog capture under dxcap while tracing it with apitrace.

    Records the replay into outTrace and verifies the result.
    """
    callProcess([
        options.apitrace, 'trace',
        '-a', options.api,
        '-o', outTrace,
        '--',
        getDxcapExe(), '-rawmode', '-p', inDxcap,
    ])
    verifyTrace(outTrace)
def main():
    """Command-line entry point.

    Parses options, then converts each input file based on its extension:
    .trace -> .vsglog (or .pixrun when -o names a .pixrun file), and
    .vsglog/.pixrun -> .trace.
    """
    global options

    # Parse command line options
    optparser = optparse.OptionParser(
        usage='\n\t%prog [options] <trace> ...',
        version='%%prog')
    optparser.add_option(
        '--apitrace', metavar='PROGRAM',
        type='string', dest='apitrace', default='apitrace.exe',
        help='path to apitrace command [default: %default]')
    optparser.add_option(
        '-a', '--api', metavar='API',
        type='string', dest='api', default='dxgi',
        help='api [default: %default]')
    optparser.add_option(
        '-r', '--retrace', metavar='PROGRAM',
        type='string', dest='retrace', default='d3dretrace.exe',
        help='path to retrace command [default: %default]')
    optparser.add_option(
        '-v', '--verbose',
        action='store_true', dest='verbose', default=False,
        help='verbose output')
    optparser.add_option(
        '-o', '--output', metavar='FILE',
        type="string", dest="output",
        help="output file [default: stdout]")
    optparser.add_option(
        '--single-frame',
        action='store_true', dest='single_frame', default=False,
        help='single-frame PIXRun capture')
    optparser.add_option(
        '--verify',
        action='store_true', dest='verify', default=False,
        help='verify output by replaying it')
    (options, args) = optparser.parse_args(sys.argv[1:])
    if not args:
        optparser.error("incorrect number of arguments")

    for inFile in args:
        # Choose the conversion direction from the input file extension.
        name, inExt = os.path.splitext(os.path.basename(inFile))
        if inExt.lower() == '.trace':
            convert = convertToDxcap
            outExt = '.vsglog'
            # An explicit -o with a .pixrun extension selects PIX output.
            if options.output:
                _, outExt = os.path.splitext(options.output)
            if outExt.lower() == '.pixrun':
                convert = convertToPix
        elif inExt.lower() == '.vsglog':
            convert = convertFromDxcap
            outExt = '.trace'
        elif inExt.lower() == '.pixrun':
            convert = convertFromPix
            outExt = '.trace'
        else:
            optparser.error("unexpected file extensions `%s`" % inExt)
        if options.output:
            outFile = options.output
        else:
            outFile = name + outExt
        # Remove any stale output so success can be detected reliably.
        if os.path.exists(outFile):
            os.remove(outFile)
        convert(inFile, outFile)
# Script entry point.
if __name__ == '__main__':
    main()
| {'content_hash': 'dee9ba99c5f3455dc2953baa589ca615', 'timestamp': '', 'source': 'github', 'line_count': 259, 'max_line_length': 100, 'avg_line_length': 27.69111969111969, 'alnum_prop': 0.574595649749024, 'repo_name': 'apitrace/apitrace', 'id': '1f285a8cf1c666d0de41be5fc196ecb981a4ed00', 'size': '8460', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'scripts/convert.py', 'mode': '33261', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '93051'}, {'name': 'C++', 'bytes': '8121808'}, {'name': 'CMake', 'bytes': '72495'}, {'name': 'Dockerfile', 'bytes': '2247'}, {'name': 'Emacs Lisp', 'bytes': '204'}, {'name': 'Makefile', 'bytes': '898'}, {'name': 'Objective-C++', 'bytes': '14172'}, {'name': 'PowerShell', 'bytes': '3604'}, {'name': 'Python', 'bytes': '2077153'}, {'name': 'Shell', 'bytes': '5199'}]} |
// Re-export the default HTTP status codes with the NGINX-specific
// extensions merged on top.
var status = require('.');

// Copy every default status entry, skipping the nested `extra` catalogue.
Object.keys(status).forEach(function(key) {
  if (key !== 'extra') {
    module.exports[key] = status[key];
  }
});

// Overlay the NGINX-specific codes; they override defaults of the same name.
Object.keys(status.extra.nginx).forEach(function(key) {
  module.exports[key] = status.extra.nginx[key];
});
| {'content_hash': '568614c6f5f1f29fbc83aa26e53f8356', 'timestamp': '', 'source': 'github', 'line_count': 19, 'max_line_length': 54, 'avg_line_length': 17.105263157894736, 'alnum_prop': 0.5938461538461538, 'repo_name': 'wdavidw/node-http-status', 'id': '172d3204bb1e98b3480fa07b1053731ee5c3bf3b', 'size': '496', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/nginx.js', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'CoffeeScript', 'bytes': '4671'}, {'name': 'Makefile', 'bytes': '219'}]} |
package context
import (
"bufio"
"crypto/hmac"
"crypto/sha1"
"encoding/base64"
"errors"
"fmt"
"net"
"net/http"
"strconv"
"strings"
"time"
"github.com/astaxie/beego/utils"
)
// Commonly used MIME types for content negotiation and serialization.
const (
	ApplicationJSON = "application/json"
	ApplicationXML  = "application/xml"
	ApplicationYAML = "application/x-yaml"
	TextXML         = "text/xml"
)
// NewContext returns a fresh Context with newly allocated
// BeegoInput and BeegoOutput members.
func NewContext() *Context {
	return &Context{
		Input: NewInput(),
		Output: NewOutput(),
	}
}
// Context is the HTTP request context, bundling BeegoInput, BeegoOutput,
// the raw http.Request and the response writer.
// BeegoInput and BeegoOutput provide some api to operate request and response more easily.
type Context struct {
	Input          *BeegoInput   // request-side helpers
	Output         *BeegoOutput  // response-side helpers
	Request        *http.Request
	ResponseWriter *Response
	_xsrfToken     string // per-request XSRF token; cleared in Reset
}
// Reset init Context, BeegoInput and BeegoOutput.
// It rebinds the context to a new request/response pair so Context values
// can be pooled and reused between requests.
func (ctx *Context) Reset(rw http.ResponseWriter, r *http.Request) {
	ctx.Request = r
	if ctx.ResponseWriter == nil {
		ctx.ResponseWriter = &Response{}
	}
	// Reuse the existing wrapper; only swap the underlying writer and
	// clear its per-request bookkeeping.
	ctx.ResponseWriter.reset(rw)
	ctx.Input.Reset(ctx)
	ctx.Output.Reset(ctx)
	// Drop any token cached from the previous request.
	ctx._xsrfToken = ""
}
// Redirect does redirection to localurl with http header status code.
// It delegates to net/http, which writes the Location header and a short
// HTML body.
func (ctx *Context) Redirect(status int, localurl string) {
	http.Redirect(ctx.ResponseWriter, ctx.Request, localurl, status)
}
// Abort stops this request.
// if beego.ErrorMaps exists, panic body.
// It records the status on the Output and then panics with body; the panic
// is presumably recovered by the framework's error-handling layer — that
// recovery is not visible in this file.
func (ctx *Context) Abort(status int, body string) {
	ctx.Output.SetStatus(status)
	panic(body)
}
// WriteString Write string to response body.
// it sends response body.
// Note: the error returned by Write is intentionally ignored here.
func (ctx *Context) WriteString(content string) {
	ctx.ResponseWriter.Write([]byte(content))
}
// GetCookie Get cookie from request by a given key.
// It's alias of BeegoInput.Cookie.
// Returns the empty string when the cookie is absent.
func (ctx *Context) GetCookie(key string) string {
	return ctx.Input.Cookie(key)
}
// SetCookie Set cookie for response.
// It's alias of BeegoOutput.Cookie.
// The variadic `others` carries optional cookie attributes (max-age, path,
// domain, ...) interpreted by BeegoOutput.Cookie.
func (ctx *Context) SetCookie(name string, value string, others ...interface{}) {
	ctx.Output.Cookie(name, value, others...)
}
// GetSecureCookie Get secure cookie from request by a given key.
//
// The cookie payload has the form "base64(value)|timestamp|hex(HMAC-SHA1)"
// as written by SetSecureCookie. It returns ("", false) when the cookie is
// absent, malformed, or its signature does not verify against Secret.
//
// NOTE(review): the embedded timestamp is signed but never checked for
// expiry here — confirm whether replaying an old cookie is acceptable.
func (ctx *Context) GetSecureCookie(Secret, key string) (string, bool) {
	val := ctx.Input.Cookie(key)
	if val == "" {
		return "", false
	}

	parts := strings.SplitN(val, "|", 3)

	if len(parts) != 3 {
		return "", false
	}

	vs := parts[0]
	timestamp := parts[1]
	sig := parts[2]

	h := hmac.New(sha1.New, []byte(Secret))
	fmt.Fprintf(h, "%s%s", vs, timestamp)

	// Use a constant-time comparison for the signature so the check does
	// not leak timing information about how many leading bytes matched.
	if !hmac.Equal([]byte(sig), []byte(fmt.Sprintf("%02x", h.Sum(nil)))) {
		return "", false
	}
	res, _ := base64.URLEncoding.DecodeString(vs)
	return string(res), true
}
// SetSecureCookie Set Secure cookie for response.
// The stored cookie value has the form
// "base64(value)|timestamp|hex(HMAC-SHA1(Secret, base64(value)+timestamp))",
// which GetSecureCookie verifies and decodes.
func (ctx *Context) SetSecureCookie(Secret, name, value string, others ...interface{}) {
	// Base64-encode so arbitrary bytes survive cookie transport.
	vs := base64.URLEncoding.EncodeToString([]byte(value))
	timestamp := strconv.FormatInt(time.Now().UnixNano(), 10)
	h := hmac.New(sha1.New, []byte(Secret))
	fmt.Fprintf(h, "%s%s", vs, timestamp)
	sig := fmt.Sprintf("%02x", h.Sum(nil))
	cookie := strings.Join([]string{vs, timestamp, sig}, "|")
	ctx.Output.Cookie(name, cookie, others...)
}
// XSRFToken creates a xsrf token string and returns.
// The token is cached on the context for the request. On first use it is
// read from the signed "_xsrf" cookie; if absent, a fresh 32-byte random
// token is generated and stored via SetSecureCookie, with `expire`
// forwarded as a cookie option.
func (ctx *Context) XSRFToken(key string, expire int64) string {
	if ctx._xsrfToken == "" {
		token, ok := ctx.GetSecureCookie(key, "_xsrf")
		if !ok {
			token = string(utils.RandomCreateBytes(32))
			ctx.SetSecureCookie(key, "_xsrf", token, expire)
		}
		ctx._xsrfToken = token
	}
	return ctx._xsrfToken
}
// CheckXSRFCookie checks xsrf token in this request is valid or not.
// the token can provided in request header "X-Xsrftoken" and "X-CsrfToken"
// or in form field value named as "_xsrf".
// On failure it calls Abort(403, ...), which panics — so the `return false`
// statements below only execute if that panic is recovered upstream.
func (ctx *Context) CheckXSRFCookie() bool {
	// Check sources in priority order: form/query, then the two headers.
	token := ctx.Input.Query("_xsrf")
	if token == "" {
		token = ctx.Request.Header.Get("X-Xsrftoken")
	}
	if token == "" {
		token = ctx.Request.Header.Get("X-Csrftoken")
	}
	if token == "" {
		ctx.Abort(403, "'_xsrf' argument missing from POST")
		return false
	}
	if ctx._xsrfToken != token {
		ctx.Abort(403, "XSRF cookie does not match POST argument")
		return false
	}
	return true
}
// RenderMethodResult renders the return value of a controller method to the output.
// Dispatch order: a value already implementing Renderer is used directly;
// an error is wrapped via errorRenderer; anything else is JSON-encoded via
// jsonRenderer (both helpers are defined elsewhere in this package).
// A nil result renders nothing.
func (ctx *Context) RenderMethodResult(result interface{}) {
	if result != nil {
		renderer, ok := result.(Renderer)
		if !ok {
			err, ok := result.(error)
			if ok {
				renderer = errorRenderer(err)
			} else {
				renderer = jsonRenderer(result)
			}
		}
		renderer.Render(ctx)
	}
}
// Response is a wrapper for the http.ResponseWriter.
// Started is set to true once the response has been written to, so later
// handlers can avoid writing a second response.
type Response struct {
	http.ResponseWriter
	// Started reports whether any header or body bytes have been written.
	Started bool
	// Status is the first status code written, or 0 if none yet.
	Status int
}
// reset rebinds the wrapper to a fresh underlying writer and clears the
// per-request bookkeeping so the Response value can be reused.
func (r *Response) reset(rw http.ResponseWriter) {
	r.ResponseWriter = rw
	r.Status = 0
	r.Started = false
}
// Write writes the data to the connection as part of an HTTP reply,
// and sets `started` to true.
// started means the response has sent out.
// The byte count and error from the underlying writer are returned as-is.
func (r *Response) Write(p []byte) (int, error) {
	r.Started = true
	return r.ResponseWriter.Write(p)
}
// WriteHeader sends an HTTP response header with status code,
// and sets `started` to true.
// Only the first call takes effect; later calls are silently ignored to
// avoid net/http's "multiple WriteHeader calls" problem.
func (r *Response) WriteHeader(code int) {
	if r.Status > 0 {
		// prevent multiple response.WriteHeader calls
		return
	}
	r.Status = code
	r.Started = true
	r.ResponseWriter.WriteHeader(code)
}
// Hijack implements http.Hijacker by delegating to the underlying writer.
// It returns an error when the underlying ResponseWriter does not support
// hijacking (e.g. HTTP/2 connections).
func (r *Response) Hijack() (net.Conn, *bufio.ReadWriter, error) {
	hj, ok := r.ResponseWriter.(http.Hijacker)
	if !ok {
		return nil, nil, errors.New("webserver doesn't support hijacking")
	}
	return hj.Hijack()
}
// Flush implements http.Flusher by delegating to the underlying writer.
// It is a silent no-op when the underlying writer does not support flushing.
func (r *Response) Flush() {
	if f, ok := r.ResponseWriter.(http.Flusher); ok {
		f.Flush()
	}
}
// CloseNotify implements http.CloseNotifier by delegating to the underlying
// writer. It returns a nil channel when unsupported; note that receiving
// from a nil channel blocks forever, so callers should handle that case.
func (r *Response) CloseNotify() <-chan bool {
	if cn, ok := r.ResponseWriter.(http.CloseNotifier); ok {
		return cn.CloseNotify()
	}
	return nil
}
// Pusher http.Pusher
func (r *Response) Pusher() (pusher http.Pusher) {
if pusher, ok := r.ResponseWriter.(http.Pusher); ok {
return pusher
}
return nil
} | {'content_hash': 'f7e1b88ad9803fe474efe172dc83a8fe', 'timestamp': '', 'source': 'github', 'line_count': 240, 'max_line_length': 111, 'avg_line_length': 25.3625, 'alnum_prop': 0.6937736159027436, 'repo_name': 'opensds/nbp', 'id': '452834e55e645d24f8ebf663402225c575aea9d8', 'size': '6919', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'vendor/github.com/astaxie/beego/context/context.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '4126'}, {'name': 'CSS', 'bytes': '25925'}, {'name': 'Dockerfile', 'bytes': '2945'}, {'name': 'Go', 'bytes': '374655'}, {'name': 'HTML', 'bytes': '366245'}, {'name': 'Java', 'bytes': '623998'}, {'name': 'JavaScript', 'bytes': '198363'}, {'name': 'Makefile', 'bytes': '5163'}, {'name': 'Shell', 'bytes': '7246'}]} |
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.dslplatform</groupId>
<artifactId>json</artifactId>
<name>DSL Platform Java JSON</name>
<packaging>pom</packaging>
<version>0.0.0</version>
<modules>
<module>library</module>
<module>processor</module>
<module>java8</module>
<module>joda</module>
<module>threetenbp</module>
<module>android</module>
<module>tests-java8</module>
<module>tests-java8-external-models</module>
<module>tests-kotlin</module>
</modules>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.deploy.skip>true</maven.deploy.skip>
</properties>
</project>
| {'content_hash': '9735bb0a0cca4d2beb17b18896300af8', 'timestamp': '', 'source': 'github', 'line_count': 28, 'max_line_length': 98, 'avg_line_length': 31.821428571428573, 'alnum_prop': 0.7205387205387206, 'repo_name': 'ngs-doo/dsl-json', 'id': '0728ae6221fce2f473b1ecc9f11c5481bda9654c', 'size': '891', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'pom.xml', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Java', 'bytes': '3755021'}, {'name': 'Kotlin', 'bytes': '2563'}, {'name': 'Scala', 'bytes': '112518'}]} |
require 'rails_helper'

# Regression spec for Fae's auditing before_filter: signing in and making a
# request must record the current user's id on Fae::Change so change records
# can be attributed to the right user.
describe 'before_filter' do
  it 'should update Fae::Change.current_user with current_user' do
    user = create_super_user
    # Sign in through the engine's session endpoint.
    post fae.user_session_path, params: { user: { 'email' => user.email, 'password' => user.password } }
    # Any authenticated engine request should trigger the filter.
    get fae.root_path
    expect(Fae::Change.current_user).to eq(user.id)
  end
end
title: Meeting Tonight
author: Andrew Mike
date: 2017-01-03
layout: post
---
Hello everyone! On behalf of Hacksburg, we hope that everyone has had a good winter holiday. 2016 is finally over, and we have a lot planned for 2017. We're excited about it, and we hope you are too!
We're having a general meeting tonight at 7:30 PM at the space. The agenda is [here](https://wiki.hacksburg.org/meetings:2017-01-03_general_meeting). Tonight should be light on business. If you were curious about Hacksburg but didn't know where to start, tonight's the night to stop by!
Thanks for reading, and we hope to see you tonight!
| {'content_hash': '4b03e80ee095d0981c6c8ad6df187f40', 'timestamp': '', 'source': 'github', 'line_count': 11, 'max_line_length': 286, 'avg_line_length': 56.27272727272727, 'alnum_prop': 0.7657512116316639, 'repo_name': 'bonafidegeek/hacksburg.github.io', 'id': 'fc3eec65dcc73c6fa7badeefb6cf47cf5f66195e', 'size': '623', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': '_posts/2017-01-03-1204-meeting_tonight.markdown', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '2979'}, {'name': 'HTML', 'bytes': '23972'}, {'name': 'Ruby', 'bytes': '49'}]} |
"""
<Program Name>
dorputget.py
<Started>
December 17, 2008
<Author>
[email protected]
Ivan Beschastnikh
<Purpose>
Attempt to put a (k,v) into DO registry and then get it back. On error
send an email to some folks.
<Usage>
Modify the following global var params to have this script functional:
- notify_list, a list of strings with emails denoting who will be
emailed when something goes wrong
This script takes no arguments. A typical use of this script is to
have it run periodically using something like the following crontab line:
7 * * * * /usr/bin/python /home/seattle/dorputget.py > /home/seattle/cron_log.dorputget
"""
import time
import os
import socket
import sys
import traceback
import threading
import random
import send_gmail
import integrationtestlib
import repyhelper
repyhelper.translate_and_import("/home/integrationtester/cron_tests/dorputget/DORadvertise.repy")
# Armon: This is to replace using the time command with getruntime
import nonportable
# event for communicating when the lookup is done or timedout
lookup_done_event = threading.Event()
def lookup_timedout():
  """
  <Purpose>
    Fires when the DOR lookup takes longer than expected: waits on
    lookup_done_event (for at most 30 minutes) and then emails the folks
    on the notify_list about the slow or failed lookup.

  <Arguments>
    None.

  <Exceptions>
    None.

  <Side Effects>
    Sends an email to the notify_list folks.

  <Returns>
    None.
  """
  integrationtestlib.log("in lookup_timedout()")

  subject = "DOR with repy test failed"
  notify_msg = "DOR lookup failed -- lookup_timedout() fired after 60 seconds."

  # Block until the lookup thread signals completion, giving up after
  # 30 minutes.
  wait_time = 1800
  started = nonportable.getruntime()
  lookup_done_event.wait(wait_time)
  elapsed = nonportable.getruntime() - started

  # If we waited essentially the whole timeout, assume the lookup stalled.
  if abs(wait_time - elapsed) < 5:
    notify_msg += " And lookup stalled for over 30 minutes (max timeout value)."
  else:
    notify_msg += " And lookup stalled for " + str(elapsed) + " seconds"

  integrationtestlib.notify(notify_msg, subject)
  return
def main():
  """
  <Purpose>
    Program's main. Announces a random (key, value) pair via
    DORadvertise_announce(), then tries to look it up again with
    DORadvertise_lookup(). On any failure the notify_list is emailed
    and the process exits.

  <Arguments>
    None.

  <Exceptions>
    All exceptions are caught.

  <Side Effects>
    May send notification email; calls sys.exit() on failure.

  <Returns>
    None.
  """
  # setup the gmail user/password to use when sending email
  success, explanation_str = send_gmail.init_gmail()
  if not success:
    integrationtestlib.log(explanation_str)
    sys.exit(0)

  integrationtestlib.notify_list.append("[email protected]")

  # Random key/value so concurrent runs don't collide in the registry.
  key = str(random.randint(4, 2**30))
  value = str(random.randint(4, 2**30))
  ttlval = 60
  subject = "DOR with repy test failed"

  # put(key,value) with ttlval into DOR
  integrationtestlib.log("calling DORadvertise_announce(key: " + str(key) + ", val: " + str(value) + ", ttl: " + str(ttlval) + ")")
  try:
    DORadvertise_announce(key, value, ttlval)
  except:
    # Bug fix: previously the detailed message was built but discarded and a
    # generic string was passed instead; it also named the wrong function
    # and misspelled "Announcing". Pass the corrected, detailed message.
    message = "DORadvertise_announce() failed.\nFailed while doing DORadvertise_announce(). "
    message = message + "Announcing with key: " + key + ", value: " + value + ", ttlval: " + str(ttlval)
    integrationtestlib.handle_exception(message, subject)
    sys.exit(0)

  # a 60 second timer to email the notify_list on slow lookups
  lookup_timedout_timer = threading.Timer(60, lookup_timedout)
  # start the lookup timer
  lookup_timedout_timer.start()

  # get(key) from DOR
  integrationtestlib.log("calling DORadvertise_lookup(key: " + str(key) + ")")
  try:
    ret_value = DORadvertise_lookup(key)
    # TODO: check the return value as well
    # ret_value = int(ret_value[0])
  except:
    message = "DORadvertise_lookup() failed.\nFailed while doing DORadvertise_lookup(). "
    message = message + "Looking up with key: " + key
    integrationtestlib.handle_exception(message, subject)
    sys.exit(0)

  # Lookup succeeded: stop the watchdog and release the timeout thread.
  lookup_timedout_timer.cancel()
  lookup_done_event.set()
  return
# Script entry point: run the DOR put/get round-trip test once.
if __name__ == "__main__":
  main()
| {'content_hash': 'd04290e23288e87b9ddba93efe9b8bfe', 'timestamp': '', 'source': 'github', 'line_count': 149, 'max_line_length': 133, 'avg_line_length': 27.966442953020135, 'alnum_prop': 0.6642668586513079, 'repo_name': 'sburnett/seattle', 'id': 'ef40bebd90cc2c3a0eb349674ad7be05d5dfc130', 'size': '4185', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'integrationtests/dorputget/dorputget_new.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '85039'}, {'name': 'CSS', 'bytes': '44140'}, {'name': 'Java', 'bytes': '178864'}, {'name': 'JavaScript', 'bytes': '791008'}, {'name': 'Perl', 'bytes': '36791'}, {'name': 'Python', 'bytes': '4683648'}, {'name': 'Scala', 'bytes': '2587'}, {'name': 'Shell', 'bytes': '87609'}]} |
{% extends "helpdesk/public_base.html" %}{% load i18n bootstrap %}{% load url from future %}
{% block helpdesk_body %}
{% if kb_categories %}
<h2>{% trans "Knowledgebase Articles" %}</h2>
<table class="table table-hover table-bordered">
<thead>
<tr class='row_tablehead'><td>{% trans "Knowledgebase Categories" %}</td></tr>
<tr class='row_columnheads'><th>{% trans "Category" %}</th></tr>
</thead>
<tbody>
{% for category in kb_categories %}
<tr class='row_even row_hover'><th><a href='{{ category.get_absolute_url }}'>{{ category.title }}</a></th></tr>
<tr class='row_odd'><td>{{ category.description }}</td></tr>
{% endfor %}
</tbody>
</table>
{% endif %}
{% if helpdesk_settings.HELPDESK_SUBMIT_A_TICKET_PUBLIC %}
<div class="col-xs-6">
<div class="panel panel-default">
<div class="panel-body">
<h2 name='submit'>{% trans "Submit a Ticket" %}</h2>
<p>{% trans "All fields are required. Please provide as descriptive a title and description as possible." %}</p>
<form role="form" method='post' action='./#submit' enctype='multipart/form-data'>
<fieldset>
{{ form|bootstrap }}
{% comment %}
{% for field in form %}
{% if field.is_hidden %}
{{ field }}
{% else %}
<div class="form-group {% if field.errors %}has-error{% endif %}">
<label class="control-label" for='id_{{ field.name }}'>{{ field.label }}</label>{% if not field.field.required %} <span class='form_optional'>{% trans "(Optional)" %}</span>{% endif %}</dt>
<div class="input-group">{{ field }}</div>
{% if field.errors %}<div class="help-block">{{ field.errors }}</div>{% endif %}
{% if field.help_text %}<span class='fhelp-block'>{{ field.help_text }}</span>{% endif %}
</div>
{% endif %}
{% endfor %}
{% endcomment %}
<div class='buttons form-group'>
<input type='submit' class="btn btn-primary" value='{% trans "Submit Ticket" %}' />
</div>
</fieldset>
{% csrf_token %}</form>
</div>
</div>
</div>
{% endif %}
{% if not helpdesk_settings.HELPDESK_VIEW_A_TICKET_PUBLIC and not helpdesk_settings.HELPDESK_SUBMIT_A_TICKET_PUBLIC %}
<h2>{% trans "Please use button at upper right to login first." %}</h2>
{% endif %}
{% if helpdesk_settings.HELPDESK_VIEW_A_TICKET_PUBLIC %}
<div class="col-xs-6">
<div class="panel panel-default">
<div class="panel-body">
<h2>{% trans "View a Ticket" %}</h2>
<form method='get' action="{% url 'helpdesk_public_view' %}">
<fieldset>
<div class="form-group {% if field.errors %}has-error{% endif %}">
<label for='id_ticket'>{% trans "Ticket" %}</label>
<div class="input-group"><input type='text' name='ticket' /></div>
</div>
<div class="form-group {% if field.errors %}has-error{% endif %}">
<label for='id_email'>{% trans "Your E-mail Address" %}</label>
<div class="input-group"><input type='text' name='email' /></div>
</div>
<div class='buttons form-group'>
<input type='submit' class="btn btn-primary" value='{% trans "View Ticket" %}' />
</div>
</fieldset>
{% csrf_token %}</form>
</div></div></div>
{% endif %}
{% endblock %}
| {'content_hash': '76138aa23b50a60ce387db6fb71d3869', 'timestamp': '', 'source': 'github', 'line_count': 92, 'max_line_length': 205, 'avg_line_length': 34.96739130434783, 'alnum_prop': 0.5865713397575381, 'repo_name': 'gjedeer/django-helpdesk-issue-164', 'id': '4af8a6c80cc376ba9620e5baad4ec619c74c959b', 'size': '3217', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'helpdesk/templates/helpdesk/public_homepage.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'CSS', 'bytes': '5692'}, {'name': 'JavaScript', 'bytes': '46236'}, {'name': 'Python', 'bytes': '356886'}, {'name': 'Shell', 'bytes': '708'}]} |
<body>
<center>
<div class="filenav">
<a href="../../../cdshop/index.htm"><img src="../../../cdshop/cdinfo.jpg" border="0"></a><br>
<a href="../../../index.htm">Sacred Texts</a>
<a href="../../index.htm">Legends and Sagas</a>
<a href="../index.htm">Celtic</a>
<a href="index.htm">Index</a>
<a href="swc229.htm">Previous</a>
<a href="swc231.htm">Next</a>
</div>
<hr>
<p align="CENTER"><font size="-1" color="GREEN"><a href="index.htm"><img src="img/tease.jpg"></a><br><i>Traditions and Hearthside Stories of West Cornwall, Vol. 2</i>, by William Bottrell, [1873], at sacred-texts.com</font></p>
<hr> <h3 align="center">
<span style="font-variant:small-caps;">The St. Levan Stone</span>.</h3>
<p>In St. Levan Churchyard is a cloven rock called St. Levan's stone. For some reason, now unknown, this must have been a venerated object when the church was built, or it would have been used in the building. The common notion, however, is that long before St. Levan's time this rock was regarded as sacred, because Merlin prophecied—</p>
<p></p>
<div style="margin-left: 25%">"When, with panniers astride,<br> A pack-horse one can ride<br> Through St. Levan stone,<br> The world will be done."</div>
<p><a name="page_148"><font size="1" color="green">p. 148</font></a></p>
<p>It is stated that Merlin came here with King Arthur, when he slaughtered the Danes at the battle of Velan-drùchar. The separation of this prophetic stone is so slow that there appears to be no danger of the world's ending just yet.</p>
<p></p>
<hr width="50%">
<p></p>
<div class="filenav">
<hr>
<center>
<a href="swc231.htm">Next: Parchapel Well</a>
</center>
</div>
</center>
</body>
| {'content_hash': '606cd5e289a1be1dbf283ced5dd05abc', 'timestamp': '', 'source': 'github', 'line_count': 32, 'max_line_length': 341, 'avg_line_length': 52.4375, 'alnum_prop': 0.6710369487485102, 'repo_name': 'daphnei/nn_chatbot', 'id': '7f070a93d8cfa1a49234b60f7c3802a82d00898b', 'size': '1690', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'story_corpus/swc2/swc230.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '857'}, {'name': 'HTML', 'bytes': '16928055'}, {'name': 'JavaScript', 'bytes': '4063'}, {'name': 'Jupyter Notebook', 'bytes': '13841'}, {'name': 'Makefile', 'bytes': '409'}, {'name': 'Perl', 'bytes': '4826'}, {'name': 'Python', 'bytes': '618978'}, {'name': 'Shell', 'bytes': '26118'}]} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
Reinwardtia 1: 498 (1952)
#### Original name
Peziza nigricans Fr.
### Remarks
null | {'content_hash': '776863b054d19dd1acb1a8749e539640', 'timestamp': '', 'source': 'github', 'line_count': 13, 'max_line_length': 25, 'avg_line_length': 11.23076923076923, 'alnum_prop': 0.6986301369863014, 'repo_name': 'mdoering/backbone', 'id': '68dd86ac520a2681e56314c5d040c0dcec429fad', 'size': '199', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'life/Fungi/Basidiomycota/Agaricomycetes/Auriculariales/Auriculariaceae/Auricularia/Hirneola nigricans/README.md', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>higman-cf: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.7.1+2 / higman-cf - 8.6.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
higman-cf
<small>
8.6.0
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-02-24 14:39:15 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-02-24 14:39:15 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.7.1+2 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.05.0 The OCaml compiler (virtual package)
ocaml-base-compiler 4.05.0 Official 4.05.0 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.3 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "[email protected]"
homepage: "https://github.com/coq-contribs/higman-cf"
license: "LGPL 2.1"
build: [make "-j%{jobs}%"]
install: [make "install"]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/HigmanCF"]
depends: [
"ocaml"
"coq" {>= "8.6" & < "8.7~"}
]
tags: [
"keyword: Higman's lemma"
"keyword: extraction"
"category: Mathematics/Combinatorics and Graph Theory"
"category: Miscellaneous/Extracted Programs/Combinatorics"
]
authors: [ "Stefan Berghofer" ]
bug-reports: "https://github.com/coq-contribs/higman-cf/issues"
dev-repo: "git+https://github.com/coq-contribs/higman-cf.git"
synopsis: "A direct constructive proof of Higman's Lemma"
description: """
This development formalizes in Coq the Coquand-Friedlender proof of
Higman's lemma for a two-letter alphabet.
An efficient program can be extracted from the proof."""
flags: light-uninstall
url {
src: "https://github.com/coq-contribs/higman-cf/archive/v8.6.0.tar.gz"
checksum: "md5=da619ecd7a965fd5f44a94f7390ceef4"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-higman-cf.8.6.0 coq.8.7.1+2</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.7.1+2).
The following dependencies couldn't be met:
- coq-higman-cf -> coq < 8.7~ -> ocaml < 4.03.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-higman-cf.8.6.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {'content_hash': '62d601b58ba84b6a26ea01e04bebb1c4', 'timestamp': '', 'source': 'github', 'line_count': 173, 'max_line_length': 159, 'avg_line_length': 41.32947976878613, 'alnum_prop': 0.5462937062937063, 'repo_name': 'coq-bench/coq-bench.github.io', 'id': '52f64fdb2456b65553fe47948fc03a3c5d5dece3', 'size': '7175', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'clean/Linux-x86_64-4.05.0-2.0.1/released/8.7.1+2/higman-cf/8.6.0.html', 'mode': '33188', 'license': 'mit', 'language': []} |
(function () {
    'use strict';

    angular
        .module('password.help')
        .controller('HelpController', HelpController);

    /**
     * Help page controller: exposes config and browser history to the view
     * and, once the view has rendered, selects the tab whose element id
     * matches the URL hash (deep-linking into a help section).
     */
    function HelpController(config, $window, $location, $scope) {
        var vm = this;

        vm.config = config;
        vm.selectedTab = 0;
        vm.history = $window.history;

        $scope.$on('$viewContentLoaded', selectRequestedTab);

        function selectRequestedTab() {
            // querySelectorAll returns a NodeList; use an index loop instead
            // of NodeList.forEach, which is missing on IE11.
            var tabs = document.querySelectorAll('md-tab');
            for (var i = 0; i < tabs.length; i += 1) {
                if (tabs[i].id === $location.hash()) {
                    vm.selectedTab = i;
                }
            }
        }
    }
})();
| {'content_hash': 'e5591c0388be47e00212e4fa77d60833', 'timestamp': '', 'source': 'github', 'line_count': 34, 'max_line_length': 87, 'avg_line_length': 26.58823529411765, 'alnum_prop': 0.48008849557522126, 'repo_name': 'silinternational/idp-pw-ui', 'id': 'fa688f3f30391abbfb1c676d3806264929b423da', 'size': '904', 'binary': False, 'copies': '1', 'ref': 'refs/heads/develop', 'path': 'app/help/help.controller.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '3482'}, {'name': 'Dockerfile', 'bytes': '807'}, {'name': 'HTML', 'bytes': '66884'}, {'name': 'JavaScript', 'bytes': '153625'}, {'name': 'Makefile', 'bytes': '303'}, {'name': 'Python', 'bytes': '10764'}, {'name': 'Shell', 'bytes': '123'}]} |
package pp.block1.cc.antlr;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
/**
 * Small demo driver for the generated ANTLR {@code Example} lexer:
 * tokenizes a few sample strings and prints the tokens.
 */
public class ExampleUsage {

    public static void main(String[] args) {
        scan("while do");
        scan("while while do");
        scan("whiledo done");
    }

    /**
     * Tokenizes {@code text} with the Example lexer and prints every token
     * on one line, each followed by a single space, then a newline.
     */
    public static void scan(String text) {
        CharStream input = new ANTLRInputStream(text);
        Lexer exampleLexer = new Example(input);
        StringBuilder line = new StringBuilder();
        for (Token token : exampleLexer.getAllTokens()) {
            line.append(token.toString()).append(' ');
        }
        System.out.println(line);
    }
}
| {'content_hash': '07620c2f393dd1a9c06269c1da8fc824', 'timestamp': '', 'source': 'github', 'line_count': 23, 'max_line_length': 55, 'avg_line_length': 28.217391304347824, 'alnum_prop': 0.6363636363636364, 'repo_name': 'wouwouwou/module_8', 'id': '79358478f98fb4080cec93b8a76cfc2be90a3de7', 'size': '649', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/pp/block1/cc/antlr/ExampleUsage.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ANTLR', 'bytes': '15791'}, {'name': 'Haskell', 'bytes': '55885'}, {'name': 'Java', 'bytes': '327119'}, {'name': 'Prolog', 'bytes': '4866'}, {'name': 'TeX', 'bytes': '3794'}]} |
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<script>window.jQuery || document.write('<script src="{{ site.url }}/assets/js/vendor/jquery-1.9.1.min.js"><\/script>')</script>
<script src="{{ site.url }}/assets/js/scripts.min.js"></script>
{% if site.google_analytics %}
<!-- Asynchronous Google Analytics snippet -->
<script>
(function (i, s, o, g, r, a, m) {
i['GoogleAnalyticsObject'] = r;
i[r] = i[r] || function () {
(i[r].q = i[r].q || []).push(arguments)
}, i[r].l = 1 * new Date();
a = s.createElement(o),
m = s.getElementsByTagName(o)[0];
a.async = 1;
a.src = g;
m.parentNode.insertBefore(a, m)
})(window, document, 'script', '//www.google-analytics.com/analytics.js', 'ga');
ga('create', '{{ site.google_analytics }}', 'auto');
ga('require', 'linkid', 'linkid.js');
ga('send', 'pageview');
</script>
{% endif %}
{% if page.comments != false %}{% include disqus_comments.html %}{% endif %} | {'content_hash': 'd493f63feb083c07690c7ff76b63d4a8', 'timestamp': '', 'source': 'github', 'line_count': 26, 'max_line_length': 128, 'avg_line_length': 40.65384615384615, 'alnum_prop': 0.5572374645222328, 'repo_name': 'redmoses/blog', 'id': '976569b4a75a4b7b5e1a298633322e0ac3352b80', 'size': '1057', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '_includes/scripts.html', 'mode': '33261', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '49252'}, {'name': 'HTML', 'bytes': '28289'}, {'name': 'JavaScript', 'bytes': '67911'}, {'name': 'Ruby', 'bytes': '4765'}, {'name': 'Shell', 'bytes': '52'}]} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {'content_hash': '244f92cd191888fae2617cbbac15c876', 'timestamp': '', 'source': 'github', 'line_count': 13, 'max_line_length': 31, 'avg_line_length': 9.692307692307692, 'alnum_prop': 0.7063492063492064, 'repo_name': 'mdoering/backbone', 'id': '0db9d9a09d1ebb6747f4f755f063c78f69a9e0d5', 'size': '190', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Orobanchaceae/Pedicularis/Pedicularis tommasinii/README.md', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {'content_hash': 'ad36ff740e827fc6f3c2d491cfda40da', 'timestamp': '', 'source': 'github', 'line_count': 13, 'max_line_length': 39, 'avg_line_length': 10.307692307692308, 'alnum_prop': 0.6940298507462687, 'repo_name': 'mdoering/backbone', 'id': '290bf1bfae859d086270e64f6b5123e42b3ca43e', 'size': '199', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'life/Plantae/Germinella/Germinella scalariformis/Germinella scalariformis marina/README.md', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
/*
* Abstraction for GL window system specific APIs (GLX, WGL).
*/
#pragma once
#include <assert.h>
#include <vector>
#include <string>
#include "glprofile.hpp"
namespace glws {
using glprofile::Profile;
class Drawable;
// Check whether extName occurs in extString (presumably a space-separated
// extension list such as the one returned by glGetString -- TODO confirm
// against the definition in the backend).
bool
checkExtension(const char *extName, const char *extString);
// Extra info for creating PBuffers
struct pbuffer_info
{
    int texFormat;  // GL_RGB, GL_RGBA, or GL_NONE
    int texTarget;  // GL_TEXTURE_1D/2D/CUBE_MAP or GL_NONE
    bool texMipmap;  // 0 or 1 (false/true)
    Drawable *hdc_drawable;  // Needed for WGL Pbuffers
};
// Helper for building the zero-terminated attribute arrays
// (NAME, VALUE, ..., 0) accepted by window-system APIs such as GLX/WGL.
// T is the attribute element type.
template< class T >
class Attributes {
protected:
    std::vector<T> attribs;
public:
    // Append a single attribute token.
    void add(T param) {
        attribs.push_back(param);
    }
    // Append a (name, value) attribute pair.
    void add(T pname, T pvalue) {
        add(pname);
        add(pvalue);
    }
    // Append the list terminator (0 unless the API requires otherwise).
    void end(T terminator = 0) {
        add(terminator);
    }
    // Implicit conversion to the raw pointer expected by the APIs.
    // NOTE(review): &attribs[0] is undefined for an empty vector; callers
    // appear to always add()/end() first -- confirm before relying on it.
    operator T * (void) {
        return &attribs[0];
    }
    operator const T * (void) const {
        return &attribs[0];
    }
};
// Framebuffer configuration from which drawables and contexts are created.
class Visual
{
public:
    Profile profile;  // GL profile this visual was created for
    /* TODO */
#if 0
    unsigned long redMask;
    unsigned long greenMask;
    unsigned long blueMask;
    unsigned long alphaMask;
#endif
    bool doubleBuffer;  // whether the visual is double-buffered
    Visual(Profile prof) :
        profile(prof)
    {}
    virtual ~Visual() {}
};
// A surface (window or pbuffer) that a Context can render into.
// Concrete backends (GLX, WGL, ...) subclass this.
class Drawable
{
public:
    const Visual *visual;  // visual used at creation (not owned)
    int width;
    int height;
    bool pbuffer;  // true if this is an off-screen pbuffer
    bool visible;  // set once show() has been called
    // For WGL_ARB_render_texture
    glws::pbuffer_info pbInfo;
    int mipmapLevel, cubeFace;
    Drawable(const Visual *vis, int w, int h, bool pb) :
        visual(vis),
        width(w),
        height(h),
        pbuffer(pb),
        visible(false),
        mipmapLevel(0),
        cubeFace(0)
    {}
    virtual ~Drawable() {}
    // Record the new size; subclasses additionally resize the native surface.
    virtual void
    resize(int w, int h) {
        width = w;
        height = h;
    }
    // Make the drawable visible on screen; never valid for pbuffers.
    virtual void
    show(void) {
        assert(!pbuffer);
        visible = true;
    }
    virtual void copySubBuffer(int x, int y, int width, int height);
    virtual void swapBuffers(void) = 0;
};
// A GL rendering context created against a Visual.
class Context
{
public:
    const Visual *visual;  // visual used at creation (not owned)
    // Requested profile
    Profile profile;
    // Created profile (may differ from the requested one)
    Profile actualProfile;
    glprofile::Extensions actualExtensions;
    Context(const Visual *vis) :
        visual(vis),
        profile(vis->profile),
        actualProfile(profile),
        initialized(false)
    {}
    virtual ~Context() {}
    // Context must have been made current once
    inline bool
    hasExtension(const char *extension) const {
        assert(initialized);
        return actualExtensions.has(extension);
    }
private:
    bool initialized;  // set after the one-time initialize() has run
    // One-time setup, run by makeCurrent() the first time the context
    // becomes current (see the friend declaration below).
    void initialize(void);
    friend bool makeCurrent(Drawable *, Context *);
};
// Global initialization / teardown of the window-system layer.
void
init(void);
void
cleanup(void);
Visual *
createVisual(bool doubleBuffer, unsigned samples, Profile profile);
// Create a drawable; a non-NULL pbInfo requests a pbuffer.
Drawable *
createDrawable(const Visual *visual, int width, int height,
               const glws::pbuffer_info *pbInfo = NULL);
Context *
createContext(const Visual *visual, Context *shareContext = 0, bool debug = false);
// Backend-specific make-current; callers use makeCurrent() below instead.
bool
makeCurrentInternal(Drawable *drawable, Context *context);
// Make the context current on the drawable, running the context's
// one-time initialization the first time it becomes current.
inline bool
makeCurrent(Drawable *drawable, Context *context)
{
    bool success = makeCurrentInternal(drawable, context);
    if (success && context && !context->initialized) {
        context->initialize();
    }
    return success;
}
bool
processEvents(void);
// iBuffer is one of GL_FRONT/BACK_LEFT/RIGHT, GL_AUX0...
bool
bindTexImage(Drawable *pBuffer, int iBuffer);
// iBuffer is one of GL_FRONT/BACK_LEFT/RIGHT, GL_AUX0...
bool
releaseTexImage(Drawable *pBuffer, int iBuffer);
bool
setPbufferAttrib(Drawable *pBuffer, const int *attribList);
} /* namespace glws */
| {'content_hash': '3e185c3664108adfbf586c1cdb5b04e1', 'timestamp': '', 'source': 'github', 'line_count': 214, 'max_line_length': 83, 'avg_line_length': 17.44859813084112, 'alnum_prop': 0.6331012319228709, 'repo_name': 'EoD/apitrace', 'id': 'fd234c5a9cf4e37ef0f0d460ee13dc98e352ce23', 'size': '5022', 'binary': False, 'copies': '1', 'ref': 'refs/heads/d3d9-profiling-v2', 'path': 'retrace/glws.hpp', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '7727'}, {'name': 'C++', 'bytes': '2323688'}, {'name': 'CMake', 'bytes': '67754'}, {'name': 'Emacs Lisp', 'bytes': '204'}, {'name': 'Java', 'bytes': '16257'}, {'name': 'Makefile', 'bytes': '4900'}, {'name': 'Objective-C++', 'bytes': '10252'}, {'name': 'Python', 'bytes': '1976081'}, {'name': 'Shell', 'bytes': '1037'}]} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {'content_hash': 'd0d775d807f696d4f53b875ac2ee2318', 'timestamp': '', 'source': 'github', 'line_count': 13, 'max_line_length': 31, 'avg_line_length': 9.692307692307692, 'alnum_prop': 0.7063492063492064, 'repo_name': 'mdoering/backbone', 'id': '526a157db3e49f28b950ddda12610c58efaf7903', 'size': '192', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'life/Plantae/Pteridophyta/Polypodiopsida/Polypodiales/Dryopteridaceae/Polystichum/Polystichum pseudodeltoden/README.md', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
// Publication available to every client: only configuration documents
// explicitly flagged `public: true` are sent down the wire.
Meteor.publish("configurations", function () {
    return Configurations.find({public: true});
});
// Admin-only publication of *all* configurations (public and private).
// Non-admin subscribers receive no documents.
Meteor.publish("serverConfigurations", function () {
    if (!PermissionsEnum.Configurations.isAdmin(this.userId)) {
        return null;
    }
    return Configurations.find({});
});
| {'content_hash': 'b288117930156ce7f4c70c3946655f61', 'timestamp': '', 'source': 'github', 'line_count': 10, 'max_line_length': 60, 'avg_line_length': 26.6, 'alnum_prop': 0.7218045112781954, 'repo_name': 'mondora/mondora-website-back', 'id': '3e53ddb8894a29494483f4bf480496ede1aa78f6', 'size': '266', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'server/models/configurations/publications.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '336'}, {'name': 'JavaScript', 'bytes': '71767'}]} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package client;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author wilkerrj
*/
/**
 * Console client for a small custom file-browsing protocol.
 *
 * After connecting, the user types commands on stdin:
 *   "ls"         - list the current remote directory
 *   "cd <dir>"   - change the remote directory
 *   "get <file>" - download a file into the local working directory
 *   ".."         - go up one remote directory
 *   "sair"       - quit ("sair" is Portuguese for "exit")
 * Requests are sent as tokens joined by the platform path separator,
 * with the tracked remote directory appended.
 *
 * @author wilkerrj
 */
public class Client {
    public static void main(String[] args) throws ClassNotFoundException {
        try {
            // Platform separators double as protocol delimiters.
            String fileSeparator = File.separator;
            String pathSeparator = File.pathSeparator;
            boolean exit = false;
            Menu.inicial();
            // NOTE(review): server host and port are hard-coded.
            Socket clientSocket = new Socket("wilkerrj.zapto.org", 9999);
            BufferedReader inFromUser = new BufferedReader(new InputStreamReader(System.in)); // reader bound to the keyboard
            DataOutputStream outToServer = new DataOutputStream(clientSocket.getOutputStream()); // output stream to the server
            BufferedReader inFromServer = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
            String currentDirectory = "init"; // remote cwd token, updated from server replies
            while (!exit) {
                try {
                    String in = inFromUser.readLine();
                    if (in.equals("sair")) { // "sair" = quit
                        exit = true;
                        break;
                    }
                    // "..": ask the server for the parent directory and adopt
                    // whatever directory the server answers with.
                    if (in.equals("..")) {
                        outToServer.writeBytes(in + pathSeparator + currentDirectory + '\n');
                        String resp = inFromServer.readLine();
                        currentDirectory = resp;
                        System.out.println(resp);
                    }
                    // "ls": the server replies "null" on permission failure,
                    // otherwise a pathSeparator-joined listing whose first
                    // token is skipped when printing.
                    if (in.equals("ls")) {
                        outToServer.writeBytes(in + pathSeparator + currentDirectory + '\n');
                        String resp = inFromServer.readLine();
                        if (resp.equals("null")) {
                            System.out.println("permissão negada");
                        } else {
                            String[] dir = resp.split(pathSeparator);
                            for (int i = 1; i < dir.length; i++) {
                                System.out.println(dir[i]);
                            }
                        }
                    } else {
                        String[] s = in.split(" ");
                        if (s[0].equals("cd")) {
                            if (s.length == 2) {
                                outToServer.writeBytes(s[0] + pathSeparator + s[1] + pathSeparator + currentDirectory + "\n");
                            } else {
                                System.out.println("Comando inválido");
                            }
                            // Reply format: "<message><sep><new directory>".
                            String resp = inFromServer.readLine();
                            String[] r = resp.split(pathSeparator);
                            System.out.println(r[0]);
                            currentDirectory = r[1];
                        } else if (s[0].equals("get")) {
                            // Everything after "get " is the file name.
                            System.out.println(in.subSequence(4, in.length()));
                            String saveDirectory;
                            saveDirectory = System.getProperty("user.dir"); // download target: local cwd
                            outToServer.writeBytes(s[0] + pathSeparator + in.subSequence(4, in.length()) + pathSeparator + currentDirectory + "\n");
                            String[] resp = inFromServer.readLine().split(pathSeparator);
                            if (resp[0].equals("")) {
                                System.out.println("Arquivo inexistente");
                            } else {
                                // Stream raw socket bytes into the destination
                                // file until the server closes the connection;
                                // the session always ends after a download.
                                byte[] buffer = new byte[clientSocket.getReceiveBufferSize()];
                                int bytesRead;
                                FileOutputStream file = new FileOutputStream(new File(saveDirectory + fileSeparator + resp[1]));
                                InputStream receive = clientSocket.getInputStream();
                                while ((bytesRead = receive.read(buffer)) != -1) {
                                    file.write(buffer, 0, bytesRead);
                                }
                                file.close();
                                System.out.println("Arquivo recebido, encerrando conexão...");
                                receive.close();
                                clientSocket.close();
                                break;
                            }
                        }
                    }
                } catch (SocketException ex) {
                    System.out.println("Conexao encerrada");
                } catch (IOException ex) {
                    System.out.println("Erro na comunicação com o servidor");
                }
            }
        } catch (IOException ex) {
            System.out.println("Erro"); // TODO: emit a more descriptive error message
        }
    }
}
| {'content_hash': '48f59d4e4857d7aace0bdefa786d7f60', 'timestamp': '', 'source': 'github', 'line_count': 110, 'max_line_length': 148, 'avg_line_length': 46.95454545454545, 'alnum_prop': 0.4720232333010649, 'repo_name': 'Wilker/Client', 'id': '68c6d7d7c9eb82eb9b16a63c053f239020f6dbc7', 'size': '5172', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Client/src/client/Client.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '6121'}]} |
-- Guard against clobbering the class table when the script is reloaded.
if CAddonTemplateGameMode == nil then
	CAddonTemplateGameMode = class({})
end
-- Engine precache hook: declare assets here so they load before the game starts.
function Precache( context )
	--[[
	  Precache things we know we'll use.  Possible file types include (but not limited to):
		PrecacheResource( "model", "*.vmdl", context )
		PrecacheResource( "soundfile", "*.vsndevts", context )
		PrecacheResource( "particle", "*.vpcf", context )
		PrecacheResource( "particle_folder", "particles/folder", context )
	]]
end
-- Create the game mode when we activate
function Activate()
	GameRules.AddonTemplate = CAddonTemplateGameMode()
	GameRules.AddonTemplate:InitGameMode()
end
-- One-time setup: register the global think callback (fires every 2 seconds).
function CAddonTemplateGameMode:InitGameMode()
	print( "Template addon is loaded." )
	GameRules:GetGameModeEntity():SetThink( "OnThink", self, "GlobalThink", 2 )
end
-- Evaluate the state of the game
function CAddonTemplateGameMode:OnThink()
if GameRules:State_Get() == DOTA_GAMERULES_STATE_GAME_IN_PROGRESS then
--print( "Template addon script is running." )
elseif GameRules:State_Get() >= DOTA_GAMERULES_STATE_POST_GAME then
return nil
end
return 1
end | {'content_hash': '640cb35eb4b9397a0519e08563d597dc', 'timestamp': '', 'source': 'github', 'line_count': 34, 'max_line_length': 87, 'avg_line_length': 31.294117647058822, 'alnum_prop': 0.7453007518796992, 'repo_name': 'Ryan--Yang/HumanGuard', 'id': 'f116275955d7ddf61b9f8a9edb18143df9ed7242', 'size': '1092', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'scripts/vscripts/addon_game_mode.lua', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Lua', 'bytes': '1092'}]} |
###**Introduction**###
The PackageManagementProviderResource (used to be called OneGetProviderResource) module contains the following Windows PowerShell Desired State Configuration (DSC) resources to allow you to manage packages and Windows PowerShell modules.
* **NugetPackage** – lets you download packages from the NuGet source location (e.g., http://nuget.org/api/v2/), and install or uninstall the package.
* **PSModule** – lets you download Windows PowerShell modules from the PowerShell Gallery, "PSGallery" (e.g., https://www.powershellgallery.com/api/v2/ ), and install them on your computer.
* **PackageManagementSource** – lets you register or unregister a package source on your computer
**NugetPackage** DSC resource has the following properties:
<table>
<tr>
<td> <b>Property</b> </td>
<td><b>Description</b> </td>
</tr>
<tr>
<td>Name</td>
<td>Specifies the name of the package to be installed or uninstalled.</td>
</tr>
<tr>
<td>DestinationPath</td>
<td>Specifies a file location where you want the package to be installed.</td>
</tr>
<tr>
<td>Ensure</td>
<td>Determines whether the package is to be installed or uninstalled.</td>
</tr>
<tr>
<td>InstallationPolicy</td>
<td>Determines whether you trust the package's source.</td>
</tr>
<tr>
<td>RequiredVersion</td>
<td>Specifies the exact version of the package you want to install or uninstall.</td>
</tr>
<tr>
<td>MinimumVersion</td>
<td>Specifies the minimum version of the package you want to install or uninstall.</td>
</tr>
<tr>
<td>MaximumVersion</td>
<td>Specifies the maximum version of the package you want to install or uninstall.</td>
</tr>
<tr>
<td>Source</td>
<td>Specifies the URI or name of the registered package source.</td>
</tr>
<tr>
<td>SourceCredential</td>
<td>Provides access to the package on a remote source. This property is not used to install the package. The package is always installed on the local system account.</td>
</tr>
</table>
**PSModule** DSC resource has the following properties:
<table>
<tr>
<td><b>Property</b></td>
<td><b>Description</b></td>
</tr>
<tr>
<td>Name</td>
<td>Specifies the name of the PowerShell module to be installed or uninstalled.</td>
</tr>
<tr>
<td>Ensure</td>
<td>Determines whether the module to be installed or uninstalled.</td>
</tr>
<tr>
<td>InstallationPolicy</td>
<td>Determines whether you trust the source repository where the module resides.</td>
</tr>
<tr>
<td>RequiredVersion</td>
<td>Specifies the exact version of the module you want to install or uninstall.</td>
</tr>
<tr>
<td>MinimumVersion</td>
<td>Specifies the minimum version of the module you want to install or uninstall.</td>
</tr>
<tr>
<td>Repository</td>
<td>Specifies the name of the module source repository where the module can be found.</td>
</tr>
</table>
**PackageManagementSource** has the following properties:
<table>
<tr>
<td><b>Property</b></td>
<td><b>Description</b></td>
</tr>
<tr>
<td>Name</td>
<td>Specifies the name of the package source to be registered or unregistered on your system.</td>
</tr>
<tr>
<td>ProviderName</td>
<td>Specifies the name of the OneGet provider through which you can interop with the package source.</td>
</tr>
<tr>
<td>Ensure</td>
<td>Determines whether the package source is to be registered or unregistered.</td>
</tr>
<tr>
<td>InstallationPolicy</td>
<td>Determines whether you trust the package source.</td>
</tr>
<tr>
<td>SourceUri</td>
<td>Specifies the URI of the package source.</td>
</tr>
<tr>
<td>SourceCredential</td>
<td>Provides access to the package on a remote source.</td>
</tr>
</table>
<br/>
###**Requirements**###
Before you install OneGetProviderResource, you must be running [Windows Management Framework 5.0 Preview April 2015](http://blogs.msdn.com/b/powershell/archive/2015/04/29/windows-management-framework-5-0-preview-april-2015-is-now-available.aspx).
<br/>
###**Installation**###
To use the **PackageManagementProviderResource** module,
* Copy the content of the download to the $env:ProgramFiles\WindowsPowerShell\Modules folder.
To confirm installation,
* Run **Get-DSCResource** to verify that NugetPackage, OneGetSource, PSModule are among the DSC Resources are listed in your DSC resources.
<br/>
###**Building the Code**###
The code is a Windows PowerShell script and interpreted by the Windows PowerShell engine at runtime.
<br/>
###**Running Test**###
To test the modules, run the following commands. The NuGetPackage resource is used here as an example.
* cd $env:ProgramFiles\WindowsPowerShell\Modules\PackageManagementProviderResource\Test
* .\NugetPackage\NugetPackage.Get.Tests.ps1
* .\NugetPackage\NugetPackage.Set.Tests.ps1
* .\NugetPackage\NugetPackage.Test.Tests.ps1
You can repeat these commands similarly for testing PSModule and OneGetSource DSC resources.
<br/>
###**Contributing to the Code**###
You are welcome to contribute to this project. There are many ways to contribute:
1. Submit a bug report via [Issues]( https://github.com/PowerShell/PackageManagementProviderResource/issues). For a guide to submitting good bug reports, please read [Painless Bug Tracking](http://www.joelonsoftware.com/articles/fog0000000029.html).
2. Verify fixes for bugs.
3. Submit your fixes for a bug. Before submitting, please make sure you have:
- Performed code reviews of your own
- Updated the test cases if needed
- Run the test cases to ensure no feature breaks or test breaks
- Added the test cases for new code
4. Submit a feature request.
5. Help answer questions in the discussions list.
6. Submit test cases.
7. Tell others about the project.
8. Tell the developers how much you appreciate the product!
You might also read these two blog posts about contributing code: [Open Source Contribution Etiquette](http://tirania.org/blog/archive/2010/Dec-31.html) by Miguel de Icaza, and [Don’t “Push” Your Pull Requests](http://www.igvita.com/2011/12/19/dont-push-your-pull-requests/) by Ilya Grigorik.
Before submitting a feature or substantial code contribution, please discuss it with the Windows PowerShell team via [Issues]( https://github.com/WindowsPowerShell/OneGetResource/issues), and ensure it follows the product roadmap. Note that all code submissions will be rigorously reviewed by the Windows PowerShell Team. Only those that meet a high bar for both quality and roadmap fit will be merged into the source.
| {'content_hash': '5ac5dda0f2a62a2ab14feb082a67e805', 'timestamp': '', 'source': 'github', 'line_count': 173, 'max_line_length': 418, 'avg_line_length': 40.16763005780347, 'alnum_prop': 0.6867175133112678, 'repo_name': 'randorfer/RunbookExample', 'id': '8277c1c58f0eb718f60b784548f7eb982f2646a0', 'size': '6961', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'PowerShellModules/PackageManagementProviderResource/1.0.2/README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '150327'}, {'name': 'PHP', 'bytes': '1097'}, {'name': 'PowerShell', 'bytes': '2137801'}]} |
"""
Problem: Permutation: Given a string, write a function to check
if it is a permutation of a palindrome. A palindrome
is a word or phrase that is the same forwards and backwards.
A permutation is a rearrangement of letters. The palindrome
does not need to be limited to just dictionary words.
"""
# Parity-set implementation: O(n) time; extra space bounded by the number
# of distinct characters (O(len(unique_characters)) if the alphabet is limited).
def is_perm_palindrome(s):
    """Return True if ``s`` is a permutation of a palindrome.

    Comparison is case-insensitive and spaces are ignored, so phrases
    such as ``'Tact Coa'`` (an anagram of "Taco Cat") qualify.

    >>> is_perm_palindrome('Tact Coa')
    True
    >>> is_perm_palindrome('ab')
    False
    """
    # Characters currently seen an odd number of times.
    odd_chars = set()
    for c in s:
        if c == ' ':
            continue  # spaces never count toward the palindrome
        c = c.lower()
        # Toggle membership: in the set <=> odd occurrence count so far.
        if c in odd_chars:
            odd_chars.remove(c)
        else:
            odd_chars.add(c)
    # A string can be permuted into a palindrome iff at most one character
    # occurs an odd number of times (the potential middle character).
    return len(odd_chars) <= 1
| {'content_hash': '26fab562d31d8cfeaef3767634532bad', 'timestamp': '', 'source': 'github', 'line_count': 39, 'max_line_length': 71, 'avg_line_length': 26.487179487179485, 'alnum_prop': 0.5876089060987415, 'repo_name': 'cjoverbay/cracking-the-code-interview-solutions', 'id': 'af13bd34928ed4d5131c9d18648072b629ecd66d', 'size': '1033', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'python/solution/chapter_01_arrays_and_strings/problem_1_4_palindrome_permutation.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Python', 'bytes': '12230'}]} |
@interface NewsNormalCellNode ()
//UI
@property (nonatomic, strong) UIImageView *imageNode;
@property (nonatomic, strong) UILabel *titleTextNode;
@property (nonatomic, strong) UIButton *replyBtnNode;
@property (nonatomic, strong) UIView *underLineNode;
//Data
@property (nonatomic, strong) NewsListInfoModel *listInfoModel;
@end

@implementation NewsNormalCellNode

#pragma mark - factory

// Dequeue a reusable cell from the table view, creating one on first use.
+ (instancetype)cellWithTableView:(UITableView *)tableView
{
    static NSString *ID = @"NewsNormalCellNode";
    NewsNormalCellNode *cell = [tableView dequeueReusableCellWithIdentifier:ID];
    return cell ?: [[NewsNormalCellNode alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:ID];
}

- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier
{
    if (self = [super initWithStyle:style reuseIdentifier:reuseIdentifier]) {
        self.selectionStyle = UITableViewCellSelectionStyleNone;
        [self addSubnodes];
        [self mas_subViews];
    }
    return self;
}

#pragma mark - private

// Attach all subviews to the content view, in z-order.
- (void)addSubnodes
{
    for (UIView *subview in @[self.imageNode, self.titleTextNode, self.replyBtnNode, self.underLineNode]) {
        [self.contentView addSubview:subview];
    }
}

// Lay out the thumbnail, title, reply badge and separator with Masonry.
- (void)mas_subViews
{
    [_imageNode mas_makeConstraints:^(MASConstraintMaker *make) {
        make.width.and.height.mas_equalTo(80);
        make.top.equalTo(self.contentView).offset(10);
        make.left.equalTo(self.contentView).offset(10);
    }];
    [_titleTextNode mas_makeConstraints:^(MASConstraintMaker *make) {
        make.top.equalTo(_imageNode);
        make.left.equalTo(_imageNode.mas_right).offset(10);
        make.right.equalTo(self.contentView).offset(-10);
    }];
    [_replyBtnNode mas_makeConstraints:^(MASConstraintMaker *make) {
        make.width.mas_equalTo(50);
        make.height.mas_equalTo(20);
        make.left.equalTo(_titleTextNode.mas_left);
        make.bottom.equalTo(_imageNode.mas_bottom);
    }];
    [_underLineNode mas_makeConstraints:^(MASConstraintMaker *make) {
        make.height.mas_equalTo(0.5);
        make.top.equalTo(_imageNode.mas_bottom).offset(10);
        make.left.bottom.right.equalTo(self.contentView);
    }];
}

#pragma mark - public

// Bind a list-item model to the cell's subviews.
- (void)setupListInfoModel:(NewsListInfoModel *)listInfoModel
{
    _listInfoModel = listInfoModel;
    self.imageNode.imageURL = [NSURL URLWithString:listInfoModel.imgsrc.firstObject];
    self.titleTextNode.text = listInfoModel.title;
    [self.replyBtnNode setTitle:[NSString stringWithFormat:@"%ld", listInfoModel.replyCount] forState:UIControlStateNormal];
}

#pragma mark - lazy subviews

- (UIImageView *)imageNode
{
    if (_imageNode == nil) {
        _imageNode = [[UIImageView alloc] init];
    }
    return _imageNode;
}

- (UILabel *)titleTextNode
{
    if (_titleTextNode == nil) {
        UILabel *label = [[UILabel alloc] init];
        label.numberOfLines = 2;
        label.font = [UIFont systemFontOfSize:16];
        label.textColor = RGB(36, 36, 36);
        _titleTextNode = label;
    }
    return _titleTextNode;
}

- (UIButton *)replyBtnNode
{
    if (_replyBtnNode == nil) {
        UIButton *button = [[UIButton alloc] init];
        button.titleLabel.font = [UIFont systemFontOfSize:10];
        [button setTitleColor:RGB(150, 150, 150) forState:UIControlStateNormal];
        [button setImage:[UIImage imageNamed:@"common_chat_new"] forState:UIControlStateNormal];
        [button setTitle:@"0" forState:UIControlStateNormal];
        button.contentHorizontalAlignment = UIControlContentHorizontalAlignmentLeft;
        button.contentVerticalAlignment = UIControlContentVerticalAlignmentBottom;
        button.imageEdgeInsets = UIEdgeInsetsMake(0, 0, 0, 5);
        button.titleEdgeInsets = UIEdgeInsetsMake(0, 5, 0, 0);
        _replyBtnNode = button;
    }
    return _replyBtnNode;
}

- (UIView *)underLineNode
{
    if (_underLineNode == nil) {
        _underLineNode = [[UIView alloc] init];
        _underLineNode.backgroundColor = RGB(222, 222, 222);
    }
    return _underLineNode;
}

@end
| {'content_hash': '4338b3a5013cf248dc3fe7f080f19ade', 'timestamp': '', 'source': 'github', 'line_count': 132, 'max_line_length': 120, 'avg_line_length': 32.68939393939394, 'alnum_prop': 0.6987253765932793, 'repo_name': 'YunsChou/LovePlay', 'id': '3b650ab015f817168866265dd12137c150972348', 'size': '4511', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'LovePlay-UIKit/LovePlay/Class/News/View/NewsNormalCellNode.m', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '30997'}, {'name': 'C++', 'bytes': '1308'}, {'name': 'CSS', 'bytes': '4590'}, {'name': 'DTrace', 'bytes': '412'}, {'name': 'Objective-C', 'bytes': '13743125'}, {'name': 'Objective-C++', 'bytes': '1171232'}, {'name': 'Ruby', 'bytes': '984'}, {'name': 'Shell', 'bytes': '52108'}, {'name': 'Swift', 'bytes': '892253'}]} |
using System;
using System.Text;
using Simple.Xml.Structure.Constructs;
namespace Simple.Xml.Structure.Output
{
/// <summary>
/// Decorator around an <see cref="IXmlBuilder"/> that pretty-prints the
/// produced XML: every tag after the first starts on its own line,
/// indented four spaces per nesting level.
/// </summary>
public class PrettyPrintStringXmlBuilder : IXmlBuilder
{
    private const char IndentationCharacter = ' ';

    private readonly IXmlBuilder decorated;
    private readonly StringBuilder output;
    private int depth;

    public PrettyPrintStringXmlBuilder(IXmlBuilder xmlBuilder, StringBuilder stringBuilder)
    {
        if (xmlBuilder == null)
        {
            throw new ArgumentNullException(nameof(xmlBuilder));
        }
        if (stringBuilder == null)
        {
            throw new ArgumentNullException(nameof(stringBuilder));
        }
        this.decorated = xmlBuilder;
        this.output = stringBuilder;
        this.depth = 0;
    }

    public void WriteStartTagFor(Tag tag)
    {
        // Every tag except the very first one begins on a fresh line.
        if (output.Length != 0)
        {
            output.AppendLine();
        }
        WriteIndentation();
        decorated.WriteStartTagFor(tag);
        depth++;
    }

    public void WriteEndTag()
    {
        BreakLineIfNested();
        depth--;
        WriteIndentation();
        decorated.WriteEndTag();
    }

    public void WriteContent(string content)
    {
        BreakLineIfNested();
        WriteIndentation();
        decorated.WriteContent(content);
    }

    public void UseNamespaces(Namespaces namespaces)
    {
        decorated.UseNamespaces(namespaces);
    }

    public override string ToString()
    {
        return decorated.ToString();
    }

    // Emit four indentation characters per nesting level.
    private void WriteIndentation()
    {
        output.Append(IndentationCharacter, depth * 4);
    }

    // Start a new line, except at the document root.
    private void BreakLineIfNested()
    {
        if (depth > 0)
        {
            output.AppendLine();
        }
    }
}
} | {'content_hash': '831881ebd495097943730a41c94c7994', 'timestamp': '', 'source': 'github', 'line_count': 90, 'max_line_length': 95, 'avg_line_length': 24.977777777777778, 'alnum_prop': 0.5387010676156584, 'repo_name': 'baks/Simple.Xml', 'id': '6282ce97a7da02569c4be7cd23b2859a4376854e', 'size': '2248', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Simple.Xml/Simple.Xml/Output/PrettyPrintStringXmlBuilder.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C#', 'bytes': '88906'}]} |
**THIS IS ALPHA, UNSTABLE, WORK IN PROGRESS !**
[](http://badge.fury.io/js/zuvor)
[](https://travis-ci.org/snd/zuvor/branches)
[](https://david-dm.org/snd/zuvor)
> simple and reasonably fast implementation of DAGs (directed acyclic graphs) and sets
> as building blocks for dynamically finding the optimal execution order
> of interdependent tasks with nondeterministic execution times
each task runs at the earliest possible point at which it can run
- [is it any good?](#is-it-any-good)
- [how do i install it?](#how-do-i-install-it)
- [Set-API](#set-api)
- [Dag-API](#dag-api)
- [is it fast?](#is-it-fast)
- [is it stable?](#is-it-stable)
- [how is it implemented?](#how-is-it-implemented)
- [how can i contribute?](#how-can-i-contribute)
- [license](#license-mit)
### why?
i had a list of tasks where some tasks needed to run before other tasks.
for example: `A before B`, `C before A`, `D before B`, ...
i needed a programatic way to run those tasks in the most efficient order.
i built *vorrang* to model and query the underlying [partial order](http://en.wikipedia.org/wiki/Partially_ordered_set):
zuvor gives you the building blocks to do exactly that.
browser?
vorrang can be used to model
- shutdown orders
- task orders
- class hierarchies
- ancestor relationships
- taxonomies
- partial orders
- event orders
- production chains
- dependency graphs
- ...
no dependencies
first let's model the task order:
only works with strings
``` js
var Vorrang = require('vorrang');
var dag = new Vorrang()
.before('A', 'B')
.before('C', 'A')
.before('D', 'B');
```
then i can find out what i can run immediately:
``` js
dag.minElements();
// => ['C', 'D']
```
sweet - we can already start tasks `C` and `D`.
let's assume `C` has finished.
``` js
vorrang.minUpperBound(dag, ['C', 'D'])
// => ['A']
```
nice - we can start `A` now!
you get the idea...
[see the full example again](example.js)
### what can i do with it?
### is it stable?
[](https://travis-ci.org/snd/zuvor/branches)
it has a large [testsuite](test)!
there are probably bugs.
if you find one i appreciate it if you [make an issue](https://github.com/snd/zuvor/issues/new).
### is it fast?
the current focus is on functionality and correctness rather than performance.
i did not optimize prematurely.
i chose the data types carefully.
its fast enough for my use case.
its probably fast enough for your use case.
if you need something faster and have an idea definitely send me an email or make an issue.
query performance
memory usage
it currently uses too much memory
### how is it implemented?
here's the code its just x lines
``` js
parents
children
ancestors
descendants
in
out
upstream
downstream
```
### how can i contribute?
if you need a function that is not in the API just [make an issue](https://github.com/snd/zuvor/issues/new)
and we can discuss and how to implement it best.
i appreciate it if you open an issue first before
## API
### `run(options)` -> `Promise`
run will call `callback` once for every id in `ids` that is not in `done`.
call them
options:
- `ids` an `Array` or [`Set`](#set) of ids to run
- `callback` a `Function` that is called for each id that is not in `done`
- can return a promise
- `graph` a `Graph` (optional) that models the dependencies/order between the `ids`
- `done` an *optional* `Set` (default `new Set()`) that contains
- ids that are done are added to the set
- can be used to blacklist `ids`
- things that are already done are not run
- `pending` an *optional* `Set` (default `new Set()`) that contains the ids
that have been called and have returned a promise that is not yet resolved
- `ids` in this set will not be called. can be used to blacklist `ids`.
- `reversed` a `Boolean` (optional, default `false`) whether to treat the `graph` (if present) in reverse order
- `strict` an *optional* `Boolean` (default `false`)
- `debug`
when not strict, orderings that don't exist are ignored
order between some of them
run returns a promise that is resolved when all things have been run
### `Set`
follows the ECMA6 set API where sensible.
##### create a set: `new Set(Nothing or Array or Set)` -> `Set`
``` js
var emptySet = new Set();
// or
var setFromArgs = new Set(1, 2, 3);
// or
var setFromArray = new Set([1, 2, 3]);
// or
var clonedSet = new Set(setFromArray);
```
*O(n) where n = number of elements in argument array or set*
##### number of elements in the set: `.size` = `Integer`
``` js
new Set().size; // -> 0
new Set(1, 2, 3).size; // -> 3
```
*O(1)*
##### return an array of all the elements in the set: `.values()` or `.keys()` -> `Array`
``` js
new Set().values(); // -> []
new Set(1, 2, 3).values(); // -> [1, 2, 3]
new Set(1, 2, 3).keys(); // -> [1, 2, 3]
```
*O(n) where n = number of elements in the set*
##### return a string representation of the set: `.toString()` -> `String`
``` js
new Set().toString(); // -> '#{}'
new Set(1, 2, 3).toString(); // -> '#{1 2 3}'
```
*O(n) where n = number of elements in the set*
##### return whether two sets contain the same elements: `.equals(Set or Array)` -> `Boolean`
``` js
new Set().equals(new Set()); // -> true
new Set().equals(new Set(1, 2, 3)); // -> false
var set = new Set(1, 2, 3);
set.equals(new Set(1, 2)); // -> false
set.equals(new Set(1, 2, 3)); // -> true
set.equals(set); // -> true
set.equals([1, 2, 3]); // -> true
set.equals([1, 2]); // -> false
```
*best case if size differs is O(1). worst case is O(n) where n = number of elements in the set*
##### return whether a value is in the set: `.has(Value)` -> `Boolean`
``` js
var set = new Set(1, 2, 3);
set.has(1); // -> true
set.has(4); // -> false
```
*O(1)*
##### add elements to the set and return set: `.add(Value or Array or Set)` -> `Set`
``` js
var set = new Set();
set.add(1);
// add side effects original set!
set.values(); // -> [1]
set.add(2, 3);
set.values(); // -> [1, 2, 3]
set.add([4, 5]);
set.values(); // -> [1, 2, 3, 4, 5]
set.add(new Set([6, 7]));
set.values(); // -> [1, 2, 3, 4, 5, 6, 7]
// add can be chained
set
.add(8)
.add(9)
.add(10);
set.values(); // -> [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
```
*O(1) for a single value. O(n) for a set (array) where n = number of elements in the set (array)*
##### delete elements from the set and return set: `.delete(Value or Array or Set)` -> `Set`
``` js
var set = new Set(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
set.delete(1);
// delete mutates the original set!
set.values(); // -> [2, 3, 4, 5, 6, 7, 8, 9, 10]
set.delete(2, 3);
set.values(); // -> [4, 5, 6, 7, 8, 9, 10]
set.delete([4, 5]);
set.values(); // -> [6, 7, 8, 9, 10]
set.delete(new Set([6, 7]));
set.values(); // -> [8, 9, 10]
// delete can be chained
set
.delete(8)
.delete(9)
.delete(10);
set.values(); // -> []
```
*O(1) for a single value. O(n) for a set (array) where n = number of elements in the set (array)*
##### return a new set that has the same elements as the set: `.clone()` -> `Set`
``` js
var set = new Set(1, 2, 3);
var clone = set.clone();
set.equals(clone); // -> true
```
*O(n) where n = number of elements in the set*
##### delete all elements from the set and return set: `.clear()` -> `Set`
``` js
var set = new Set(1, 2, 3);
set.size; // -> 3
set.clear();
set.size; // -> 0
```
*O(1)*
### `Graph`
`Value` = `String` or `Number`
##### create a graph: `new Graph` -> `Graph`
``` js
var graph = new Graph();
```
*O(1)*
##### add an edge and return graph: `.add(from Value, to Value)` -> `Graph`
``` js
var graph = new Graph()
.add('a', 'b')
.add('b', 'c')
.add('a', 'c');
```
*O(1)*
##### return whether node `a` or path from `a` to `b` exists: `.has(a Value, [b Value])` -> `Boolean`
``` js
var graph = new Graph()
.add('a', 'b')
.add('b', 'c')
graph.has('a'); // -> true
graph.has('d'); // -> false
graph.has('a', 'b'); // -> true
graph.has('b', 'a'); // -> false
// transitive path: a to c via b
graph.has('a', 'c'); // -> true
```
*whether node exists: O(1)*
*if direct edge between a and b exists: O(1)*
*if a transitive path between a and b exists:
worst case O(n * m) where n is the number of edges in the path and m is the max number of edges connected to any node in the graph.*
##### return an array of all the nodes in the graph: `.values()` or `.keys()` -> `Array`
``` js
new Graph().values(); // -> []
new Graph()
.add('a', 'b')
.values(); // -> ['a', 'b']
new Graph()
.add('a', 'b')
.keys(); // -> ['a', 'b']
```
*O(n) where n = number of nodes in the graph*
##### return an array of all the edges in the graph: `.edges()` -> `Array`
``` js
new Graph().edges(); // -> []
new Graph()
.add('a', 'b')
.edges(); // -> [['a', 'b']]
```
*O(n) where n = number of edges in the graph*
##### return nodes that have no parents (no incoming edges): `.parentless()` -> `Array`
``` js
new Graph()
.add('a', 'b')
.parentless(); // -> ['a']
```
*O(n) where n = number of nodes in the graph*
##### return nodes that have no children (no outgoing edges): `.childless()` -> `Array`
``` js
new Graph()
.add('a', 'b')
.childless(); // -> ['b']
```
*O(n) where n = number of nodes in the graph*
##### return nodes whose parents are all in array: `.whereAllParentsIn(Array or Set)` -> `Array`
``` js
var graph = new Graph()
.add('a', 'b')
.add('a', 'c')
.add('c', 'd')
.add('b', 'd')
graph.whereAllParentsIn(['a']); // -> ['b', 'c']
// nodes in the source array are not in the output array
graph.whereAllParentsIn(['a', 'b']); // -> ['c']
graph.whereAllParentsIn(new Set('a', 'b', 'c')); // -> ['d']
```
*worst case: O(n * m) where n = number of elements in the array/set and
m = max number of parents of any node in the array/set*
##### return nodes whose children are all in array: `.whereAllChildrenIn(Array or Set)` -> `Array`
``` js
var graph = new Graph()
.add('a', 'b')
.add('a', 'c')
.add('c', 'd')
.add('b', 'd')
graph.whereAllChildrenIn(['d']); // -> ['c', 'b']
// nodes in the source array are not in the output array
graph.whereAllChildrenIn(['d', 'b']); // -> ['c']
graph.whereAllChildrenIn(new Set('d', 'b', 'c')); // -> ['a']
```
*worst case: O(n * m) where n = number of elements in the array/set and
m = max number of children of any node in the array/set*
## [license: MIT](LICENSE)
## TODO
- handle strictness
- build a scenario where that is a problem
- in graph but not in ids and blockings children that depend on it
- test edge cases of the run function
- document run function in readme
- use zuvor run function from blaze for shutdown
- finish readme
- read api again
- description
- question sections
- example
- example.js (taken from integration test)
- npm publish
- publish
- make sure travis is working
---
uses and includes a set and a graph datatype
where an order exists only for some services
you can use it for
tasks are run as soon as they are ready to run
| {'content_hash': 'e865e63bdbc65c44ee178e01a4b72200', 'timestamp': '', 'source': 'github', 'line_count': 482, 'max_line_length': 132, 'avg_line_length': 25.311203319502074, 'alnum_prop': 0.5681967213114754, 'repo_name': 'snd/zuvor', 'id': '10f3d9d3214d241eacb4efd1eb2cfc58ecb843fa', 'size': '12209', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CoffeeScript', 'bytes': '38155'}, {'name': 'JavaScript', 'bytes': '5720'}]} |
package com.datamountaineer.streamreactor.connect.voltdb.writers
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
/**
 * Spec for PrepareProcedureFieldsFn.
 *
 * NOTE(review): every test case below is commented out, so this spec currently
 * asserts nothing. The first disabled case also compares against an `expected`
 * value whose definition is itself commented out, so it would not compile if
 * re-enabled as-is.
 */
class PrepareProcedureFieldsFnTest extends AnyWordSpec with Matchers {
  "PrepareProcedureFieldsFn" should {
//    "return null for all fields if they are not found" in {
//      val actual = PrepareProcedureFieldsFn(Seq("A", "B", "C"), Map("d" -> 1, "e" -> "aa"))
//      //val expected = Seq(null, null, null)
//      actual shouldBe expected
//    }
//    "return the values in order of the fields" in {
//      val map = Map("A" -> 1, "C" -> Seq(1.toByte, 2.toByte), "B" -> "aa")
//      val actual = PrepareProcedureFieldsFn(Seq("A", "B", "C"), map)
//      val expected = Seq(1, "aa", Seq(1.toByte, 2.toByte))
//      actual shouldBe expected
//    }
  }
}
| {'content_hash': '8df4cf4cae76b775c30b68faf2fe4ec0', 'timestamp': '', 'source': 'github', 'line_count': 23, 'max_line_length': 97, 'avg_line_length': 37.608695652173914, 'alnum_prop': 0.6115606936416185, 'repo_name': 'datamountaineer/stream-reactor', 'id': '6e1c5ec108cff0b78e0baa1bca9425a35d4ee425', 'size': '1460', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'kafka-connect-voltdb/src/test/scala/com/datamountaineer/streamreactor/connect/voltdb/writers/PrepareProcedureFieldsFnTest.scala', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '11639'}, {'name': 'Scala', 'bytes': '1693832'}, {'name': 'Smarty', 'bytes': '393'}]} |
class GURL;
class Profile;
namespace content {
struct NotificationResources;
} // namespace content
// The platform notification service is the profile-specific entry point through
// which Web Notifications can be controlled.
class PlatformNotificationServiceImpl
    : public content::PlatformNotificationService,
      public content_settings::Observer,
      public KeyedService {
 public:
  explicit PlatformNotificationServiceImpl(Profile* profile);
  ~PlatformNotificationServiceImpl() override;

  // Register profile-specific prefs.
  static void RegisterProfilePrefs(user_prefs::PrefRegistrySyncable* registry);

  // Returns whether the notification identified by |notification_id| was
  // closed programmatically through ClosePersistentNotification().
  bool WasClosedProgrammatically(const std::string& notification_id);

  // content::PlatformNotificationService implementation.
  void DisplayNotification(
      const std::string& notification_id,
      const GURL& origin,
      const blink::PlatformNotificationData& notification_data,
      const blink::NotificationResources& notification_resources) override;
  void DisplayPersistentNotification(
      const std::string& notification_id,
      const GURL& service_worker_scope,
      const GURL& origin,
      const blink::PlatformNotificationData& notification_data,
      const blink::NotificationResources& notification_resources) override;
  void CloseNotification(const std::string& notification_id) override;
  void ClosePersistentNotification(const std::string& notification_id) override;
  void GetDisplayedNotifications(
      DisplayedNotificationsCallback callback) override;
  void ScheduleTrigger(base::Time timestamp) override;
  base::Time ReadNextTriggerTimestamp() override;
  int64_t ReadNextPersistentNotificationId() override;
  void RecordNotificationUkmEvent(
      const content::NotificationDatabaseData& data) override;

  // Testing hook: |closure| is stored and observed once a UKM event has been
  // recorded (see |ukm_recorded_closure_for_testing_| below).
  void set_ukm_recorded_closure_for_testing(base::OnceClosure closure) {
    ukm_recorded_closure_for_testing_ = std::move(closure);
  }

  // Returns the scheduler for notifications that carry a trigger timestamp.
  NotificationTriggerScheduler* GetNotificationTriggerScheduler();

 private:
  friend class NotificationTriggerSchedulerTest;
  friend class PersistentNotificationHandlerTest;
  friend class PlatformNotificationServiceBrowserTest;
  friend class PlatformNotificationServiceTest;
  friend class PushMessagingBrowserTest;

  FRIEND_TEST_ALL_PREFIXES(PlatformNotificationServiceTest,
                           CreateNotificationFromData);
  FRIEND_TEST_ALL_PREFIXES(PlatformNotificationServiceTest,
                           DisplayNameForContextMessage);
  FRIEND_TEST_ALL_PREFIXES(PlatformNotificationServiceTest,
                           RecordNotificationUkmEvent);

  // KeyedService implementation.
  void Shutdown() override;

  // content_settings::Observer implementation.
  void OnContentSettingChanged(const ContentSettingsPattern& primary_pattern,
                               const ContentSettingsPattern& secondary_pattern,
                               ContentSettingsType content_type,
                               const std::string& resource_identifier) override;

  // NOTE(review): appears to record the UKM event for |data| once a background
  // |source_id| has been resolved, then run |recorded_closure| — confirm
  // against the .cc file.
  static void DidGetBackgroundSourceId(
      base::OnceClosure recorded_closure,
      const content::NotificationDatabaseData& data,
      base::Optional<ukm::SourceId> source_id);

  // Creates a new Web Notification-based Notification object. Should only be
  // called when the notification is first shown.
  message_center::Notification CreateNotificationFromData(
      const GURL& origin,
      const std::string& notification_id,
      const blink::PlatformNotificationData& notification_data,
      const blink::NotificationResources& notification_resources) const;

  // Returns a display name for an origin, to be used in the context message
  base::string16 DisplayNameForContextMessage(const GURL& origin) const;

  // Clears |closed_notifications_|. Should only be used for testing purposes.
  void ClearClosedNotificationsForTesting() { closed_notifications_.clear(); }

  // The profile for this instance or NULL if the initial profile has been
  // shutdown already.
  Profile* profile_;

  // Tracks the id of persistent notifications that have been closed
  // programmatically to avoid dispatching close events for them.
  std::unordered_set<std::string> closed_notifications_;

  // Scheduler for notifications with a trigger.
  std::unique_ptr<NotificationTriggerScheduler> trigger_scheduler_;

  // Testing-only closure to observe when a UKM event has been recorded.
  base::OnceClosure ukm_recorded_closure_for_testing_;

  DISALLOW_COPY_AND_ASSIGN(PlatformNotificationServiceImpl);
};
#endif // CHROME_BROWSER_NOTIFICATIONS_PLATFORM_NOTIFICATION_SERVICE_IMPL_H_
| {'content_hash': 'a58ec8210b50f3e4bed8e8748a23e546', 'timestamp': '', 'source': 'github', 'line_count': 111, 'max_line_length': 80, 'avg_line_length': 42.67567567567568, 'alnum_prop': 0.7610301878826261, 'repo_name': 'endlessm/chromium-browser', 'id': 'c7ee3ff8b5dff3189040fc3281754090adbfa22e', 'size': '5810', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'chrome/browser/notifications/platform_notification_service_impl.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []} |
package se.vidstige.jadb;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Represents one device reachable through an ADB server (or "any" device when
 * no serial is set), exposing shell execution and file push/pull operations.
 */
public class JadbDevice {
    private final String serial;
    private final ITransportFactory transportFactory;

    JadbDevice(String serial, String type, ITransportFactory tFactory) {
        // NOTE(review): the `type` argument is currently unused; it is kept to
        // preserve the package-visible constructor signature.
        this.serial = serial;
        this.transportFactory = tFactory;
    }

    static JadbDevice createAny(JadbConnection connection) {
        return new JadbDevice(connection);
    }

    private JadbDevice(ITransportFactory tFactory) {
        serial = null;
        this.transportFactory = tFactory;
    }

    /**
     * Opens a transport to the ADB server and switches it to this device
     * ("transport-any" when no serial is set).
     */
    private Transport getTransport() throws IOException, JadbException {
        Transport transport = transportFactory.createTransport();
        // Hoisted the duplicated verifyResponse() out of both branches.
        if (serial == null) {
            transport.send("host:transport-any");
        } else {
            transport.send("host:transport:" + serial);
        }
        transport.verifyResponse();
        return transport;
    }

    public String getSerial() {
        return serial;
    }

    /** Returns the connection state reported by the ADB server for this device. */
    public String getState() throws IOException, JadbException {
        Transport transport = getTransport();
        transport.send("get-state");
        transport.verifyResponse();
        return transport.readString();
    }

    /** Runs {@code command} in the device shell, discarding its output. */
    public void executeShell(String command, String... args) throws IOException, JadbException {
        executeShell(null, command, args);
    }

    /**
     * Runs {@code command} in the device shell, copying its output to
     * {@code stdout}; {@code stdout} may be null to discard the output.
     */
    public void executeShell(OutputStream stdout, String command, String... args) throws IOException, JadbException {
        Transport transport = getTransport();
        StringBuilder shellLine = new StringBuilder(command);
        for (String arg : args) {
            shellLine.append(" ");
            // TODO: throw if arg contains double quote
            // TODO: quote arg if it contains space
            shellLine.append(arg);
        }
        send(transport, "shell:" + shellLine.toString());
        if (stdout != null) {
            transport.readResponseTo(new AdbFilterOutputStream(stdout));
        }
    }

    /** Lists the directory entries at {@code remotePath} on the device. */
    public List<RemoteFile> list(String remotePath) throws IOException, JadbException {
        Transport transport = getTransport();
        SyncTransport sync = transport.startSync();
        sync.send("LIST", remotePath);
        List<RemoteFile> result = new ArrayList<RemoteFile>();
        for (RemoteFileRecord dent = sync.readDirectoryEntry(); dent != RemoteFileRecord.DONE; dent = sync.readDirectoryEntry()) {
            result.add(dent);
        }
        return result;
    }

    private int getMode(File file) {
        // Fixed rw-rw-r-- permissions; the local file's mode is not inspected.
        //noinspection OctalInteger
        return 0664;
    }

    /** Streams {@code source} to {@code remote} with the given mtime and mode. */
    public void push(InputStream source, long lastModified, int mode, RemoteFile remote) throws IOException, JadbException {
        Transport transport = getTransport();
        SyncTransport sync = transport.startSync();
        sync.send("SEND", remote.getPath() + "," + Integer.toString(mode));
        sync.sendStream(source);
        sync.sendStatus("DONE", (int) lastModified);
        sync.verifyStatus();
    }

    /** Pushes {@code local} to {@code remote}, closing the local file even on failure. */
    public void push(File local, RemoteFile remote) throws IOException, JadbException {
        FileInputStream fileStream = new FileInputStream(local);
        // Fixed: the stream leaked if push() threw before the close() call.
        try {
            push(fileStream, local.lastModified(), getMode(local), remote);
        } finally {
            fileStream.close();
        }
    }

    /** Streams the contents of {@code remote} into {@code destination}. */
    public void pull(RemoteFile remote, OutputStream destination) throws IOException, JadbException {
        Transport transport = getTransport();
        SyncTransport sync = transport.startSync();
        sync.send("RECV", remote.getPath());
        sync.readChunksTo(destination);
    }

    /** Pulls {@code remote} into {@code local}, closing the local file even on failure. */
    public void pull(RemoteFile remote, File local) throws IOException, JadbException {
        FileOutputStream fileStream = new FileOutputStream(local);
        // Fixed: the stream leaked if pull() threw before the close() call.
        try {
            pull(remote, fileStream);
        } finally {
            fileStream.close();
        }
    }

    // Sends one command and checks the server's OKAY/FAIL response.
    private void send(Transport transport, String command) throws IOException, JadbException {
        transport.send(command);
        transport.verifyResponse();
    }

    @Override
    public String toString() {
        return "Android Device with serial " + serial;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((serial == null) ? 0 : serial.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        JadbDevice other = (JadbDevice) obj;
        if (serial == null) {
            if (other.serial != null)
                return false;
        } else if (!serial.equals(other.serial))
            return false;
        return true;
    }
}
| {'content_hash': 'b5477c7001d46111861688ae7447c28d', 'timestamp': '', 'source': 'github', 'line_count': 149, 'max_line_length': 130, 'avg_line_length': 32.36241610738255, 'alnum_prop': 0.6289921194525093, 'repo_name': 'tbognar76/APKing', 'id': 'd86d3aeb8b3acf7863c31849f5ac624fb4c78bc5', 'size': '4822', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/se/vidstige/jadb/JadbDevice.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '146'}, {'name': 'HTML', 'bytes': '1376'}, {'name': 'Java', 'bytes': '103014'}, {'name': 'Shell', 'bytes': '152'}]} |
[](https://app.wercker.com/project/bykey/b741bbe7bbe8a8d42ae60dacece7d064)
A reliable, Redis-based simple queue.
| {'content_hash': 'fe40c28ee20c3509db39baa787387701', 'timestamp': '', 'source': 'github', 'line_count': 3, 'max_line_length': 175, 'avg_line_length': 70.33333333333333, 'alnum_prop': 0.8246445497630331, 'repo_name': 'canthefason/r2dq', 'id': '7f98b03d6bc7afcd6faf2dfdc75809aa4113e74c', 'size': '218', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Go', 'bytes': '7783'}]} |
package it.unibz.inf.ontop.dbschema;
import com.google.common.collect.ImmutableList;
import it.unibz.inf.ontop.iq.IQ;
import javax.annotation.Nonnull;
/**
 * A relation ("Ontop view") defined by an intermediate query (IQ) over
 * lower-level relations.
 *
 * Ontop view definitions are temporarily mutable, until their IQs are
 * stabilized by {@link #freeze()}.
 */
public interface OntopViewDefinition extends NamedRelationDefinition {

    /**
     * IQ defining this view in terms of lower-level relations.
     */
    IQ getIQ();

    /**
     * Level must be at least 1 for an Ontop view (0 refers to database relations).
     *
     * A level-1 view is defined from database relations only.
     *
     * A level-2 view is defined from at least one level-1 Ontop view and from
     * no view of a higher level.
     */
    int getLevel();

    /**
     * Replaces the defining IQ.
     *
     * If called after freezing, throws an IllegalStateException.
     */
    void updateIQ(@Nonnull IQ newIQ) throws IllegalStateException;

    /**
     * After freezing, the IQ cannot be changed anymore.
     */
    void freeze();
}
| {'content_hash': '5514d6aa291c601b0274066f0546a015', 'timestamp': '', 'source': 'github', 'line_count': 38, 'max_line_length': 89, 'avg_line_length': 24.657894736842106, 'alnum_prop': 0.6723585912486659, 'repo_name': 'ontop/ontop', 'id': '5fdb4e68dfe0262ba76dad5a7a8e67d2c4f02058', 'size': '937', 'binary': False, 'copies': '1', 'ref': 'refs/heads/version4', 'path': 'core/model/src/main/java/it/unibz/inf/ontop/dbschema/OntopViewDefinition.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ANTLR', 'bytes': '8054'}, {'name': 'Batchfile', 'bytes': '524'}, {'name': 'CSS', 'bytes': '2859'}, {'name': 'Dockerfile', 'bytes': '1577'}, {'name': 'FreeMarker', 'bytes': '1846'}, {'name': 'HTML', 'bytes': '1428712'}, {'name': 'Java', 'bytes': '8862515'}, {'name': 'JavaScript', 'bytes': '4759'}, {'name': 'Ruby', 'bytes': '28650'}, {'name': 'Shell', 'bytes': '24861'}, {'name': 'TSQL', 'bytes': '3316'}, {'name': 'TeX', 'bytes': '5188'}, {'name': 'XSLT', 'bytes': '19056'}, {'name': 'q', 'bytes': '93020'}]} |
<?php
// Emit the greeting to standard output.
echo "Hello World";
?> | {'content_hash': 'afbef7bf4a9662d1612efc2e26428766', 'timestamp': '', 'source': 'github', 'line_count': 3, 'max_line_length': 22, 'avg_line_length': 10.333333333333334, 'alnum_prop': 0.5806451612903226, 'repo_name': 'saxmanbf/HelloWorld', 'id': '83b08c3d42c64500aee6f401abdad73eb5265a4c', 'size': '31', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'hello.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Go', 'bytes': '72'}, {'name': 'Java', 'bytes': '114'}, {'name': 'JavaScript', 'bytes': '27'}, {'name': 'PHP', 'bytes': '31'}, {'name': 'Python', 'bytes': '20'}, {'name': 'Rebol', 'bytes': '21'}, {'name': 'Ruby', 'bytes': '20'}]} |
Add multiple categories
=======================
Routing example for a collection called `label`.
.. code-block:: yaml
#routing
enhavo_category_label_index:
options:
expose: true
path: /enhavo/category/label/index
methods: [GET]
defaults:
_controller: enhavo_category.controller.category:indexAction
_sylius:
template: EnhavoAppBundle:Resource:index.html.twig
_viewer:
title: category.label.category
translationDomain: EnhavoCategoryBundle
actions:
create:
type: create
route: enhavo_category_label_create
blocks:
table:
type: table
table_route: enhavo_category_label_table
enhavo_category_label_create:
options:
expose: true
path: /enhavo/category/label/create
methods: [GET,POST]
defaults:
_controller: enhavo_category.controller.category:createAction
_sylius:
template: EnhavoAppBundle:Resource:create.html.twig
factory:
method: createWithCollection
arguments:
collection: label
_viewer:
translationDomain: EnhavoCategoryBundle
buttons:
save:
route: enhavo_category_label_create
tabs:
category:
label: category.label.category
template: EnhavoCategoryBundle:Tab:category.html.twig
enhavo_category_label_table:
options:
expose: true
path: /enhavo/category/label/table/{page}
methods: [GET]
defaults:
page: 1
_controller: enhavo_category.controller.category:tableAction
_sylius:
template: EnhavoAppBundle:Resource:table.html.twig
repository:
method: findByCollection
criteria:
collection: label
paging: true
_viewer:
translationDomain: EnhavoCategoryBundle
table:
width: 12
columns:
id:
label: category.label.id
property: id
name:
width: 11
label: category.label.name
property: name
| {'content_hash': 'eeec606a54b96ccdfa835d805852636c', 'timestamp': '', 'source': 'github', 'line_count': 80, 'max_line_length': 77, 'avg_line_length': 33.7625, 'alnum_prop': 0.47278785634950016, 'repo_name': 'npakai/enhavo', 'id': 'e20cfc6fe20f86a7af9006568680222607337361', 'size': '2701', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'docs/source/guides/category/add-multiple-categories.rst', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '81970'}, {'name': 'Dockerfile', 'bytes': '2165'}, {'name': 'Gherkin', 'bytes': '11978'}, {'name': 'HTML', 'bytes': '195018'}, {'name': 'JavaScript', 'bytes': '3103'}, {'name': 'Makefile', 'bytes': '233'}, {'name': 'PHP', 'bytes': '2118492'}, {'name': 'Ruby', 'bytes': '1164'}, {'name': 'Shell', 'bytes': '579'}, {'name': 'TypeScript', 'bytes': '14563'}]} |
//
// LoadingTitleView.m
// MVCHub
//
// Created by daniel on 2016/10/23.
// Copyright © 2016年 Daniel. All rights reserved.
//
#import "LoadingTitleView.h"
// Private properties: the activity spinner and the "Loading" label that are
// laid out side by side inside the view.
@interface LoadingTitleView ()

@property (nonatomic, strong) UIActivityIndicatorView *activityIndicatorView;
@property (nonatomic, strong) UILabel *title;

@end
@implementation LoadingTitleView

#pragma mark - Init

- (instancetype)init {
    self = [super init];
    if (self) {
        [self initialize];
    }
    return self;
}

- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self initialize];
    }
    return self;
}

// Builds the subview hierarchy: a 20x20 spinner pinned to the left edge and
// the title label 4pt to its right, both vertically centered.
- (void)initialize {
    [self addSubview:self.activityIndicatorView];
    [self.activityIndicatorView mas_makeConstraints:^(MASConstraintMaker *make) {
        make.size.mas_equalTo(CGSizeMake(20, 20));
        make.left.equalTo(self.mas_left);
        make.centerY.equalTo(self);
    }];
    
    [self addSubview:self.title];
    [self.title mas_makeConstraints:^(MASConstraintMaker *make) {
        make.left.equalTo(self.activityIndicatorView.mas_right).offset(4);
        make.right.equalTo(self.mas_right);
        make.centerY.equalTo(self);
    }];
}

#pragma mark - Getter

// Fixed: the getter previously declared a `UIView *` return type while the
// property is declared `UIActivityIndicatorView *`; accessor signatures must
// match the property type.
- (UIActivityIndicatorView *)activityIndicatorView {
    if (!_activityIndicatorView) {
        // Lazily created and started immediately so the spinner animates as
        // soon as it is added to the view hierarchy.
        _activityIndicatorView = [[UIActivityIndicatorView alloc] initWithFrame:CGRectMake(0, 0, 20, 20)];
        [_activityIndicatorView startAnimating];
    }
    return _activityIndicatorView;
}

- (UILabel *)title {
    if (!_title) {
        _title = [[UILabel alloc] init];
        _title.text = @"Loading";
        _title.textColor = [UIColor whiteColor];
    }
    return _title;
}

/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
    // Drawing code
}
*/

@end
| {'content_hash': 'b8a3d98ef174faa1bfd65ba882c10794', 'timestamp': '', 'source': 'github', 'line_count': 80, 'max_line_length': 106, 'avg_line_length': 23.6625, 'alnum_prop': 0.6640253565768621, 'repo_name': 'QiuDaniel/MVCHub', 'id': '348e76f195cd2c569c91cce9efe0f4dc97cb75cb', 'size': '1896', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'MVCHub/MVCHub/Views/LoadingTitleView.m', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Awk', 'bytes': '2666'}, {'name': 'C', 'bytes': '73331'}, {'name': 'C++', 'bytes': '54687'}, {'name': 'CSS', 'bytes': '7851'}, {'name': 'DTrace', 'bytes': '412'}, {'name': 'HTML', 'bytes': '769678'}, {'name': 'JavaScript', 'bytes': '1147721'}, {'name': 'Makefile', 'bytes': '1385'}, {'name': 'Objective-C', 'bytes': '8363755'}, {'name': 'Objective-C++', 'bytes': '164109'}, {'name': 'Python', 'bytes': '1063'}, {'name': 'Ruby', 'bytes': '11710'}, {'name': 'Shell', 'bytes': '56184'}, {'name': 'Swift', 'bytes': '819015'}]} |
package com.fasterxml.jackson.core.json;
import java.io.*;
import com.fasterxml.jackson.core.*;
/**
 * Tests for reading and writing a sequence of root-level JSON values
 * (values separated only by whitespace, with no enclosing array or object).
 */
public class TestRootValues
    extends com.fasterxml.jackson.core.BaseTest
{
    private final JsonFactory JSON_F = new JsonFactory();

    public void testSimpleNumbers() throws Exception
    {
        _testSimpleNumbers(false);
        _testSimpleNumbers(true);
    }

    private void _testSimpleNumbers(boolean useStream) throws Exception
    {
        // Root-level values may be separated by spaces, tabs, CR, LF or CRLF.
        final String DOC = "1 2\t3\r4\n5\r\n6\r\n 7";
        JsonParser jp = useStream ?
                createParserUsingStream(JSON_F, DOC, "UTF-8")
                : createParserUsingReader(JSON_F, DOC);
        for (int i = 1; i <= 7; ++i) {
            assertToken(JsonToken.VALUE_NUMBER_INT, jp.nextToken());
            assertEquals(i, jp.getIntValue());
        }
        assertNull(jp.nextToken());
        jp.close();
    }

    public void testBrokenNumber() throws Exception
    {
        _testBrokenNumber(false);
        _testBrokenNumber(true);
    }

    private void _testBrokenNumber(boolean useStream) throws Exception
    {
        JsonFactory f = new JsonFactory();
        // Starts like a number but is not a valid JSON value.
        final String DOC = "14:89:FD:D3:E7:8C";
        JsonParser p = useStream ?
                createParserUsingStream(f, DOC, "UTF-8")
                : createParserUsingReader(f, DOC);
        // Should fail, right away
        try {
            p.nextToken();
            fail("Ought to fail! Instead, got token: "+p.currentToken());
        } catch (JsonParseException e) {
            verifyException(e, "unexpected character");
        }
        p.close();
    }

    public void testSimpleBooleans() throws Exception
    {
        _testSimpleBooleans(false);
        _testSimpleBooleans(true);
    }

    private void _testSimpleBooleans(boolean useStream) throws Exception
    {
        final String DOC = "true false\ttrue\rfalse\ntrue\r\nfalse\r\n true";
        JsonParser jp = useStream ?
                createParserUsingStream(JSON_F, DOC, "UTF-8")
                : createParserUsingReader(JSON_F, DOC);
        // Values alternate true/false, starting with true.
        boolean exp = true;
        for (int i = 1; i <= 7; ++i) {
            assertToken(exp ? JsonToken.VALUE_TRUE : JsonToken.VALUE_FALSE, jp.nextToken());
            exp = !exp;
        }
        assertNull(jp.nextToken());
        jp.close();
    }

    public void testSimpleWrites() throws Exception
    {
        _testSimpleWrites(false);
        _testSimpleWrites(true);
    }

    // Fixed: was declared public, inconsistent with the other _test* helpers
    // in this class, which are all private.
    private void _testSimpleWrites(boolean useStream) throws Exception
    {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        StringWriter w = new StringWriter();
        JsonGenerator gen;

        if (useStream) {
            gen = JSON_F.createGenerator(ObjectWriteContext.empty(), out, JsonEncoding.UTF8);
        } else {
            gen = JSON_F.createGenerator(ObjectWriteContext.empty(), w);
        }
        gen.writeNumber(123);
        gen.writeString("abc");
        gen.writeBoolean(true);
        gen.close();
        out.close();
        w.close();

        // and verify
        String json = useStream ? out.toString("UTF-8") : w.toString();
        assertEquals("123 \"abc\" true", json);
    }
}
| {'content_hash': '134544864cef1a5d605d5824d3f29fab', 'timestamp': '', 'source': 'github', 'line_count': 105, 'max_line_length': 93, 'avg_line_length': 30.409523809523808, 'alnum_prop': 0.588161603507673, 'repo_name': 'weiwenqiang/GitHub', 'id': '596b59925856d5d285555e7434ee9643b09065fb', 'size': '3193', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'expert/jackson-core/src/test/java/com/fasterxml/jackson/core/json/TestRootValues.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '87'}, {'name': 'C', 'bytes': '42062'}, {'name': 'C++', 'bytes': '12137'}, {'name': 'CMake', 'bytes': '202'}, {'name': 'CSS', 'bytes': '75087'}, {'name': 'Clojure', 'bytes': '12036'}, {'name': 'FreeMarker', 'bytes': '21704'}, {'name': 'Groovy', 'bytes': '55083'}, {'name': 'HTML', 'bytes': '61549'}, {'name': 'Java', 'bytes': '42222825'}, {'name': 'JavaScript', 'bytes': '216823'}, {'name': 'Kotlin', 'bytes': '24319'}, {'name': 'Makefile', 'bytes': '19490'}, {'name': 'Perl', 'bytes': '280'}, {'name': 'Prolog', 'bytes': '1030'}, {'name': 'Python', 'bytes': '13032'}, {'name': 'Scala', 'bytes': '310450'}, {'name': 'Shell', 'bytes': '27802'}]} |
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.equalTo;
/**
 * Verifies that a mapping declaring an explicit {@code null} for the
 * {@code null_value} property is rejected with a {@code MapperParsingException}
 * for every field type that supports {@code null_value}.
 */
public class NullValueTests extends ESSingleNodeTestCase {
    public void testNullNullValue() throws Exception {
        IndexService indexService = createIndex("test", Settings.builder().build());
        // All field types exercised here accept a null_value mapping option.
        String[] typesToTest = {"integer", "long", "double", "float", "short", "date", "ip", "keyword", "boolean", "byte", "geo_point"};

        for (String type : typesToTest) {
            String mapping = Strings.toString(XContentFactory.jsonBuilder()
                    .startObject()
                        .startObject("type")
                            .startObject("properties")
                                .startObject("numeric")
                                    .field("type", type)
                                    .field("null_value", (String) null)
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject());

            try {
                indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
                fail("Test should have failed because [null_value] was null.");
            } catch (MapperParsingException e) {
                // The exact message differs per field type; accept either phrasing.
                assertThat(e.getMessage(),
                        either(equalTo("Property [null_value] cannot be null."))
                            .or(containsString("must not have a [null] value")));
            }
        }
    }
}
| {'content_hash': '487f7456538c80cdb7a1c711dbf831f3', 'timestamp': '', 'source': 'github', 'line_count': 44, 'max_line_length': 136, 'avg_line_length': 43.56818181818182, 'alnum_prop': 0.5884194053208138, 'repo_name': 'gingerwizard/elasticsearch', 'id': 'fce744553456a6b912947de1e9977274dcd60dc1', 'size': '2705', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '10862'}, {'name': 'Groovy', 'bytes': '510'}, {'name': 'HTML', 'bytes': '1502'}, {'name': 'Java', 'bytes': '29923429'}, {'name': 'Perl', 'bytes': '264378'}, {'name': 'Perl6', 'bytes': '103207'}, {'name': 'Python', 'bytes': '91186'}, {'name': 'Ruby', 'bytes': '17776'}, {'name': 'Shell', 'bytes': '85779'}]} |
package update
import (
"net/http"
"testing"
"bytes"
"io/ioutil"
. "github.com/bborbe/assert"
booking_authentication "github.com/bborbe/booking/authentication"
booking_handler "github.com/bborbe/booking/handler"
booking_model "github.com/bborbe/booking/model"
booking_shooting "github.com/bborbe/booking/shooting"
server_mock "github.com/bborbe/http/mock"
)
// createProtocol returns a Protocol stub that records nothing.
func createProtocol() Protocol {
	return func(request *http.Request, action string, obj interface{}) {
	}
}
// createGetByToken returns a GetByToken stub that ignores the token and
// always yields the given model and error.
func createGetByToken(model *booking_model.Model, err error) GetByToken {
	return func(token string) (*booking_model.Model, error) {
		return model, err
	}
}
// createHttpRequestToAuthentication returns a stub that ignores the request
// and always yields the given authentication and error.
func createHttpRequestToAuthentication(authentication *booking_authentication.Authentication, err error) HttpRequestToAuthentication {
	return func(request *http.Request) (*booking_authentication.Authentication, error) {
		return authentication, err
	}
}
// createRequest builds a mock GET request carrying the given string as its
// JSON body.
func createRequest(body string) (*http.Request, error) {
	req, err := server_mock.NewHttpRequestMock("")
	if err != nil {
		return nil, err
	}
	// Use the stdlib constant instead of the magic "GET" string.
	req.Method = http.MethodGet
	req.Body = ioutil.NopCloser(bytes.NewBufferString(body))
	return req, nil
}
// createIsParticipant returns an IsParticipant stub that ignores the
// authentication and always reports the given result.
func createIsParticipant(result bool) IsParticipant {
	return func(authentication *booking_authentication.Authentication) bool {
		return result
	}
}
// createUpdate returns an Update stub that ignores its argument and always
// yields the given shooting and error.
func createUpdate(shooting *booking_shooting.Shooting, err error) Update {
	return func(*booking_shooting.Shooting) (*booking_shooting.Shooting, error) {
		return shooting, err
	}
}
// createGet returns a Get stub that ignores the id and always yields the
// given shooting and error.
func createGet(shooting *booking_shooting.Shooting, err error) Get {
	return func(int64) (*booking_shooting.Shooting, error) {
		return shooting, err
	}
}
// TestImplementsHttpHandler verifies that the handler built by New satisfies
// the booking_handler.Handler interface.
func TestImplementsHttpHandler(t *testing.T) {
	token := "token123"
	handler := New(createHttpRequestToAuthentication(&booking_authentication.Authentication{Token: token}, nil), createGetByToken(nil, nil), createUpdate(nil, nil), createGet(nil, nil), createIsParticipant(false), createProtocol())
	var i *booking_handler.Handler
	err := AssertThat(handler, Implements(i))
	if err != nil {
		t.Fatal(err)
	}
}
// TestBookShooting serves a booking request and verifies that the update
// function receives the shooting id and date id parsed from the JSON body.
func TestBookShooting(t *testing.T) {
	// Update stub that asserts on the shooting it is handed.
	verifyingUpdate := func(shooting *booking_shooting.Shooting) (*booking_shooting.Shooting, error) {
		if err := AssertThat(int(shooting.Id), Is(12)); err != nil {
			t.Fatal(err)
		}
		if err := AssertThat(shooting.DateId.Valid, Is(true)); err != nil {
			t.Fatal(err)
		}
		if err := AssertThat(int(shooting.DateId.Int64), Is(13)); err != nil {
			t.Fatal(err)
		}
		return nil, nil
	}
	auth := &booking_authentication.Authentication{Token: "token123"}
	handler := New(createHttpRequestToAuthentication(auth, nil), createGetByToken(nil, nil), verifyingUpdate, createGet(nil, nil), createIsParticipant(false), createProtocol())
	response := server_mock.NewHttpResponseWriterMock()
	request, err := createRequest(`{"id":12,"date_id":13}`)
	if err := AssertThat(err, NilValue()); err != nil {
		t.Fatal(err)
	}
	if err := AssertThat(handler.ServeHTTP(response, request), NilValue()); err != nil {
		t.Fatal(err)
	}
	if err := AssertThat(response.Status(), Is(200)); err != nil {
		t.Fatal(err)
	}
}
| {'content_hash': 'ec32222033f8f6c703062f16eb03aad8', 'timestamp': '', 'source': 'github', 'line_count': 101, 'max_line_length': 228, 'avg_line_length': 30.257425742574256, 'alnum_prop': 0.7280759162303665, 'repo_name': 'bborbe/booking', 'id': '3402bf7c528367fcd407e17d2ac89fb9027aaf58', 'size': '3056', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'shooting/handler/update/booking_shooting_handler_book_test.go', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'CSS', 'bytes': '48427'}, {'name': 'Go', 'bytes': '265477'}, {'name': 'HTML', 'bytes': '3018899'}, {'name': 'JavaScript', 'bytes': '613822'}, {'name': 'Makefile', 'bytes': '842'}, {'name': 'PLpgSQL', 'bytes': '11121'}, {'name': 'Shell', 'bytes': '4894'}]} |
namespace tadpole {
enum class Precedence {
NONE,
ASSIGN, // =
TERM, // - +
FACTOR, // / *
CALL, // ()
PRIMARY,
};
template <typename T> inline Precedence operator+(Precedence a, T b) noexcept {
return as_type<Precedence>(as_type<int>(a) + as_type<int>(b));
}
class GlobalParser;
struct ParseRule {
using ParseFn = std::function<void (GlobalParser& parser, bool can_assign)>;
ParseFn prefix;
ParseFn infix;
Precedence precedence;
};
struct LocalVar {
Token name;
int depth{};
bool is_upvalue{};
LocalVar(const Token& arg_name, int arg_depth = -1, bool arg_upvalue = false) noexcept
: name(arg_name), depth(arg_depth), is_upvalue(arg_upvalue) {
}
};
struct Upvalue {
u8_t index{};
bool is_local{};
Upvalue(u8_t arg_index = 0, bool arg_local = false) noexcept
: index(arg_index), is_local(arg_local) {
}
inline bool operator==(Upvalue r) const noexcept {
return index == r.index && is_local == r.is_local;
}
inline bool operator!=(Upvalue r) const noexcept { return !(*this == r); }
inline bool is_equal(u8_t arg_index, bool arg_local) const noexcept {
return index == arg_index && is_local == arg_local;
}
};
}
| {'content_hash': '0a839e8d5eb25f6f091ce3f90c518412', 'timestamp': '', 'source': 'github', 'line_count': 57, 'max_line_length': 88, 'avg_line_length': 20.964912280701753, 'alnum_prop': 0.6426778242677824, 'repo_name': 'ASMlover/study', 'id': '20ce49f2096bf6061016ca725e0ff74a40982ec0', 'size': '3111', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'cplusplus/Tadpole6/compiler_helper.hh', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'Assembly', 'bytes': '3055440'}, {'name': 'Batchfile', 'bytes': '4662'}, {'name': 'Brainfuck', 'bytes': '571'}, {'name': 'C', 'bytes': '13569580'}, {'name': 'C#', 'bytes': '3959'}, {'name': 'C++', 'bytes': '14741264'}, {'name': 'CMake', 'bytes': '543917'}, {'name': 'CSS', 'bytes': '11505'}, {'name': 'Common Lisp', 'bytes': '114'}, {'name': 'Emacs Lisp', 'bytes': '6042'}, {'name': 'Go', 'bytes': '105203'}, {'name': 'Groovy', 'bytes': '2907'}, {'name': 'HTML', 'bytes': '911945'}, {'name': 'Lex', 'bytes': '9370'}, {'name': 'Lua', 'bytes': '32829'}, {'name': 'Makefile', 'bytes': '1000611'}, {'name': 'NASL', 'bytes': '3609'}, {'name': 'NewLisp', 'bytes': '5805'}, {'name': 'Perl', 'bytes': '594'}, {'name': 'Python', 'bytes': '2752752'}, {'name': 'SWIG', 'bytes': '91'}, {'name': 'Shell', 'bytes': '9993'}, {'name': 'Vim script', 'bytes': '92204'}, {'name': 'Yacc', 'bytes': '6278'}]} |
/**
* @namespace
*/
/**
 * Palette item showing a small preview of a shape; the user can drag it onto
 * the canvas, or click it to drop the shape at the canvas center.
 *
 * @param {string} name  tooltip title for the item
 * @param {string} data  either the literal 'rectangle' or an SVG path string
 * @param {Object} canvas  target canvas; its offset translates drop coordinates
 */
vonline.CategoryItem = function(name, data, canvas) {
	var that = this;
	this.container = $('<div/>').addClass('item').attr('title', name);
	this.size = 50;
	this.padding = 20;
	this.data = null;
	// Raphael paper sized to the preview plus padding on every side.
	this.canvas = Raphael(this.container[0], this.size + 2*this.padding, this.size + 2*this.padding);
	switch (data) {
		case 'rectangle':
			this.canvas.rect(this.padding, this.padding, this.size, this.size);
			this.data = {type: 'rectangle'};
			break;
		default: // path
			var path = this.canvas.path(data),
			bbox = path.getBBox(),
			// scale uniformly so the path fits the preview square
			scale = Math.min(this.size / bbox.width, this.size / bbox.height);
			path.scale(scale, scale, 0, 0);
			bbox = path.getBBox(); // new bbox after scaling
			// move the scaled path into the padded preview area
			path.translate(this.padding-bbox.x, this.padding-bbox.y);
			this.data = {type: 'path', path: path.attr('path')}
			break;
	}
	// Drag-and-drop: clone the item, let it follow the mouse, and on release
	// either drop it on the canvas (if dragged) or center it (if clicked).
	this.container.mousedown(function(event) {
		var x = event.pageX,
		y = event.pageY,
		offset = that.container.offset(),
		// visual clone that follows the cursor during the drag
		element = that.container.clone().css({position:'absolute', zIndex:2, top: offset.top+'px', left: offset.left+'px'}).appendTo(document.body);
		that.wasDragging = false;
		var dragEvent = function(event) {
			that.wasDragging = true;
			element.css({left: (offset.left + event.pageX - x)+'px', top: (offset.top + event.pageY - y)+'px'});
		}
		$(window).mousemove(dragEvent);
		$(window).one('mouseup', function(event) {
			if (that.wasDragging) {
				// Convert page coordinates to canvas coordinates; subtracting the
				// sidebar width presumably maps from page space into canvas space
				// — confirm against vonline.Document's drop handling.
				that.data.x = offset.left + event.pageX - x - $('#sidebar').width() + that.padding - canvas.offset.x;
				that.data.y = offset.top + event.pageY - y + that.padding - canvas.offset.y;
				if (that.data.x > 0) {
					// see vonline.Document
					vonline.events.trigger('drop', that.data);
				}
			}
			else {
				// Plain click: drop the shape at the center of the canvas.
				that.data.x = $('#canvas').width() / 2 - that.size / 2;
				that.data.y = $('#canvas').height() / 2 - that.size / 2
				// see vonline.Document
				vonline.events.trigger('drop', that.data);
			}
			that.wasDragging = false;
			$(window).unbind('mousemove', dragEvent);
			element.detach();
		});
	});
}
/**
 * @return {Object} the item's root container (jQuery-wrapped DOM element)
 */
vonline.CategoryItem.prototype.getHTML = function() {
	return this.container;
}
'use strict';
module.exports = {
configure: function (moduleDescriptor, config) {
var files = {};
if (config === undefined || config === true)
files[moduleDescriptor.cssDebugDistributionFile] = 'src/**/*.less';
else if (typeof config === 'string' || Array.isArray(config))
files[moduleDescriptor.cssDebugDistributionFile] = config;
return {
default: {
files: files,
options: {
strictMath: true,
strictUnits: true
}
}
}
}
};
| {'content_hash': 'a44fe45081d3d104801c46bf4f764eb3', 'timestamp': '', 'source': 'github', 'line_count': 22, 'max_line_length': 79, 'avg_line_length': 27.772727272727273, 'alnum_prop': 0.4909983633387889, 'repo_name': 'benschulz/grunt-commons', 'id': 'fcf44dcd84de2a6c3be2f5d4f74dfa45c34ade5d', 'size': '611', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/adapters/less.js', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'JavaScript', 'bytes': '53406'}]} |
package net.caseif.cubic.util.helper.math;
public class NumberHelper {
public static double clamp(double val, double min, double max) {
return Math.max(min, Math.min(max, val));
}
}
| {'content_hash': 'f8d24cd47806e191a4cabce3d94d68c3', 'timestamp': '', 'source': 'github', 'line_count': 11, 'max_line_length': 68, 'avg_line_length': 18.454545454545453, 'alnum_prop': 0.6748768472906403, 'repo_name': 'caseif/Cubic', 'id': '4d3a62156c5421e58f487386d50df296fb52d319', 'size': '1368', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/net/caseif/cubic/util/helper/math/NumberHelper.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'GLSL', 'bytes': '752'}, {'name': 'Java', 'bytes': '124462'}]} |
using System;
using System.Linq;
using System.Collections.Generic;
using System.Text;
namespace DroppedBoxx.Web
{
    /// <summary>
    /// Representation of an HTTP header: a simple name/value pair.
    /// </summary>
    public class HttpHeader
    {
        /// <summary>
        /// Name of the header (e.g. "Content-Type").
        /// </summary>
        public string Name { get; set; }

        /// <summary>
        /// Value of the header.
        /// </summary>
        public string Value { get; set; }
    }
}
| {'content_hash': '01c397c2abe059c51d534a7bd81bfb98', 'timestamp': '', 'source': 'github', 'line_count': 22, 'max_line_length': 41, 'avg_line_length': 21.454545454545453, 'alnum_prop': 0.5508474576271186, 'repo_name': 'dkarzon/DroppedBoxx', 'id': '5e51688ea9d889c87b7c84d9ad01d2fbdc708896', 'size': '474', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'DroppedBoxx/Web/HttpHeader.cs', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C#', 'bytes': '913953'}]} |
# Dot-source the shared task definitions that live next to this script,
# then invoke the Build task defined there.
. $PSScriptRoot\tasks.ps1
Build
| {'content_hash': '9b311c4d8d110cd4d925388b502de940', 'timestamp': '', 'source': 'github', 'line_count': 3, 'max_line_length': 25, 'avg_line_length': 11.0, 'alnum_prop': 0.7575757575757576, 'repo_name': 'zarusz/SlimMessageBus', 'id': '5d4a7336cd87cde967a6d3550964804371ef8640', 'size': '33', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'build/do_build.ps1', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C#', 'bytes': '830315'}, {'name': 'PowerShell', 'bytes': '3790'}]} |
<component name="ProjectDictionaryState">
<dictionary name="Christian">
<words>
<w>northing</w>
</words>
</dictionary>
</component> | {'content_hash': '9e0982425b0a91ecb4717224508176de', 'timestamp': '', 'source': 'github', 'line_count': 7, 'max_line_length': 41, 'avg_line_length': 21.285714285714285, 'alnum_prop': 0.6510067114093959, 'repo_name': 'carentsen/RMUAST', 'id': '80393d8f13a4d03e995b6a149a01c9b21c972e20', 'size': '149', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'rmuast_s17_module_1/exercise_accuracy/transverse_mercator_py/.idea/dictionaries/Christian.xml', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'HTML', 'bytes': '2281'}, {'name': 'Python', 'bytes': '146166'}, {'name': 'Shell', 'bytes': '3126'}, {'name': 'TeX', 'bytes': '115343'}]} |
/** @module vertx-web-js/virtual_host_handler */
var utils = require('vertx-js/util/utils');
var RoutingContext = require('vertx-web-js/routing_context');
var io = Packages.io;
var JsonObject = io.vertx.core.json.JsonObject;
var JVirtualHostHandler = io.vertx.ext.web.handler.VirtualHostHandler;
/**
 Handler that will filter requests based on the request Host name.

 Code-generated JavaScript bridge wrapping the Java
 io.vertx.ext.web.handler.VirtualHostHandler delegate.
 @class
 @param j_val the underlying Java delegate object
*/
var VirtualHostHandler = function(j_val) {

  var j_virtualHostHandler = j_val;
  var that = this;

  /**
   Delegate the routing context to the underlying Java handler.

   @public
   @param arg0 {RoutingContext} the routing context of the current request
   */
  this.handle = function(arg0) {
    var __args = arguments;
    // Accept only a single wrapped Vert.x object (identified by its _jdel delegate).
    if (__args.length === 1 && typeof __args[0] === 'object' && __args[0]._jdel) {
      j_virtualHostHandler["handle(io.vertx.ext.web.RoutingContext)"](arg0._jdel);
    } else throw new TypeError('function invoked with invalid arguments');
  };

  // A reference to the underlying Java delegate
  // NOTE! This is an internal API and must not be used in user code.
  // If you rely on this property your code is likely to break if we change it / remove it without warning.
  this._jdel = j_virtualHostHandler;
};
// Static metadata used by the code-generated conversion utilities (utils.js).
VirtualHostHandler._jclass = utils.getJavaClass("io.vertx.ext.web.handler.VirtualHostHandler");
VirtualHostHandler._jtype = {
  // True when the given wrapper holds a Java VirtualHostHandler delegate.
  accept: function(obj) {
    return VirtualHostHandler._jclass.isInstance(obj._jdel);
  },
  // Wrap a raw Java delegate in a new JS VirtualHostHandler instance.
  wrap: function(jdel) {
    var obj = Object.create(VirtualHostHandler.prototype, {});
    VirtualHostHandler.apply(obj, arguments);
    return obj;
  },
  // Recover the raw Java delegate from a JS wrapper.
  unwrap: function(obj) {
    return obj._jdel;
  }
};
// Internal factory: build a JS wrapper around an existing Java delegate.
VirtualHostHandler._create = function(jdel) {
  var obj = Object.create(VirtualHostHandler.prototype, {});
  VirtualHostHandler.apply(obj, arguments);
  return obj;
}
/**
 Create a handler for the given host name. Requests are filtered based on
 the request Host name (see the class documentation of the wrapped Java
 VirtualHostHandler).

 @memberof module:vertx-web-js/virtual_host_handler
 @param hostname {string} the host name to filter on
 @param handler {function} the handler to invoke for matching requests
 @return {VirtualHostHandler} the handler
 */
VirtualHostHandler.create = function(hostname, handler) {
  var __args = arguments;
  // Expect exactly (string, function); anything else is a usage error.
  if (__args.length === 2 && typeof __args[0] === 'string' && typeof __args[1] === 'function') {
    return utils.convReturnVertxGen(VirtualHostHandler, JVirtualHostHandler["create(java.lang.String,io.vertx.core.Handler)"](hostname, function(jVal) {
      // Re-wrap the Java RoutingContext before handing it to the JS callback.
      handler(utils.convReturnVertxGen(RoutingContext, jVal));
    }));
  } else throw new TypeError('function invoked with invalid arguments');
};
module.exports = VirtualHostHandler; | {'content_hash': '0fa9a9de51e2c68cb82d27730b4e1224', 'timestamp': '', 'source': 'github', 'line_count': 75, 'max_line_length': 152, 'avg_line_length': 32.026666666666664, 'alnum_prop': 0.7060782681099084, 'repo_name': 'sibay/vertx-web', 'id': '8350c65ffd443d5a291cf142411cc9b095d7ed78', 'size': '3022', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'vertx-web/src/main/resources/vertx-web-js/virtual_host_handler.js', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Groovy', 'bytes': '145776'}, {'name': 'HTML', 'bytes': '7124'}, {'name': 'Java', 'bytes': '944689'}, {'name': 'JavaScript', 'bytes': '200579'}, {'name': 'Python', 'bytes': '1978283'}, {'name': 'Ruby', 'bytes': '153551'}, {'name': 'Shell', 'bytes': '3765'}]} |
<?php
namespace Kendo\Dataviz\UI;
/**
 * Fluent configuration object for the border of a chart series target.
 *
 * Each setter stores its value via setProperty() (inherited from
 * \kendo\SerializableObject) and returns $this so calls can be chained.
 */
class ChartSeriesItemTargetBorder extends \kendo\SerializableObject {
//>> Properties

    /**
    * The color of the border.
    * @param string|\Kendo\JavaScriptFunction $value
    * @return \Kendo\Dataviz\UI\ChartSeriesItemTargetBorder
    */
    public function color($value) {
        return $this->setProperty('color', $value);
    }

    /**
    * The dash type of the border line.
    * The following dash types are supported:
    * NOTE(review): the supported values are enumerated in the upstream Kendo
    * UI dashType documentation — confirm against the Kendo API reference.
    * @param string|\Kendo\JavaScriptFunction $value
    * @return \Kendo\Dataviz\UI\ChartSeriesItemTargetBorder
    */
    public function dashType($value) {
        return $this->setProperty('dashType', $value);
    }

    /**
    * The width of the border in pixels. By default the border width is set to zero which means that the border will not appear.
    * @param float|\Kendo\JavaScriptFunction $value
    * @return \Kendo\Dataviz\UI\ChartSeriesItemTargetBorder
    */
    public function width($value) {
        return $this->setProperty('width', $value);
    }

//<< Properties
}
?>
| {'content_hash': '67f4f16792fc319cce50e6a2986af5f3', 'timestamp': '', 'source': 'github', 'line_count': 38, 'max_line_length': 128, 'avg_line_length': 26.973684210526315, 'alnum_prop': 0.6682926829268293, 'repo_name': 'dammeheli75/blx', 'id': 'bd27c49a2db950b35dd37fb91dd0c2f964388b31', 'size': '1025', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'vendor/telerik/kendoui/Kendo/Dataviz/UI/ChartSeriesItemTargetBorder.php', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'CSS', 'bytes': '365191'}, {'name': 'JavaScript', 'bytes': '2506823'}, {'name': 'PHP', 'bytes': '225200'}]} |
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<script><!--#include virtual="js/templateData.js" --></script>
<script id="upgrade-template" type="text/x-handlebars-template">
<h5><a id="upgrade_240_notable" href="#upgrade_240_notable">Notable changes in 2.4.0</a></h5>
<ul>
<li>The <code>bin/kafka-preferred-replica-election.sh</code> command line tool has been deprecated. It has been replaced by <code>bin/kafka-leader-election.sh</code>.</li>
<li>The methods <code>electPreferredLeaders</code> in the Java <code>AdminClient</code> class have been deprecated in favor of the methods <code>electLeaders</code>.</li>
<li>Scala code leveraging the <code>NewTopic(String, int, short)</code> constructor with literal values will need to explicitly call <code>toShort</code> on the second literal.</li>
</ul>
<h4><a id="upgrade_2_3_0" href="#upgrade_2_3_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x, 0.10.1.x, 0.10.2.x, 0.11.0.x, 1.0.x, 1.1.x, 2.0.x or 2.1.x or 2.2.x to 2.3.0</a></h4>
<!-- TODO core-team -->
<h5><a id="upgrade_230_notable" href="#upgrade_230_notable">Notable changes in 2.3.0</a></h5>
<ul>
<li> We are introducing a new rebalancing protocol for Kafka Connect based on
<a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-415%3A+Incremental+Cooperative+Rebalancing+in+Kafka+Connect">incremental cooperative rebalancing</a>.
The new protocol does not require stopping all the tasks during a rebalancing phase between Connect workers. Instead, only the tasks that need to be exchanged
between workers are stopped and they are started in a follow up rebalance. The new Connect protocol is enabled by default beginning with 2.3.0.
        For more details on how it works and how to enable the old behavior of eager rebalancing, check out the
        <a href="/{{version}}/documentation/#connect_administration">incremental cooperative rebalancing design</a>.
</li>
    <li> We are introducing static membership for consumers. This feature reduces unnecessary rebalances during normal application upgrades or rolling bounces.
        For more details on how to use it, check out the <a href="/{{version}}/design/#static_membership">static membership design</a>.
    </li>
    <li> The Kafka Streams DSL has switched the store types it uses by default. While this change is mainly transparent to users, there are some corner cases that may require code changes.
See the <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_230">Kafka Streams upgrade section</a> for more details.
</li>
</ul>
<h4><a id="upgrade_2_2_0" href="#upgrade_2_2_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x, 0.10.1.x, 0.10.2.x, 0.11.0.x, 1.0.x, 1.1.x, 2.0.x or 2.1.x to 2.2.0</a></h4>
<p><b>If you are upgrading from a version prior to 2.1.x, please see the note below about the change to the schema used to store consumer offsets.
Once you have changed the inter.broker.protocol.version to the latest version, it will not be possible to downgrade to a version prior to 2.1.</b></p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties on all brokers and add the following properties. CURRENT_KAFKA_VERSION refers to the version you
are upgrading from. CURRENT_MESSAGE_FORMAT_VERSION refers to the message format version currently in use. If you have previously
overridden the message format version, you should keep its current value. Alternatively, if you are upgrading from a version prior
to 0.11.0.x, then CURRENT_MESSAGE_FORMAT_VERSION should be set to match CURRENT_KAFKA_VERSION.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2, 0.9.0, 0.10.0, 0.10.1, 0.10.2, 0.11.0, 1.0, 1.1).</li>
<li>log.message.format.version=CURRENT_MESSAGE_FORMAT_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact
following the upgrade</a> for the details on what this configuration does.)</li>
</ul>
If you are upgrading from 0.11.0.x, 1.0.x, 1.1.x, or 2.0.x and you have not overridden the message format, then you only need to override
the inter-broker protocol version.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (0.11.0, 1.0, 1.1, 2.0).</li>
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. Once you have done so, the
brokers will be running the latest version and you can verify that the cluster's behavior and performance meets expectations.
It is still possible to downgrade at this point if there are any problems.
</li>
<li> Once the cluster's behavior and performance has been verified, bump the protocol version by editing
<code>inter.broker.protocol.version</code> and setting it to 2.2.
</li>
<li> Restart the brokers one by one for the new protocol version to take effect. Once the brokers begin using the latest
protocol version, it will no longer be possible to downgrade the cluster to an older version.
</li>
<li> If you have overridden the message format version as instructed above, then you need to do one more rolling restart to
upgrade it to its latest version. Once all (or most) consumers have been upgraded to 0.11.0 or later,
change log.message.format.version to 2.2 on each broker and restart them one by one. Note that the older Scala clients,
which are no longer maintained, do not support the message format introduced in 0.11, so to avoid conversion costs
(or to take advantage of <a href="#upgrade_11_exactly_once_semantics">exactly once semantics</a>),
the newer Java clients must be used.
</li>
</ol>
<h5><a id="upgrade_220_notable" href="#upgrade_220_notable">Notable changes in 2.2.0</a></h5>
<ul>
<li>The default consumer group id has been changed from the empty string (<code>""</code>) to <code>null</code>. Consumers who use the new default group id will not be able to subscribe to topics,
and fetch or commit offsets. The empty string as consumer group id is deprecated but will be supported until a future major release. Old clients that rely on the empty string group id will now
have to explicitly provide it as part of their consumer config. For more information see
<a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-289%3A+Improve+the+default+group+id+behavior+in+KafkaConsumer">KIP-289</a>.</li>
<li>The <code>bin/kafka-topics.sh</code> command line tool is now able to connect directly to brokers with <code>--bootstrap-server</code> instead of zookeeper. The old <code>--zookeeper</code>
option is still available for now. Please read <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-377%3A+TopicCommand+to+use+AdminClient">KIP-377</a> for more information.</li>
<li>Kafka Streams depends on a newer version of RocksDBs that requires MacOS 10.13 or higher.</li>
</ul>
<h4><a id="upgrade_2_1_0" href="#upgrade_2_1_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x, 0.10.1.x, 0.10.2.x, 0.11.0.x, 1.0.x, 1.1.x, or 2.0.0 to 2.1.0</a></h4>
<p><b>Note that 2.1.x contains a change to the internal schema used to store consumer offsets. Once the upgrade is
complete, it will not be possible to downgrade to previous versions. See the rolling upgrade notes below for more detail.</b></p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties on all brokers and add the following properties. CURRENT_KAFKA_VERSION refers to the version you
are upgrading from. CURRENT_MESSAGE_FORMAT_VERSION refers to the message format version currently in use. If you have previously
overridden the message format version, you should keep its current value. Alternatively, if you are upgrading from a version prior
to 0.11.0.x, then CURRENT_MESSAGE_FORMAT_VERSION should be set to match CURRENT_KAFKA_VERSION.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2, 0.9.0, 0.10.0, 0.10.1, 0.10.2, 0.11.0, 1.0, 1.1).</li>
<li>log.message.format.version=CURRENT_MESSAGE_FORMAT_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact
following the upgrade</a> for the details on what this configuration does.)</li>
</ul>
If you are upgrading from 0.11.0.x, 1.0.x, 1.1.x, or 2.0.x and you have not overridden the message format, then you only need to override
the inter-broker protocol version.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (0.11.0, 1.0, 1.1, 2.0).</li>
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. Once you have done so, the
brokers will be running the latest version and you can verify that the cluster's behavior and performance meets expectations.
It is still possible to downgrade at this point if there are any problems.
</li>
<li> Once the cluster's behavior and performance has been verified, bump the protocol version by editing
<code>inter.broker.protocol.version</code> and setting it to 2.1.
</li>
<li> Restart the brokers one by one for the new protocol version to take effect. Once the brokers begin using the latest
protocol version, it will no longer be possible to downgrade the cluster to an older version.
</li>
<li> If you have overridden the message format version as instructed above, then you need to do one more rolling restart to
upgrade it to its latest version. Once all (or most) consumers have been upgraded to 0.11.0 or later,
change log.message.format.version to 2.1 on each broker and restart them one by one. Note that the older Scala clients,
which are no longer maintained, do not support the message format introduced in 0.11, so to avoid conversion costs
(or to take advantage of <a href="#upgrade_11_exactly_once_semantics">exactly once semantics</a>),
the newer Java clients must be used.
</li>
</ol>
<p><b>Additional Upgrade Notes:</b></p>
<ol>
<li>Offset expiration semantics has slightly changed in this version. According to the new semantics, offsets of partitions in a group will
not be removed while the group is subscribed to the corresponding topic and is still active (has active consumers). If group becomes
empty all its offsets will be removed after default offset retention period (or the one set by broker) has passed (unless the group becomes
active again). Offsets associated with standalone (simple) consumers, that do not use Kafka group management, will be removed after default
offset retention period (or the one set by broker) has passed since their last commit.</li>
<li>The default for console consumer's <code>enable.auto.commit</code> property when no <code>group.id</code> is provided is now set to <code>false</code>.
This is to avoid polluting the consumer coordinator cache as the auto-generated group is not likely to be used by other consumers.</li>
<li>The default value for the producer's <code>retries</code> config was changed to <code>Integer.MAX_VALUE</code>, as we introduced <code>delivery.timeout.ms</code>
in <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-91+Provide+Intuitive+User+Timeouts+in+The+Producer">KIP-91</a>,
which sets an upper bound on the total time between sending a record and receiving acknowledgement from the broker. By default,
the delivery timeout is set to 2 minutes.</li>
<li>By default, MirrorMaker now overrides <code>delivery.timeout.ms</code> to <code>Integer.MAX_VALUE</code> when
configuring the producer. If you have overridden the value of <code>retries</code> in order to fail faster,
you will instead need to override <code>delivery.timeout.ms</code>.</li>
<li>The <code>ListGroup</code> API now expects, as a recommended alternative, <code>Describe Group</code> access to the groups a user should be able to list.
Even though the old <code>Describe Cluster</code> access is still supported for backward compatibility, using it for this API is not advised.</li>
<li><a href="https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=87298242">KIP-336</a> deprecates the ExtendedSerializer and ExtendedDeserializer interfaces and
propagates the usage of Serializer and Deserializer. ExtendedSerializer and ExtendedDeserializer were introduced with
<a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-82+-+Add+Record+Headers">KIP-82</a> to provide record headers for serializers and deserializers
in a Java 7 compatible fashion. Now we consolidated these interfaces as Java 7 support has been dropped since.</li>
</ol>
<h5><a id="upgrade_210_notable" href="#upgrade_210_notable">Notable changes in 2.1.0</a></h5>
<ul>
<li>Jetty has been upgraded to 9.4.12, which excludes TLS_RSA_* ciphers by default because they do not support forward
secrecy, see https://github.com/eclipse/jetty.project/issues/2807 for more information.</li>
<li>Unclean leader election is automatically enabled by the controller when <code>unclean.leader.election.enable</code> config is dynamically updated by using per-topic config override.</li>
<li>The <code>AdminClient</code> has added a method <code>AdminClient#metrics()</code>. Now any application using the <code>AdminClient</code> can gain more information
and insight by viewing the metrics captured from the <code>AdminClient</code>. For more information
see <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-324%3A+Add+method+to+get+metrics%28%29+in+AdminClient">KIP-324</a>
</li>
<li>Kafka now supports Zstandard compression from <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-110%3A+Add+Codec+for+ZStandard+Compression">KIP-110</a>.
You must upgrade the broker as well as clients to make use of it. Consumers prior to 2.1.0 will not be able to read from topics which use
Zstandard compression, so you should not enable it for a topic until all downstream consumers are upgraded. See the KIP for more detail.
</li>
</ul>
<h4><a id="upgrade_2_0_0" href="#upgrade_2_0_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x, 0.10.1.x, 0.10.2.x, 0.11.0.x, 1.0.x, or 1.1.x to 2.0.0</a></h4>
<p>Kafka 2.0.0 introduces wire protocol changes. By following the recommended rolling upgrade plan below,
you guarantee no downtime during the upgrade. However, please review the <a href="#upgrade_200_notable">notable changes in 2.0.0</a> before upgrading.
</p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties on all brokers and add the following properties. CURRENT_KAFKA_VERSION refers to the version you
are upgrading from. CURRENT_MESSAGE_FORMAT_VERSION refers to the message format version currently in use. If you have previously
overridden the message format version, you should keep its current value. Alternatively, if you are upgrading from a version prior
to 0.11.0.x, then CURRENT_MESSAGE_FORMAT_VERSION should be set to match CURRENT_KAFKA_VERSION.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2, 0.9.0, 0.10.0, 0.10.1, 0.10.2, 0.11.0, 1.0, 1.1).</li>
<li>log.message.format.version=CURRENT_MESSAGE_FORMAT_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact
following the upgrade</a> for the details on what this configuration does.)</li>
</ul>
If you are upgrading from 0.11.0.x, 1.0.x, or 1.1.x and you have not overridden the message format, then you only need to override
the inter-broker protocol format.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (0.11.0, 1.0, 1.1).</li>
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. </li>
    <li> Once the entire cluster is upgraded, bump the protocol version by editing <code>inter.broker.protocol.version</code> and setting it to 2.0. </li>
<li> Restart the brokers one by one for the new protocol version to take effect.</li>
<li> If you have overridden the message format version as instructed above, then you need to do one more rolling restart to
upgrade it to its latest version. Once all (or most) consumers have been upgraded to 0.11.0 or later,
change log.message.format.version to 2.0 on each broker and restart them one by one. Note that the older Scala consumer
does not support the new message format introduced in 0.11, so to avoid the performance cost of down-conversion (or to
take advantage of <a href="#upgrade_11_exactly_once_semantics">exactly once semantics</a>), the newer Java consumer must be used.</li>
</ol>
<p><b>Additional Upgrade Notes:</b></p>
<ol>
<li>If you are willing to accept downtime, you can simply take all the brokers down, update the code and start them back up. They will start
with the new protocol by default.</li>
<li>Bumping the protocol version and restarting can be done any time after the brokers are upgraded. It does not have to be immediately after.
Similarly for the message format version.</li>
<li>If you are using Java8 method references in your Kafka Streams code you might need to update your code to resolve method ambiguities.
Hot-swapping the jar-file only might not work.</li>
<li>ACLs should not be added to prefixed resources,
(added in <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-290%3A+Support+for+Prefixed+ACLs">KIP-290</a>),
until all brokers in the cluster have been updated.
<p><b>NOTE:</b> any prefixed ACLs added to a cluster, even after the cluster is fully upgraded, will be ignored should the cluster be downgraded again.
</li>
</ol>
<h5><a id="upgrade_200_notable" href="#upgrade_200_notable">Notable changes in 2.0.0</a></h5>
<ul>
<li><a href="https://cwiki.apache.org/confluence/x/oYtjB">KIP-186</a> increases the default offset retention time from 1 day to 7 days. This makes it less likely to "lose" offsets in an application that commits infrequently. It also increases the active set of offsets and therefore can increase memory usage on the broker. Note that the console consumer currently enables offset commit by default and can be the source of a large number of offsets which this change will now preserve for 7 days instead of 1. You can preserve the existing behavior by setting the broker config <code>offsets.retention.minutes</code> to 1440.</li>
<li>Support for Java 7 has been dropped, Java 8 is now the minimum version required.</li>
<li> The default value for <code>ssl.endpoint.identification.algorithm</code> was changed to <code>https</code>, which performs hostname verification (man-in-the-middle attacks are possible otherwise). Set <code>ssl.endpoint.identification.algorithm</code> to an empty string to restore the previous behaviour. </li>
<li><a href="https://issues.apache.org/jira/browse/KAFKA-5674">KAFKA-5674</a> extends the lower interval of <code>max.connections.per.ip</code> minimum to zero and therefore allows IP-based filtering of inbound connections.</li>
<li><a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-272%3A+Add+API+version+tag+to+broker%27s+RequestsPerSec+metric">KIP-272</a>
added API version tag to the metric <code>kafka.network:type=RequestMetrics,name=RequestsPerSec,request={Produce|FetchConsumer|FetchFollower|...}</code>.
This metric now becomes <code>kafka.network:type=RequestMetrics,name=RequestsPerSec,request={Produce|FetchConsumer|FetchFollower|...},version={0|1|2|3|...}</code>. This will impact
JMX monitoring tools that do not automatically aggregate. To get the total count for a specific request type, the tool needs to be
updated to aggregate across different versions.
</li>
<li><a href="https://cwiki.apache.org/confluence/x/uaBzB">KIP-225</a> changed the metric "records.lag" to use tags for topic and partition. The original version with the name format "{topic}-{partition}.records-lag" has been removed.</li>
<li>The Scala consumers, which have been deprecated since 0.11.0.0, have been removed. The Java consumer has been the recommended option
since 0.10.0.0. Note that the Scala consumers in 1.1.0 (and older) will continue to work even if the brokers are upgraded to 2.0.0.</li>
<li>The Scala producers, which have been deprecated since 0.10.0.0, have been removed. The Java producer has been the recommended option
since 0.9.0.0. Note that the behaviour of the default partitioner in the Java producer differs from the default partitioner
in the Scala producers. Users migrating should consider configuring a custom partitioner that retains the previous behaviour.
Note that the Scala producers in 1.1.0 (and older) will continue to work even if the brokers are upgraded to 2.0.0.</li>
<li>MirrorMaker and ConsoleConsumer no longer support the Scala consumer, they always use the Java consumer.</li>
<li>The ConsoleProducer no longer supports the Scala producer, it always uses the Java producer.</li>
<li>A number of deprecated tools that rely on the Scala clients have been removed: ReplayLogProducer, SimpleConsumerPerformance, SimpleConsumerShell, ExportZkOffsets, ImportZkOffsets, UpdateOffsetsInZK, VerifyConsumerRebalance.</li>
<li>The deprecated kafka.tools.ProducerPerformance has been removed, please use org.apache.kafka.tools.ProducerPerformance.</li>
<li>New Kafka Streams configuration parameter <code>upgrade.from</code> added that allows rolling bounce upgrade from older version. </li>
<li><a href="https://cwiki.apache.org/confluence/x/DVyHB">KIP-284</a> changed the retention time for Kafka Streams repartition topics by setting its default value to <code>Long.MAX_VALUE</code>.</li>
<li>Updated <code>ProcessorStateManager</code> APIs in Kafka Streams for registering state stores to the processor topology. For more details please read the Streams <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_200">Upgrade Guide</a>.</li>
<li>
In earlier releases, Connect's worker configuration required the <code>internal.key.converter</code> and <code>internal.value.converter</code> properties.
In 2.0, these are <a href="https://cwiki.apache.org/confluence/x/AZQ7B">no longer required</a> and default to the JSON converter.
You may safely remove these properties from your Connect standalone and distributed worker configurations:<br />
<code>internal.key.converter=org.apache.kafka.connect.json.JsonConverter</code>
<code>internal.key.converter.schemas.enable=false</code>
<code>internal.value.converter=org.apache.kafka.connect.json.JsonConverter</code>
<code>internal.value.converter.schemas.enable=false</code>
</li>
<li><a href="https://cwiki.apache.org/confluence/x/5kiHB">KIP-266</a> adds a new consumer configuration <code>default.api.timeout.ms</code>
to specify the default timeout to use for <code>KafkaConsumer</code> APIs that could block. The KIP also adds overloads for such blocking
APIs to support specifying a specific timeout to use for each of them instead of using the default timeout set by <code>default.api.timeout.ms</code>.
In particular, a new <code>poll(Duration)</code> API has been added which does not block for dynamic partition assignment.
The old <code>poll(long)</code> API has been deprecated and will be removed in a future version. Overloads have also been added
for other <code>KafkaConsumer</code> methods like <code>partitionsFor</code>, <code>listTopics</code>, <code>offsetsForTimes</code>,
<code>beginningOffsets</code>, <code>endOffsets</code> and <code>close</code> that take in a <code>Duration</code>.</li>
<li>Also as part of KIP-266, the default value of <code>request.timeout.ms</code> has been changed to 30 seconds.
The previous value was a little higher than 5 minutes to account for maximum time that a rebalance would take.
Now we treat the JoinGroup request in the rebalance as a special case and use a value derived from
<code>max.poll.interval.ms</code> for the request timeout. All other request types use the timeout defined
by <code>request.timeout.ms</code></li>
<li>The internal method <code>kafka.admin.AdminClient.deleteRecordsBefore</code> has been removed. Users are encouraged to migrate to <code>org.apache.kafka.clients.admin.AdminClient.deleteRecords</code>.</li>
<li>The AclCommand tool <code>--producer</code> convenience option uses the <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-277+-+Fine+Grained+ACL+for+CreateTopics+API">KIP-277</a> finer grained ACL on the given topic. </li>
<li><a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-176%3A+Remove+deprecated+new-consumer+option+for+tools">KIP-176</a> removes
the <code>--new-consumer</code> option for all consumer based tools. This option is redundant since the new consumer is automatically
used if --bootstrap-server is defined.
</li>
<li><a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-290%3A+Support+for+Prefixed+ACLs">KIP-290</a> adds the ability
to define ACLs on prefixed resources, e.g. any topic starting with 'foo'.</li>
<li><a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-283%3A+Efficient+Memory+Usage+for+Down-Conversion">KIP-283</a> improves message down-conversion
handling on Kafka broker, which has typically been a memory-intensive operation. The KIP adds a mechanism by which the operation becomes less memory intensive
by down-converting chunks of partition data at a time which helps put an upper bound on memory consumption. With this improvement, there is a change in
<code>FetchResponse</code> protocol behavior where the broker could send an oversized message batch towards the end of the response with an invalid offset.
Such oversized messages must be ignored by consumer clients, as is done by <code>KafkaConsumer</code>.
<p>KIP-283 also adds new topic and broker configurations <code>message.downconversion.enable</code> and <code>log.message.downconversion.enable</code> respectively
to control whether down-conversion is enabled. When disabled, broker does not perform any down-conversion and instead sends an <code>UNSUPPORTED_VERSION</code>
error to the client.</p></li>
<li>Dynamic broker configuration options can be stored in ZooKeeper using kafka-configs.sh before brokers are started.
This option can be used to avoid storing clear passwords in server.properties as all password configs may be stored encrypted in ZooKeeper.</li>
<li>ZooKeeper hosts are now re-resolved if connection attempt fails. But if your ZooKeeper host names resolve
to multiple addresses and some of them are not reachable, then you may need to increase the connection timeout
<code>zookeeper.connection.timeout.ms</code>.</li>
</ul>
<h5><a id="upgrade_200_new_protocols" href="#upgrade_200_new_protocols">New Protocol Versions</a></h5>
<ul>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-279%3A+Fix+log+divergence+between+leader+and+follower+after+fast+leader+fail+over">KIP-279</a>: OffsetsForLeaderEpochResponse v1 introduces a partition-level <code>leader_epoch</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-219+-+Improve+quota+communication">KIP-219</a>: Bump up the protocol versions of non-cluster action requests and responses that are throttled on quota violation.</li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-290%3A+Support+for+Prefixed+ACLs">KIP-290</a>: Bump up the protocol versions ACL create, describe and delete requests and responses.</li>
</ul>
<h5><a id="upgrade_200_streams_from_11" href="#upgrade_200_streams_from_11">Upgrading a 1.1 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 1.1 to 2.0 does not require a broker upgrade.
A Kafka Streams 2.0 application can connect to 2.0, 1.1, 1.0, 0.11.0, 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though). </li>
<li> Note that in 2.0 we have removed the public APIs that are deprecated prior to 1.0; users leveraging on those deprecated APIs need to make code changes accordingly.
See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_200">Streams API changes in 2.0.0</a> for more details. </li>
</ul>
<h4><a id="upgrade_1_1_0" href="#upgrade_1_1_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x, 0.10.1.x, 0.10.2.x, 0.11.0.x, or 1.0.x to 1.1.x</a></h4>
<p>Kafka 1.1.0 introduces wire protocol changes. By following the recommended rolling upgrade plan below,
you guarantee no downtime during the upgrade. However, please review the <a href="#upgrade_110_notable">notable changes in 1.1.0</a> before upgrading.
</p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties on all brokers and add the following properties. CURRENT_KAFKA_VERSION refers to the version you
are upgrading from. CURRENT_MESSAGE_FORMAT_VERSION refers to the message format version currently in use. If you have previously
overridden the message format version, you should keep its current value. Alternatively, if you are upgrading from a version prior
to 0.11.0.x, then CURRENT_MESSAGE_FORMAT_VERSION should be set to match CURRENT_KAFKA_VERSION.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2, 0.9.0, 0.10.0, 0.10.1, 0.10.2, 0.11.0, 1.0).</li>
<li>log.message.format.version=CURRENT_MESSAGE_FORMAT_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact
following the upgrade</a> for the details on what this configuration does.)</li>
</ul>
If you are upgrading from 0.11.0.x or 1.0.x and you have not overridden the message format, then you only need to override
the inter-broker protocol format.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (0.11.0 or 1.0).</li>
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. </li>
<li> Once the entire cluster is upgraded, bump the protocol version by editing <code>inter.broker.protocol.version</code> and setting it to 1.1.
<li> Restart the brokers one by one for the new protocol version to take effect. </li>
<li> If you have overridden the message format version as instructed above, then you need to do one more rolling restart to
upgrade it to its latest version. Once all (or most) consumers have been upgraded to 0.11.0 or later,
change log.message.format.version to 1.1 on each broker and restart them one by one. Note that the older Scala consumer
does not support the new message format introduced in 0.11, so to avoid the performance cost of down-conversion (or to
take advantage of <a href="#upgrade_11_exactly_once_semantics">exactly once semantics</a>), the newer Java consumer must be used.</li>
</ol>
<p><b>Additional Upgrade Notes:</b></p>
<ol>
<li>If you are willing to accept downtime, you can simply take all the brokers down, update the code and start them back up. They will start
with the new protocol by default.</li>
<li>Bumping the protocol version and restarting can be done any time after the brokers are upgraded. It does not have to be immediately after.
Similarly for the message format version.</li>
    <li>If you are using Java8 method references in your Kafka Streams code you might need to update your code to resolve method ambiguities.
Hot-swapping the jar-file only might not work.</li>
</ol>
<h5><a id="upgrade_111_notable" href="#upgrade_111_notable">Notable changes in 1.1.1</a></h5>
<ul>
<li> New Kafka Streams configuration parameter <code>upgrade.from</code> added that allows rolling bounce upgrade from version 0.10.0.x </li>
<li> See the <a href="/{{version}}/documentation/streams/upgrade-guide.html"><b>Kafka Streams upgrade guide</b></a> for details about this new config.
</ul>
<h5><a id="upgrade_110_notable" href="#upgrade_110_notable">Notable changes in 1.1.0</a></h5>
<ul>
<li>The kafka artifact in Maven no longer depends on log4j or slf4j-log4j12. Similarly to the kafka-clients artifact, users
can now choose the logging back-end by including the appropriate slf4j module (slf4j-log4j12, logback, etc.). The release
tarball still includes log4j and slf4j-log4j12.</li>
<li><a href="https://cwiki.apache.org/confluence/x/uaBzB">KIP-225</a> changed the metric "records.lag" to use tags for topic and partition. The original version with the name format "{topic}-{partition}.records-lag" is deprecated and will be removed in 2.0.0.</li>
<li>Kafka Streams is more robust against broker communication errors. Instead of stopping the Kafka Streams client with a fatal exception,
Kafka Streams tries to self-heal and reconnect to the cluster. Using the new <code>AdminClient</code> you have better control of how often
Kafka Streams retries and can <a href="/{{version}}/documentation/streams/developer-guide/config-streams">configure</a>
fine-grained timeouts (instead of hard coded retries as in older version).</li>
<li>Kafka Streams rebalance time was reduced further making Kafka Streams more responsive.</li>
<li>Kafka Connect now supports message headers in both sink and source connectors, and to manipulate them via simple message transforms. Connectors must be changed to explicitly use them. A new <code>HeaderConverter</code> is introduced to control how headers are (de)serialized, and the new "SimpleHeaderConverter" is used by default to use string representations of values.</li>
<li>kafka.tools.DumpLogSegments now automatically sets deep-iteration option if print-data-log is enabled
explicitly or implicitly due to any of the other options like decoder.</li>
</ul>
<h5><a id="upgrade_110_new_protocols" href="#upgrade_110_new_protocols">New Protocol Versions</a></h5>
<ul>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-226+-+Dynamic+Broker+Configuration">KIP-226</a> introduced DescribeConfigs Request/Response v1.</li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-227%3A+Introduce+Incremental+FetchRequests+to+Increase+Partition+Scalability">KIP-227</a> introduced Fetch Request/Response v7.</li>
</ul>
<h5><a id="upgrade_110_streams_from_10" href="#upgrade_110_streams_from_10">Upgrading a 1.0 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 1.0 to 1.1 does not require a broker upgrade.
A Kafka Streams 1.1 application can connect to 1.0, 0.11.0, 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though). </li>
<li> See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_110">Streams API changes in 1.1.0</a> for more details. </li>
</ul>
<h4><a id="upgrade_1_0_0" href="#upgrade_1_0_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x, 0.10.1.x, 0.10.2.x or 0.11.0.x to 1.0.0</a></h4>
<p>Kafka 1.0.0 introduces wire protocol changes. By following the recommended rolling upgrade plan below,
you guarantee no downtime during the upgrade. However, please review the <a href="#upgrade_100_notable">notable changes in 1.0.0</a> before upgrading.
</p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties on all brokers and add the following properties. CURRENT_KAFKA_VERSION refers to the version you
are upgrading from. CURRENT_MESSAGE_FORMAT_VERSION refers to the message format version currently in use. If you have previously
overridden the message format version, you should keep its current value. Alternatively, if you are upgrading from a version prior
to 0.11.0.x, then CURRENT_MESSAGE_FORMAT_VERSION should be set to match CURRENT_KAFKA_VERSION.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2, 0.9.0, 0.10.0, 0.10.1, 0.10.2, 0.11.0).</li>
<li>log.message.format.version=CURRENT_MESSAGE_FORMAT_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact
following the upgrade</a> for the details on what this configuration does.)</li>
</ul>
If you are upgrading from 0.11.0.x and you have not overridden the message format, you must set
both the message format version and the inter-broker protocol version to 0.11.0.
<ul>
<li>inter.broker.protocol.version=0.11.0</li>
<li>log.message.format.version=0.11.0</li>
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. </li>
<li> Once the entire cluster is upgraded, bump the protocol version by editing <code>inter.broker.protocol.version</code> and setting it to 1.0.
<li> Restart the brokers one by one for the new protocol version to take effect. </li>
<li> If you have overridden the message format version as instructed above, then you need to do one more rolling restart to
upgrade it to its latest version. Once all (or most) consumers have been upgraded to 0.11.0 or later,
change log.message.format.version to 1.0 on each broker and restart them one by one. If you are upgrading from
0.11.0 and log.message.format.version is set to 0.11.0, you can update the config and skip the rolling restart.
Note that the older Scala consumer does not support the new message format introduced in 0.11, so to avoid the
performance cost of down-conversion (or to take advantage of <a href="#upgrade_11_exactly_once_semantics">exactly once semantics</a>),
the newer Java consumer must be used.</li>
</ol>
<p><b>Additional Upgrade Notes:</b></p>
<ol>
<li>If you are willing to accept downtime, you can simply take all the brokers down, update the code and start them back up. They will start
with the new protocol by default.</li>
<li>Bumping the protocol version and restarting can be done any time after the brokers are upgraded. It does not have to be immediately after.
Similarly for the message format version.</li>
</ol>
<h5><a id="upgrade_102_notable" href="#upgrade_102_notable">Notable changes in 1.0.2</a></h5>
<ul>
<li> New Kafka Streams configuration parameter <code>upgrade.from</code> added that allows rolling bounce upgrade from version 0.10.0.x </li>
<li> See the <a href="/{{version}}/documentation/streams/upgrade-guide.html"><b>Kafka Streams upgrade guide</b></a> for details about this new config.
</ul>
<h5><a id="upgrade_101_notable" href="#upgrade_101_notable">Notable changes in 1.0.1</a></h5>
<ul>
<li>Restored binary compatibility of AdminClient's Options classes (e.g. CreateTopicsOptions, DeleteTopicsOptions, etc.) with
0.11.0.x. Binary (but not source) compatibility had been broken inadvertently in 1.0.0.</li>
</ul>
<h5><a id="upgrade_100_notable" href="#upgrade_100_notable">Notable changes in 1.0.0</a></h5>
<ul>
<li>Topic deletion is now enabled by default, since the functionality is now stable. Users who wish to
        retain the previous behavior should set the broker config <code>delete.topic.enable</code> to <code>false</code>. Keep in mind that topic deletion removes data and the operation is not reversible (i.e. there is no "undelete" operation)</li>
<li>For topics that support timestamp search if no offset can be found for a partition, that partition is now included in the search result with a null offset value. Previously, the partition was not included in the map.
This change was made to make the search behavior consistent with the case of topics not supporting timestamp search.
<li>If the <code>inter.broker.protocol.version</code> is 1.0 or later, a broker will now stay online to serve replicas
on live log directories even if there are offline log directories. A log directory may become offline due to IOException
caused by hardware failure. Users need to monitor the per-broker metric <code>offlineLogDirectoryCount</code> to check
whether there is offline log directory. </li>
<li>Added KafkaStorageException which is a retriable exception. KafkaStorageException will be converted to NotLeaderForPartitionException in the response
if the version of client's FetchRequest or ProducerRequest does not support KafkaStorageException. </li>
<li>-XX:+DisableExplicitGC was replaced by -XX:+ExplicitGCInvokesConcurrent in the default JVM settings. This helps
avoid out of memory exceptions during allocation of native memory by direct buffers in some cases.</li>
<li>The overridden <code>handleError</code> method implementations have been removed from the following deprecated classes in
the <code>kafka.api</code> package: <code>FetchRequest</code>, <code>GroupCoordinatorRequest</code>, <code>OffsetCommitRequest</code>,
<code>OffsetFetchRequest</code>, <code>OffsetRequest</code>, <code>ProducerRequest</code>, and <code>TopicMetadataRequest</code>.
This was only intended for use on the broker, but it is no longer in use and the implementations have not been maintained.
A stub implementation has been retained for binary compatibility.</li>
<li>The Java clients and tools now accept any string as a client-id.</li>
<li>The deprecated tool <code>kafka-consumer-offset-checker.sh</code> has been removed. Use <code>kafka-consumer-groups.sh</code> to get consumer group details.</li>
<li>SimpleAclAuthorizer now logs access denials to the authorizer log by default.</li>
<li>Authentication failures are now reported to clients as one of the subclasses of <code>AuthenticationException</code>.
No retries will be performed if a client connection fails authentication.</li>
<li>Custom <code>SaslServer</code> implementations may throw <code>SaslAuthenticationException</code> to provide an error
message to return to clients indicating the reason for authentication failure. Implementors should take care not to include
any security-critical information in the exception message that should not be leaked to unauthenticated clients.</li>
<li>The <code>app-info</code> mbean registered with JMX to provide version and commit id will be deprecated and replaced with
metrics providing these attributes.</li>
<li>Kafka metrics may now contain non-numeric values. <code>org.apache.kafka.common.Metric#value()</code> has been deprecated and
will return <code>0.0</code> in such cases to minimise the probability of breaking users who read the value of every client
metric (via a <code>MetricsReporter</code> implementation or by calling the <code>metrics()</code> method).
<code>org.apache.kafka.common.Metric#metricValue()</code> can be used to retrieve numeric and non-numeric metric values.</li>
<li>Every Kafka rate metric now has a corresponding cumulative count metric with the suffix <code>-total</code>
to simplify downstream processing. For example, <code>records-consumed-rate</code> has a corresponding
metric named <code>records-consumed-total</code>.</li>
<li>Mx4j will only be enabled if the system property <code>kafka_mx4jenable</code> is set to <code>true</code>. Due to a logic
inversion bug, it was previously enabled by default and disabled if <code>kafka_mx4jenable</code> was set to <code>true</code>.</li>
<li>The package <code>org.apache.kafka.common.security.auth</code> in the clients jar has been made public and added to the javadocs.
Internal classes which had previously been located in this package have been moved elsewhere.</li>
<li>When using an Authorizer and a user doesn't have required permissions on a topic, the broker
will return TOPIC_AUTHORIZATION_FAILED errors to requests irrespective of topic existence on broker.
        If the user has the required permissions and the topic doesn't exist, then the UNKNOWN_TOPIC_OR_PARTITION
error code will be returned. </li>
<li>config/consumer.properties file updated to use new consumer config properties.</li>
</ul>
<h5><a id="upgrade_100_new_protocols" href="#upgrade_100_new_protocols">New Protocol Versions</a></h5>
<ul>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-112%3A+Handle+disk+failure+for+JBOD">KIP-112</a>: LeaderAndIsrRequest v1 introduces a partition-level <code>is_new</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-112%3A+Handle+disk+failure+for+JBOD">KIP-112</a>: UpdateMetadataRequest v4 introduces a partition-level <code>offline_replicas</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-112%3A+Handle+disk+failure+for+JBOD">KIP-112</a>: MetadataResponse v5 introduces a partition-level <code>offline_replicas</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-112%3A+Handle+disk+failure+for+JBOD">KIP-112</a>: ProduceResponse v4 introduces error code for KafkaStorageException. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-112%3A+Handle+disk+failure+for+JBOD">KIP-112</a>: FetchResponse v6 introduces error code for KafkaStorageException. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-152+-+Improve+diagnostics+for+SASL+authentication+failures">KIP-152</a>:
SaslAuthenticate request has been added to enable reporting of authentication failures. This request will
be used if the SaslHandshake request version is greater than 0. </li>
</ul>
<h5><a id="upgrade_100_streams_from_0110" href="#upgrade_100_streams_from_0110">Upgrading a 0.11.0 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.11.0 to 1.0 does not require a broker upgrade.
A Kafka Streams 1.0 application can connect to 0.11.0, 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though).
However, Kafka Streams 1.0 requires 0.10 message format or newer and does not work with older message formats. </li>
    <li> If you are monitoring on streams metrics, you will need to make some changes to the metrics names in your reporting and monitoring code, because the metrics sensor hierarchy was changed. </li>
<li> There are a few public APIs including <code>ProcessorContext#schedule()</code>, <code>Processor#punctuate()</code> and <code>KStreamBuilder</code>, <code>TopologyBuilder</code> are being deprecated by new APIs.
We recommend making corresponding code changes, which should be very minor since the new APIs look quite similar, when you upgrade.
<li> See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_100">Streams API changes in 1.0.0</a> for more details. </li>
</ul>
<h5><a id="upgrade_100_streams_from_0102" href="#upgrade_100_streams_from_0102">Upgrading a 0.10.2 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.2 to 1.0 does not require a broker upgrade.
A Kafka Streams 1.0 application can connect to 1.0, 0.11.0, 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though). </li>
    <li> If you are monitoring on streams metrics, you will need to make some changes to the metrics names in your reporting and monitoring code, because the metrics sensor hierarchy was changed. </li>
<li> There are a few public APIs including <code>ProcessorContext#schedule()</code>, <code>Processor#punctuate()</code> and <code>KStreamBuilder</code>, <code>TopologyBuilder</code> are being deprecated by new APIs.
We recommend making corresponding code changes, which should be very minor since the new APIs look quite similar, when you upgrade.
<li> If you specify customized <code>key.serde</code>, <code>value.serde</code> and <code>timestamp.extractor</code> in configs, it is recommended to use their replaced configure parameter as these configs are deprecated. </li>
<li> See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0110">Streams API changes in 0.11.0</a> for more details. </li>
</ul>
<h5><a id="upgrade_100_streams_from_0101" href="#upgrade_100_streams_from_0101">Upgrading a 0.10.1 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.1 to 1.0 does not require a broker upgrade.
A Kafka Streams 1.0 application can connect to 1.0, 0.11.0, 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though). </li>
<li> You need to recompile your code. Just swapping the Kafka Streams library jar file will not work and will break your application. </li>
    <li> If you are monitoring on streams metrics, you will need to make some changes to the metrics names in your reporting and monitoring code, because the metrics sensor hierarchy was changed. </li>
<li> There are a few public APIs including <code>ProcessorContext#schedule()</code>, <code>Processor#punctuate()</code> and <code>KStreamBuilder</code>, <code>TopologyBuilder</code> are being deprecated by new APIs.
We recommend making corresponding code changes, which should be very minor since the new APIs look quite similar, when you upgrade.
<li> If you specify customized <code>key.serde</code>, <code>value.serde</code> and <code>timestamp.extractor</code> in configs, it is recommended to use their replaced configure parameter as these configs are deprecated. </li>
<li> If you use a custom (i.e., user implemented) timestamp extractor, you will need to update this code, because the <code>TimestampExtractor</code> interface was changed. </li>
<li> If you register custom metrics, you will need to update this code, because the <code>StreamsMetric</code> interface was changed. </li>
<li> See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_100">Streams API changes in 1.0.0</a>,
<a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0110">Streams API changes in 0.11.0</a> and
<a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0102">Streams API changes in 0.10.2</a> for more details. </li>
</ul>
<h5><a id="upgrade_100_streams_from_0100" href="#upgrade_100_streams_from_0100">Upgrading a 0.10.0 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.0 to 1.0 does require a <a href="#upgrade_10_1">broker upgrade</a> because a Kafka Streams 1.0 application can only connect to 1.0, 0.11.0, 0.10.2, or 0.10.1 brokers. </li>
<li> There are a couple of API changes that are not backward compatible (cf. <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_100">Streams API changes in 1.0.0</a>,
<a href="/{{version}}/documentation/streams#streams_api_changes_0110">Streams API changes in 0.11.0</a>,
<a href="/{{version}}/documentation/streams#streams_api_changes_0102">Streams API changes in 0.10.2</a>, and
<a href="/{{version}}/documentation/streams#streams_api_changes_0101">Streams API changes in 0.10.1</a> for more details).
Thus, you need to update and recompile your code. Just swapping the Kafka Streams library jar file will not work and will break your application. </li>
<li> Upgrading from 0.10.0.x to 1.0.2 requires two rolling bounces with config <code>upgrade.from="0.10.0"</code> set for first upgrade phase
(cf. <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-268%3A+Simplify+Kafka+Streams+Rebalance+Metadata+Upgrade">KIP-268</a>).
As an alternative, an offline upgrade is also possible.
<ul>
<li> prepare your application instances for a rolling bounce and make sure that config <code>upgrade.from</code> is set to <code>"0.10.0"</code> for new version 1.0.2 </li>
<li> bounce each instance of your application once </li>
<li> prepare your newly deployed 1.0.2 application instances for a second round of rolling bounces; make sure to remove the value for config <code>upgrade.from</code> </li>
<li> bounce each instance of your application once more to complete the upgrade </li>
</ul>
</li>
<li> Upgrading from 0.10.0.x to 1.0.0 or 1.0.1 requires an offline upgrade (rolling bounce upgrade is not supported)
<ul>
<li> stop all old (0.10.0.x) application instances </li>
<li> update your code and swap old code and jar file with new code and new jar file </li>
<li> restart all new (1.0.0 or 1.0.1) application instances </li>
</ul>
</li>
</ul>
<h4><a id="upgrade_11_0_0" href="#upgrade_11_0_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x, 0.10.1.x or 0.10.2.x to 0.11.0.0</a></h4>
<p>Kafka 0.11.0.0 introduces a new message format version as well as wire protocol changes. By following the recommended rolling upgrade plan below,
you guarantee no downtime during the upgrade. However, please review the <a href="#upgrade_1100_notable">notable changes in 0.11.0.0</a> before upgrading.
</p>
<p>Starting with version 0.10.2, Java clients (producer and consumer) have acquired the ability to communicate with older brokers. Version 0.11.0
clients can talk to version 0.10.0 or newer brokers. However, if your brokers are older than 0.10.0, you must upgrade all the brokers in the
Kafka cluster before upgrading your clients. Version 0.11.0 brokers support 0.8.x and newer clients.
</p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties on all brokers and add the following properties. CURRENT_KAFKA_VERSION refers to the version you
are upgrading from. CURRENT_MESSAGE_FORMAT_VERSION refers to the current message format version currently in use. If you have
not overridden the message format previously, then CURRENT_MESSAGE_FORMAT_VERSION should be set to match CURRENT_KAFKA_VERSION.
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2, 0.9.0, 0.10.0, 0.10.1 or 0.10.2).</li>
<li>log.message.format.version=CURRENT_MESSAGE_FORMAT_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact
following the upgrade</a> for the details on what this configuration does.)</li>
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. </li>
<li> Once the entire cluster is upgraded, bump the protocol version by editing <code>inter.broker.protocol.version</code> and setting it to 0.11.0, but
do not change <code>log.message.format.version</code> yet. </li>
<li> Restart the brokers one by one for the new protocol version to take effect. </li>
<li> Once all (or most) consumers have been upgraded to 0.11.0 or later, then change log.message.format.version to 0.11.0 on each
broker and restart them one by one. Note that the older Scala consumer does not support the new message format, so to avoid
the performance cost of down-conversion (or to take advantage of <a href="#upgrade_11_exactly_once_semantics">exactly once semantics</a>),
the new Java consumer must be used.</li>
</ol>
<p><b>Additional Upgrade Notes:</b></p>
<ol>
<li>If you are willing to accept downtime, you can simply take all the brokers down, update the code and start them back up. They will start
with the new protocol by default.</li>
<li>Bumping the protocol version and restarting can be done any time after the brokers are upgraded. It does not have to be immediately after.
Similarly for the message format version.</li>
<li>It is also possible to enable the 0.11.0 message format on individual topics using the topic admin tool (<code>bin/kafka-topics.sh</code>)
prior to updating the global setting <code>log.message.format.version</code>.</li>
<li>If you are upgrading from a version prior to 0.10.0, it is NOT necessary to first update the message format to 0.10.0
before you switch to 0.11.0.</li>
</ol>
<h5><a id="upgrade_1100_streams_from_0102" href="#upgrade_1100_streams_from_0102">Upgrading a 0.10.2 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.2 to 0.11.0 does not require a broker upgrade.
A Kafka Streams 0.11.0 application can connect to 0.11.0, 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though). </li>
<li> If you specify customized <code>key.serde</code>, <code>value.serde</code> and <code>timestamp.extractor</code> in configs, it is recommended to use their replaced configure parameter as these configs are deprecated. </li>
<li> See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0110">Streams API changes in 0.11.0</a> for more details. </li>
</ul>
<h5><a id="upgrade_1100_streams_from_0101" href="#upgrade_1100_streams_from_0101">Upgrading a 0.10.1 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.1 to 0.11.0 does not require a broker upgrade.
A Kafka Streams 0.11.0 application can connect to 0.11.0, 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though). </li>
<li> You need to recompile your code. Just swapping the Kafka Streams library jar file will not work and will break your application. </li>
<li> If you specify customized <code>key.serde</code>, <code>value.serde</code> and <code>timestamp.extractor</code> in configs, it is recommended to use their replaced configure parameter as these configs are deprecated. </li>
<li> If you use a custom (i.e., user implemented) timestamp extractor, you will need to update this code, because the <code>TimestampExtractor</code> interface was changed. </li>
<li> If you register custom metrics, you will need to update this code, because the <code>StreamsMetric</code> interface was changed. </li>
<li> See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0110">Streams API changes in 0.11.0</a> and
<a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0102">Streams API changes in 0.10.2</a> for more details. </li>
</ul>
<h5><a id="upgrade_1100_streams_from_0100" href="#upgrade_1100_streams_from_0100">Upgrading a 0.10.0 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.0 to 0.11.0 does require a <a href="#upgrade_10_1">broker upgrade</a> because a Kafka Streams 0.11.0 application can only connect to 0.11.0, 0.10.2, or 0.10.1 brokers. </li>
<li> There are a couple of API changes that are not backward compatible (cf. <a href="/{{version}}/documentation/streams#streams_api_changes_0110">Streams API changes in 0.11.0</a>,
<a href="/{{version}}/documentation/streams#streams_api_changes_0102">Streams API changes in 0.10.2</a>, and
<a href="/{{version}}/documentation/streams#streams_api_changes_0101">Streams API changes in 0.10.1</a> for more details).
Thus, you need to update and recompile your code. Just swapping the Kafka Streams library jar file will not work and will break your application. </li>
<li> Upgrading from 0.10.0.x to 0.11.0.3 requires two rolling bounces with config <code>upgrade.from="0.10.0"</code> set for first upgrade phase
(cf. <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-268%3A+Simplify+Kafka+Streams+Rebalance+Metadata+Upgrade">KIP-268</a>).
As an alternative, an offline upgrade is also possible.
<ul>
<li> prepare your application instances for a rolling bounce and make sure that config <code>upgrade.from</code> is set to <code>"0.10.0"</code> for new version 0.11.0.3 </li>
<li> bounce each instance of your application once </li>
<li> prepare your newly deployed 0.11.0.3 application instances for a second round of rolling bounces; make sure to remove the value for config <code>upgrade.from</code> </li>
<li> bounce each instance of your application once more to complete the upgrade </li>
</ul>
</li>
<li> Upgrading from 0.10.0.x to 0.11.0.0, 0.11.0.1, or 0.11.0.2 requires an offline upgrade (rolling bounce upgrade is not supported)
<ul>
<li> stop all old (0.10.0.x) application instances </li>
<li> update your code and swap old code and jar file with new code and new jar file </li>
<li> restart all new (0.11.0.0 , 0.11.0.1, or 0.11.0.2) application instances </li>
</ul>
</li>
</ul>
<h5><a id="upgrade_1103_notable" href="#upgrade_1103_notable">Notable changes in 0.11.0.3</a></h5>
<ul>
<li> New Kafka Streams configuration parameter <code>upgrade.from</code> added that allows rolling bounce upgrade from version 0.10.0.x </li>
<li> See the <a href="/{{version}}/documentation/streams/upgrade-guide.html"><b>Kafka Streams upgrade guide</b></a> for details about this new config. </li>
</ul>
<h5><a id="upgrade_1100_notable" href="#upgrade_1100_notable">Notable changes in 0.11.0.0</a></h5>
<ul>
<li>Unclean leader election is now disabled by default. The new default favors durability over availability. Users who wish
    to retain the previous behavior should set the broker config <code>unclean.leader.election.enable</code> to <code>true</code>.</li>
<li>Producer configs <code>block.on.buffer.full</code>, <code>metadata.fetch.timeout.ms</code> and <code>timeout.ms</code> have been
removed. They were initially deprecated in Kafka 0.9.0.0.</li>
<li>The <code>offsets.topic.replication.factor</code> broker config is now enforced upon auto topic creation. Internal
auto topic creation will fail with a GROUP_COORDINATOR_NOT_AVAILABLE error until the cluster size meets this
replication factor requirement.</li>
<li> When compressing data with snappy, the producer and broker will use the compression scheme's default block size (2 x 32 KB)
instead of 1 KB in order to improve the compression ratio. There have been reports of data compressed with the smaller
block size being 50% larger than when compressed with the larger block size. For the snappy case, a producer with 5000
partitions will require an additional 315 MB of JVM heap.</li>
<li> Similarly, when compressing data with gzip, the producer and broker will use 8 KB instead of 1 KB as the buffer size. The default
for gzip is excessively low (512 bytes). </li>
<li>The broker configuration <code>max.message.bytes</code> now applies to the total size of a batch of messages.
Previously the setting applied to batches of compressed messages, or to non-compressed messages individually.
A message batch may consist of only a single message, so in most cases, the limitation on the size of
individual messages is only reduced by the overhead of the batch format. However, there are some subtle implications
for message format conversion (see <a href="#upgrade_11_message_format">below</a> for more detail). Note also
that while previously the broker would ensure that at least one message is returned in each fetch request (regardless of the
total and partition-level fetch sizes), the same behavior now applies to one message batch.</li>
<li>GC log rotation is enabled by default, see KAFKA-3754 for details.</li>
<li>Deprecated constructors of RecordMetadata, MetricName and Cluster classes have been removed.</li>
<li>Added user headers support through a new Headers interface providing user headers read and write access.</li>
<li>ProducerRecord and ConsumerRecord expose the new Headers API via <code>Headers headers()</code> method call.</li>
<li>ExtendedSerializer and ExtendedDeserializer interfaces are introduced to support serialization and deserialization for headers. Headers will be ignored if the configured serializer and deserializer are not the above classes.</li>
<li>A new config, <code>group.initial.rebalance.delay.ms</code>, was introduced.
This config specifies the time, in milliseconds, that the <code>GroupCoordinator</code> will delay the initial consumer rebalance.
The rebalance will be further delayed by the value of <code>group.initial.rebalance.delay.ms</code> as new members join the group, up to a maximum of <code>max.poll.interval.ms</code>.
The default value for this is 3 seconds.
During development and testing it might be desirable to set this to 0 in order to not delay test execution time.
</li>
<li><code>org.apache.kafka.common.Cluster#partitionsForTopic</code>, <code>partitionsForNode</code> and <code>availablePartitionsForTopic</code> methods
will return an empty list instead of <code>null</code> (which is considered a bad practice) in case the metadata for the required topic does not exist.
</li>
<li>Streams API configuration parameters <code>timestamp.extractor</code>, <code>key.serde</code>, and <code>value.serde</code> were deprecated and
replaced by <code>default.timestamp.extractor</code>, <code>default.key.serde</code>, and <code>default.value.serde</code>, respectively.
</li>
<li>For offset commit failures in the Java consumer's <code>commitAsync</code> APIs, we no longer expose the underlying
cause when instances of <code>RetriableCommitFailedException</code> are passed to the commit callback. See
<a href="https://issues.apache.org/jira/browse/KAFKA-5052">KAFKA-5052</a> for more detail.
</li>
</ul>
<h5><a id="upgrade_1100_new_protocols" href="#upgrade_1100_new_protocols">New Protocol Versions</a></h5>
<ul>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-107%3A+Add+purgeDataBefore()+API+in+AdminClient">KIP-107</a>: FetchRequest v5 introduces a partition-level <code>log_start_offset</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-107%3A+Add+purgeDataBefore()+API+in+AdminClient">KIP-107</a>: FetchResponse v5 introduces a partition-level <code>log_start_offset</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-82+-+Add+Record+Headers">KIP-82</a>: ProduceRequest v3 introduces an array of <code>header</code> in the message protocol, containing <code>key</code> field and <code>value</code> field.</li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-82+-+Add+Record+Headers">KIP-82</a>: FetchResponse v5 introduces an array of <code>header</code> in the message protocol, containing <code>key</code> field and <code>value</code> field.</li>
</ul>
<h5><a id="upgrade_11_exactly_once_semantics" href="#upgrade_11_exactly_once_semantics">Notes on Exactly Once Semantics</a></h5>
<p>Kafka 0.11.0 includes support for idempotent and transactional capabilities in the producer. Idempotent delivery
ensures that messages are delivered exactly once to a particular topic partition during the lifetime of a single producer.
Transactional delivery allows producers to send data to multiple partitions such that either all messages are successfully
delivered, or none of them are. Together, these capabilities enable "exactly once semantics" in Kafka. More details on these
features are available in the user guide, but below we add a few specific notes on enabling them in an upgraded cluster.
Note that enabling EoS is not required and there is no impact on the broker's behavior if unused.</p>
<ol>
<li>Only the new Java producer and consumer support exactly once semantics.</li>
<li>These features depend crucially on the <a href="#upgrade_11_message_format">0.11.0 message format</a>. Attempting to use them
on an older format will result in unsupported version errors.</li>
<li>Transaction state is stored in a new internal topic <code>__transaction_state</code>. This topic is not created until
    the first attempt to use a transactional request API. Similar to the consumer offsets topic, there are several settings
to control the topic's configuration. For example, <code>transaction.state.log.min.isr</code> controls the minimum ISR for
this topic. See the configuration section in the user guide for a full list of options.</li>
<li>For secure clusters, the transactional APIs require new ACLs which can be turned on with the <code>bin/kafka-acls.sh</code>
    tool.</li>
<li>EoS in Kafka introduces new request APIs and modifies several existing ones. See
<a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-98+-+Exactly+Once+Delivery+and+Transactional+Messaging#KIP-98-ExactlyOnceDeliveryandTransactionalMessaging-RPCProtocolSummary">KIP-98</a>
for the full details.</li>
</ol>
<h5><a id="upgrade_11_message_format" href="#upgrade_11_message_format">Notes on the new message format in 0.11.0</a></h5>
<p>The 0.11.0 message format includes several major enhancements in order to support better delivery semantics for the producer
(see <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-98+-+Exactly+Once+Delivery+and+Transactional+Messaging">KIP-98</a>)
and improved replication fault tolerance
(see <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-101+-+Alter+Replication+Protocol+to+use+Leader+Epoch+rather+than+High+Watermark+for+Truncation">KIP-101</a>).
Although the new format contains more information to make these improvements possible, we have made the batch format much
more efficient. As long as the number of messages per batch is more than 2, you can expect lower overall overhead. For smaller
batches, however, there may be a small performance impact. See <a href="https://bit.ly/kafka-eos-perf">here</a> for the results of our
initial performance analysis of the new message format. You can also find more detail on the message format in the
<a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-98+-+Exactly+Once+Delivery+and+Transactional+Messaging#KIP-98-ExactlyOnceDeliveryandTransactionalMessaging-MessageFormat">KIP-98</a> proposal.
</p>
<p>One of the notable differences in the new message format is that even uncompressed messages are stored together as a single batch.
This has a few implications for the broker configuration <code>max.message.bytes</code>, which limits the size of a single batch. First,
if an older client produces messages to a topic partition using the old format, and the messages are individually smaller than
<code>max.message.bytes</code>, the broker may still reject them after they are merged into a single batch during the up-conversion process.
Generally this can happen when the aggregate size of the individual messages is larger than <code>max.message.bytes</code>. There is a similar
effect for older consumers reading messages down-converted from the new format: if the fetch size is not set at least as large as
<code>max.message.bytes</code>, the consumer may not be able to make progress even if the individual uncompressed messages are smaller
than the configured fetch size. This behavior does not impact the Java client for 0.10.1.0 and later since it uses an updated fetch protocol
which ensures that at least one message can be returned even if it exceeds the fetch size. To get around these problems, you should ensure
1) that the producer's batch size is not set larger than <code>max.message.bytes</code>, and 2) that the consumer's fetch size is set at
least as large as <code>max.message.bytes</code>.
</p>
<p>Most of the discussion on the performance impact of <a href="#upgrade_10_performance_impact">upgrading to the 0.10.0 message format</a>
remains pertinent to the 0.11.0 upgrade. This mainly affects clusters that are not secured with TLS since "zero-copy" transfer
is already not possible in that case. In order to avoid the cost of down-conversion, you should ensure that consumer applications
are upgraded to the latest 0.11.0 client. Significantly, since the old consumer has been deprecated in 0.11.0.0, it does not support
the new message format. You must upgrade to use the new consumer to use the new message format without the cost of down-conversion.
Note that 0.11.0 consumers support backwards compatibility with 0.10.0 brokers and upward, so it is possible to upgrade the
clients first before the brokers.
</p>
<h4><a id="upgrade_10_2_0" href="#upgrade_10_2_0">Upgrading from 0.8.x, 0.9.x, 0.10.0.x or 0.10.1.x to 0.10.2.0</a></h4>
<p>0.10.2.0 has wire protocol changes. By following the recommended rolling upgrade plan below, you guarantee no downtime during the upgrade.
However, please review the <a href="#upgrade_1020_notable">notable changes in 0.10.2.0</a> before upgrading.
</p>
<p>Starting with version 0.10.2, Java clients (producer and consumer) have acquired the ability to communicate with older brokers. Version 0.10.2
clients can talk to version 0.10.0 or newer brokers. However, if your brokers are older than 0.10.0, you must upgrade all the brokers in the
Kafka cluster before upgrading your clients. Version 0.10.2 brokers support 0.8.x and newer clients.
</p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties file on all brokers and add the following properties:
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2, 0.9.0, 0.10.0 or 0.10.1).</li>
<li>log.message.format.version=CURRENT_KAFKA_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact following the upgrade</a> for the details on what this configuration does.)
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. </li>
<li> Once the entire cluster is upgraded, bump the protocol version by editing inter.broker.protocol.version and setting it to 0.10.2. </li>
<li> If your previous message format is 0.10.0, change log.message.format.version to 0.10.2 (this is a no-op as the message format is the same for 0.10.0, 0.10.1 and 0.10.2).
If your previous message format version is lower than 0.10.0, do not change log.message.format.version yet - this parameter should only change once all consumers have been upgraded to 0.10.0.0 or later.</li>
<li> Restart the brokers one by one for the new protocol version to take effect. </li>
<li> If log.message.format.version is still lower than 0.10.0 at this point, wait until all consumers have been upgraded to 0.10.0 or later,
then change log.message.format.version to 0.10.2 on each broker and restart them one by one. </li>
</ol>
<p><b>Note:</b> If you are willing to accept downtime, you can simply take all the brokers down, update the code and start all of them. They will start with the new protocol by default.
<p><b>Note:</b> Bumping the protocol version and restarting can be done any time after the brokers were upgraded. It does not have to be immediately after.
<h5><a id="upgrade_1020_streams_from_0101" href="#upgrade_1020_streams_from_0101">Upgrading a 0.10.1 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.1 to 0.10.2 does not require a broker upgrade.
A Kafka Streams 0.10.2 application can connect to 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though). </li>
<li> You need to recompile your code. Just swapping the Kafka Streams library jar file will not work and will break your application. </li>
<li> If you use a custom (i.e., user implemented) timestamp extractor, you will need to update this code, because the <code>TimestampExtractor</code> interface was changed. </li>
<li> If you register custom metrics, you will need to update this code, because the <code>StreamsMetric</code> interface was changed. </li>
<li> See <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0102">Streams API changes in 0.10.2</a> for more details. </li>
</ul>
<h5><a id="upgrade_1020_streams_from_0100" href="#upgrade_1020_streams_from_0100">Upgrading a 0.10.0 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.0 to 0.10.2 does require a <a href="#upgrade_10_1">broker upgrade</a> because a Kafka Streams 0.10.2 application can only connect to 0.10.2 or 0.10.1 brokers. </li>
<li> There are a couple of API changes that are not backward compatible (cf. <a href="/{{version}}/documentation/streams#streams_api_changes_0102">Streams API changes in 0.10.2</a> for more details).
Thus, you need to update and recompile your code. Just swapping the Kafka Streams library jar file will not work and will break your application. </li>
<li> Upgrading from 0.10.0.x to 0.10.2.2 requires two rolling bounces with config <code>upgrade.from="0.10.0"</code> set for first upgrade phase
(cf. <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-268%3A+Simplify+Kafka+Streams+Rebalance+Metadata+Upgrade">KIP-268</a>).
As an alternative, an offline upgrade is also possible.
<ul>
<li> prepare your application instances for a rolling bounce and make sure that config <code>upgrade.from</code> is set to <code>"0.10.0"</code> for new version 0.10.2.2 </li>
<li> bounce each instance of your application once </li>
<li> prepare your newly deployed 0.10.2.2 application instances for a second round of rolling bounces; make sure to remove the value for config <code>upgrade.from</code> </li>
<li> bounce each instance of your application once more to complete the upgrade </li>
</ul>
</li>
<li> Upgrading from 0.10.0.x to 0.10.2.0 or 0.10.2.1 requires an offline upgrade (rolling bounce upgrade is not supported)
<ul>
<li> stop all old (0.10.0.x) application instances </li>
<li> update your code and swap old code and jar file with new code and new jar file </li>
<li> restart all new (0.10.2.0 or 0.10.2.1) application instances </li>
</ul>
</li>
</ul>
<h5><a id="upgrade_10202_notable" href="#upgrade_10202_notable">Notable changes in 0.10.2.2</a></h5>
<ul>
<li> New configuration parameter <code>upgrade.from</code> added that allows rolling bounce upgrade from version 0.10.0.x </li>
</ul>
<h5><a id="upgrade_10201_notable" href="#upgrade_10201_notable">Notable changes in 0.10.2.1</a></h5>
<ul>
<li> The default values for two configurations of the StreamsConfig class were changed to improve the resiliency of Kafka Streams applications. The internal Kafka Streams producer <code>retries</code> default value was changed from 0 to 10. The internal Kafka Streams consumer <code>max.poll.interval.ms</code> default value was changed from 300000 to <code>Integer.MAX_VALUE</code>.
</li>
</ul>
<h5><a id="upgrade_1020_notable" href="#upgrade_1020_notable">Notable changes in 0.10.2.0</a></h5>
<ul>
<li>The Java clients (producer and consumer) have acquired the ability to communicate with older brokers. Version 0.10.2 clients
can talk to version 0.10.0 or newer brokers. Note that some features are not available or are limited when older brokers
are used. </li>
<li>Several methods on the Java consumer may now throw <code>InterruptException</code> if the calling thread is interrupted.
Please refer to the <code>KafkaConsumer</code> Javadoc for a more in-depth explanation of this change.</li>
<li>Java consumer now shuts down gracefully. By default, the consumer waits up to 30 seconds to complete pending requests.
A new close API with timeout has been added to <code>KafkaConsumer</code> to control the maximum wait time.</li>
<li>Multiple regular expressions separated by commas can be passed to MirrorMaker with the new Java consumer via the --whitelist option. This
makes the behaviour consistent with MirrorMaker when using the old Scala consumer.</li>
<li>Upgrading your Streams application from 0.10.1 to 0.10.2 does not require a broker upgrade.
A Kafka Streams 0.10.2 application can connect to 0.10.2 and 0.10.1 brokers (it is not possible to connect to 0.10.0 brokers though).</li>
<li>The Zookeeper dependency was removed from the Streams API. The Streams API now uses the Kafka protocol to manage internal topics instead of
modifying Zookeeper directly. This eliminates the need for privileges to access Zookeeper directly and "StreamsConfig.ZOOKEEPER_CONFIG"
should not be set in the Streams app any more. If the Kafka cluster is secured, Streams apps must have the required security privileges to create new topics.</li>
<li>Several new fields including "security.protocol", "connections.max.idle.ms", "retry.backoff.ms", "reconnect.backoff.ms" and "request.timeout.ms" were added to
StreamsConfig class. User should pay attention to the default values and set these if needed. For more details please refer to <a href="/{{version}}/documentation/#streamsconfigs">3.5 Kafka Streams Configs</a>.</li>
</ul>
<h5><a id="upgrade_1020_new_protocols" href="#upgrade_1020_new_protocols">New Protocol Versions</a></h5>
<ul>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-88%3A+OffsetFetch+Protocol+Update">KIP-88</a>: OffsetFetchRequest v2 supports retrieval of offsets for all topics if the <code>topics</code> array is set to <code>null</code>. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-88%3A+OffsetFetch+Protocol+Update">KIP-88</a>: OffsetFetchResponse v2 introduces a top-level <code>error_code</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-103%3A+Separation+of+Internal+and+External+traffic">KIP-103</a>: UpdateMetadataRequest v3 introduces a <code>listener_name</code> field to the elements of the <code>end_points</code> array. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-108%3A+Create+Topic+Policy">KIP-108</a>: CreateTopicsRequest v1 introduces a <code>validate_only</code> field. </li>
<li> <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-108%3A+Create+Topic+Policy">KIP-108</a>: CreateTopicsResponse v1 introduces an <code>error_message</code> field to the elements of the <code>topic_errors</code> array. </li>
</ul>
<h4><a id="upgrade_10_1" href="#upgrade_10_1">Upgrading from 0.8.x, 0.9.x or 0.10.0.X to 0.10.1.0</a></h4>
0.10.1.0 has wire protocol changes. By following the recommended rolling upgrade plan below, you guarantee no downtime during the upgrade.
However, please notice the <a href="#upgrade_10_1_breaking">Potential breaking changes in 0.10.1.0</a> before upgrade.
<br>
Note: Because new protocols are introduced, it is important to upgrade your Kafka clusters before upgrading your clients (i.e. 0.10.1.x clients
only support 0.10.1.x or later brokers while 0.10.1.x brokers also support older clients).
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties file on all brokers and add the following properties:
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2.0, 0.9.0.0 or 0.10.0.0).</li>
<li>log.message.format.version=CURRENT_KAFKA_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact following the upgrade</a> for the details on what this configuration does.)
</ul>
</li>
<li> Upgrade the brokers one at a time: shut down the broker, update the code, and restart it. </li>
<li> Once the entire cluster is upgraded, bump the protocol version by editing inter.broker.protocol.version and setting it to 0.10.1.0. </li>
<li> If your previous message format is 0.10.0, change log.message.format.version to 0.10.1 (this is a no-op as the message format is the same for both 0.10.0 and 0.10.1).
If your previous message format version is lower than 0.10.0, do not change log.message.format.version yet - this parameter should only change once all consumers have been upgraded to 0.10.0.0 or later.</li>
<li> Restart the brokers one by one for the new protocol version to take effect. </li>
<li> If log.message.format.version is still lower than 0.10.0 at this point, wait until all consumers have been upgraded to 0.10.0 or later,
then change log.message.format.version to 0.10.1 on each broker and restart them one by one. </li>
</ol>
<p><b>Note:</b> If you are willing to accept downtime, you can simply take all the brokers down, update the code and start all of them. They will start with the new protocol by default.
<p><b>Note:</b> Bumping the protocol version and restarting can be done any time after the brokers were upgraded. It does not have to be immediately after.
<!-- TODO: add when 0.10.1.2 is released
<h5><a id="upgrade_1012_notable" href="#upgrade_1012_notable">Notable changes in 0.10.1.2</a></h5>
<ul>
<li> New configuration parameter <code>upgrade.from</code> added that allows rolling bounce upgrade from version 0.10.0.x </li>
</ul>
-->
<h5><a id="upgrade_10_1_breaking" href="#upgrade_10_1_breaking">Potential breaking changes in 0.10.1.0</a></h5>
<ul>
<li> The log retention time is no longer based on last modified time of the log segments. Instead it will be based on the largest timestamp of the messages in a log segment.</li>
    <li> The log rolling time is no longer depending on log segment create time. Instead it is now based on the timestamp in the messages. More specifically, if the timestamp of the first message in the segment is T, the log will be rolled out when a new message has a timestamp greater than or equal to T + log.roll.ms </li>
<li> The open file handlers of 0.10.0 will increase by ~33% because of the addition of time index files for each segment.</li>
    <li> The time index and offset index share the same index size configuration. Since each time index entry is 1.5x the size of an offset index entry, users may need to increase log.index.size.max.bytes to avoid potential frequent log rolling. </li>
    <li> Due to the increased number of index files, on some brokers with a large number of log segments (e.g. >15K), the log loading process during the broker startup could be longer. Based on our experiment, setting the num.recovery.threads.per.data.dir to one may reduce the log loading time. </li>
</ul>
<h5><a id="upgrade_1010_streams_from_0100" href="#upgrade_1010_streams_from_0100">Upgrading a 0.10.0 Kafka Streams Application</a></h5>
<ul>
<li> Upgrading your Streams application from 0.10.0 to 0.10.1 does require a <a href="#upgrade_10_1">broker upgrade</a> because a Kafka Streams 0.10.1 application can only connect to 0.10.1 brokers. </li>
<li> There are couple of API changes, that are not backward compatible (cf. <a href="/{{version}}/documentation/streams/upgrade-guide#streams_api_changes_0101">Streams API changes in 0.10.1</a> for more details).
Thus, you need to update and recompile your code. Just swapping the Kafka Streams library jar file will not work and will break your application. </li>
<li> Upgrading from 0.10.0.x to 0.10.1.2 requires two rolling bounces with config <code>upgrade.from="0.10.0"</code> set for first upgrade phase
(cf. <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-268%3A+Simplify+Kafka+Streams+Rebalance+Metadata+Upgrade">KIP-268</a>).
As an alternative, an offline upgrade is also possible.
<ul>
<li> prepare your application instances for a rolling bounce and make sure that config <code>upgrade.from</code> is set to <code>"0.10.0"</code> for new version 0.10.1.2 </li>
<li> bounce each instance of your application once </li>
<li> prepare your newly deployed 0.10.1.2 application instances for a second round of rolling bounces; make sure to remove the value for config <code>upgrade.mode</code> </li>
<li> bounce each instance of your application once more to complete the upgrade </li>
</ul>
</li>
<li> Upgrading from 0.10.0.x to 0.10.1.0 or 0.10.1.1 requires an offline upgrade (rolling bounce upgrade is not supported)
<ul>
<li> stop all old (0.10.0.x) application instances </li>
<li> update your code and swap old code and jar file with new code and new jar file </li>
<li> restart all new (0.10.1.0 or 0.10.1.1) application instances </li>
</ul>
</li>
</ul>
<h5><a id="upgrade_1010_notable" href="#upgrade_1010_notable">Notable changes in 0.10.1.0</a></h5>
<ul>
<li> The new Java consumer is no longer in beta and we recommend it for all new development. The old Scala consumers are still supported, but they will be deprecated in the next release
and will be removed in a future major release. </li>
<li> The <code>--new-consumer</code>/<code>--new.consumer</code> switch is no longer required to use tools like MirrorMaker and the Console Consumer with the new consumer; one simply
needs to pass a Kafka broker to connect to instead of the ZooKeeper ensemble. In addition, usage of the Console Consumer with the old consumer has been deprecated and it will be
removed in a future major release. </li>
<li> Kafka clusters can now be uniquely identified by a cluster id. It will be automatically generated when a broker is upgraded to 0.10.1.0. The cluster id is available via the kafka.server:type=KafkaServer,name=ClusterId metric and it is part of the Metadata response. Serializers, client interceptors and metric reporters can receive the cluster id by implementing the ClusterResourceListener interface. </li>
<li> The BrokerState "RunningAsController" (value 4) has been removed. Due to a bug, a broker would only be in this state briefly before transitioning out of it and hence the impact of the removal should be minimal. The recommended way to detect if a given broker is the controller is via the kafka.controller:type=KafkaController,name=ActiveControllerCount metric. </li>
<li> The new Java Consumer now allows users to search offsets by timestamp on partitions. </li>
<li> The new Java Consumer now supports heartbeating from a background thread. There is a new configuration
<code>max.poll.interval.ms</code> which controls the maximum time between poll invocations before the consumer
will proactively leave the group (5 minutes by default). The value of the configuration
<code>request.timeout.ms</code> must always be larger than <code>max.poll.interval.ms</code> because this is the maximum
time that a JoinGroup request can block on the server while the consumer is rebalancing, so we have changed its default
value to just above 5 minutes. Finally, the default value of <code>session.timeout.ms</code> has been adjusted down to
10 seconds, and the default value of <code>max.poll.records</code> has been changed to 500.</li>
<li> When using an Authorizer and a user doesn't have <b>Describe</b> authorization on a topic, the broker will no
longer return TOPIC_AUTHORIZATION_FAILED errors to requests since this leaks topic names. Instead, the UNKNOWN_TOPIC_OR_PARTITION
error code will be returned. This may cause unexpected timeouts or delays when using the producer and consumer since
Kafka clients will typically retry automatically on unknown topic errors. You should consult the client logs if you
suspect this could be happening.</li>
<li> Fetch responses have a size limit by default (50 MB for consumers and 10 MB for replication). The existing per partition limits also apply (1 MB for consumers
and replication). Note that neither of these limits is an absolute maximum as explained in the next point. </li>
<li> Consumers and replicas can make progress if a message larger than the response/partition size limit is found. More concretely, if the first message in the
first non-empty partition of the fetch is larger than either or both limits, the message will still be returned. </li>
<li> Overloaded constructors were added to <code>kafka.api.FetchRequest</code> and <code>kafka.javaapi.FetchRequest</code> to allow the caller to specify the
order of the partitions (since order is significant in v3). The previously existing constructors were deprecated and the partitions are shuffled before
the request is sent to avoid starvation issues. </li>
</ul>
<h5><a id="upgrade_1010_new_protocols" href="#upgrade_1010_new_protocols">New Protocol Versions</a></h5>
<ul>
<li> ListOffsetRequest v1 supports accurate offset search based on timestamps. </li>
<li> MetadataResponse v2 introduces a new field: "cluster_id". </li>
<li> FetchRequest v3 supports limiting the response size (in addition to the existing per partition limit), it returns messages
bigger than the limits if required to make progress and the order of partitions in the request is now significant. </li>
<li> JoinGroup v1 introduces a new field: "rebalance_timeout". </li>
</ul>
<h4><a id="upgrade_10" href="#upgrade_10">Upgrading from 0.8.x or 0.9.x to 0.10.0.0</a></h4>
<p>
0.10.0.0 has <a href="#upgrade_10_breaking">potential breaking changes</a> (please review before upgrading) and possible <a href="#upgrade_10_performance_impact"> performance impact following the upgrade</a>. By following the recommended rolling upgrade plan below, you guarantee no downtime and no performance impact during and following the upgrade.
<br>
Note: Because new protocols are introduced, it is important to upgrade your Kafka clusters before upgrading your clients.
</p>
<p>
<b>Notes to clients with version 0.9.0.0: </b>Due to a bug introduced in 0.9.0.0,
clients that depend on ZooKeeper (old Scala high-level Consumer and MirrorMaker if used with the old consumer) will not
work with 0.10.0.x brokers. Therefore, 0.9.0.0 clients should be upgraded to 0.9.0.1 <b>before</b> brokers are upgraded to
0.10.0.x. This step is not necessary for 0.8.X or 0.9.0.1 clients.
</p>
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties file on all brokers and add the following properties:
<ul>
<li>inter.broker.protocol.version=CURRENT_KAFKA_VERSION (e.g. 0.8.2 or 0.9.0.0).</li>
        <li>log.message.format.version=CURRENT_KAFKA_VERSION (See <a href="#upgrade_10_performance_impact">potential performance impact following the upgrade</a> for the details on what this configuration does.)</li>
</ul>
</li>
<li> Upgrade the brokers. This can be done a broker at a time by simply bringing it down, updating the code, and restarting it. </li>
<li> Once the entire cluster is upgraded, bump the protocol version by editing inter.broker.protocol.version and setting it to 0.10.0.0. NOTE: You shouldn't touch log.message.format.version yet - this parameter should only change once all consumers have been upgraded to 0.10.0.0 </li>
<li> Restart the brokers one by one for the new protocol version to take effect. </li>
<li> Once all consumers have been upgraded to 0.10.0, change log.message.format.version to 0.10.0 on each broker and restart them one by one.
</li>
</ol>
<p><b>Note:</b> If you are willing to accept downtime, you can simply take all the brokers down, update the code and start all of them. They will start with the new protocol by default.
<p><b>Note:</b> Bumping the protocol version and restarting can be done any time after the brokers were upgraded. It does not have to be immediately after.
<h5><a id="upgrade_10_performance_impact" href="#upgrade_10_performance_impact">Potential performance impact following upgrade to 0.10.0.0</a></h5>
<p>
The message format in 0.10.0 includes a new timestamp field and uses relative offsets for compressed messages.
The on disk message format can be configured through log.message.format.version in the server.properties file.
The default on-disk message format is 0.10.0. If a consumer client is on a version before 0.10.0.0, it only understands
message formats before 0.10.0. In this case, the broker is able to convert messages from the 0.10.0 format to an earlier format
before sending the response to the consumer on an older version. However, the broker can't use zero-copy transfer in this case.
Reports from the Kafka community on the performance impact have shown CPU utilization going from 20% before to 100% after an upgrade, which forced an immediate upgrade of all clients to bring performance back to normal.
To avoid such message conversion before consumers are upgraded to 0.10.0.0, one can set log.message.format.version to 0.8.2 or 0.9.0 when upgrading the broker to 0.10.0.0. This way, the broker can still use zero-copy transfer to send the data to the old consumers. Once consumers are upgraded, one can change the message format to 0.10.0 on the broker and enjoy the new message format that includes new timestamp and improved compression.
The conversion is supported to ensure compatibility and can be useful to support a few apps that have not updated to newer clients yet, but is impractical to support all consumer traffic on even an overprovisioned cluster. Therefore, it is critical to avoid the message conversion as much as possible when brokers have been upgraded but the majority of clients have not.
</p>
<p>
For clients that are upgraded to 0.10.0.0, there is no performance impact.
</p>
<p>
<b>Note:</b> By setting the message format version, one certifies that all existing messages are on or below that
message format version. Otherwise consumers before 0.10.0.0 might break. In particular, after the message format
is set to 0.10.0, one should not change it back to an earlier format as it may break consumers on versions before 0.10.0.0.
</p>
<p>
<b>Note:</b> Due to the additional timestamp introduced in each message, producers sending small messages may see a
message throughput degradation because of the increased overhead.
Likewise, replication now transmits an additional 8 bytes per message.
If you're running close to the network capacity of your cluster, it's possible that you'll overwhelm the network cards
and see failures and performance issues due to the overload.
</p>
<b>Note:</b> If you have enabled compression on producers, you may notice reduced producer throughput and/or
lower compression rate on the broker in some cases. When receiving compressed messages, 0.10.0
brokers avoid recompressing the messages, which in general reduces the latency and improves the throughput. In
certain cases, however, this may reduce the batching size on the producer, which could lead to worse throughput. If this
happens, users can tune linger.ms and batch.size of the producer for better throughput. In addition, the producer buffer
used for compressing messages with snappy is smaller than the one used by the broker, which may have a negative
impact on the compression ratio for the messages on disk. We intend to make this configurable in a future Kafka
release.
<p>
</p>
<h5><a id="upgrade_10_breaking" href="#upgrade_10_breaking">Potential breaking changes in 0.10.0.0</a></h5>
<ul>
<li> Starting from Kafka 0.10.0.0, the message format version in Kafka is represented as the Kafka version. For example, message format 0.9.0 refers to the highest message version supported by Kafka 0.9.0. </li>
<li> Message format 0.10.0 has been introduced and it is used by default. It includes a timestamp field in the messages and relative offsets are used for compressed messages. </li>
<li> ProduceRequest/Response v2 has been introduced and it is used by default to support message format 0.10.0 </li>
<li> FetchRequest/Response v2 has been introduced and it is used by default to support message format 0.10.0 </li>
<li> MessageFormatter interface was changed from <code>def writeTo(key: Array[Byte], value: Array[Byte], output: PrintStream)</code> to
<code>def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream)</code> </li>
<li> MessageReader interface was changed from <code>def readMessage(): KeyedMessage[Array[Byte], Array[Byte]]</code> to
<code>def readMessage(): ProducerRecord[Array[Byte], Array[Byte]]</code> </li>
<li> MessageFormatter's package was changed from <code>kafka.tools</code> to <code>kafka.common</code> </li>
<li> MessageReader's package was changed from <code>kafka.tools</code> to <code>kafka.common</code> </li>
<li> MirrorMakerMessageHandler no longer exposes the <code>handle(record: MessageAndMetadata[Array[Byte], Array[Byte]])</code> method as it was never called. </li>
<li> The 0.7 KafkaMigrationTool is no longer packaged with Kafka. If you need to migrate from 0.7 to 0.10.0, please migrate to 0.8 first and then follow the documented upgrade process to upgrade from 0.8 to 0.10.0. </li>
<li> The new consumer has standardized its APIs to accept <code>java.util.Collection</code> as the sequence type for method parameters. Existing code may have to be updated to work with the 0.10.0 client library. </li>
<li> LZ4-compressed message handling was changed to use an interoperable framing specification (LZ4f v1.5.1).
To maintain compatibility with old clients, this change only applies to Message format 0.10.0 and later.
Clients that Produce/Fetch LZ4-compressed messages using v0/v1 (Message format 0.9.0) should continue
to use the 0.9.0 framing implementation. Clients that use Produce/Fetch protocols v2 or later
should use interoperable LZ4f framing. A list of interoperable LZ4 libraries is available at http://www.lz4.org/
</ul>
<h5><a id="upgrade_10_notable" href="#upgrade_10_notable">Notable changes in 0.10.0.0</a></h5>
<ul>
<li> Starting from Kafka 0.10.0.0, a new client library named <b>Kafka Streams</b> is available for stream processing on data stored in Kafka topics. This new client library only works with 0.10.x and upward versioned brokers due to message format changes mentioned above. For more information please read <a href="/{{version}}/documentation/streams">Streams documentation</a>.</li>
<li> The default value of the configuration parameter <code>receive.buffer.bytes</code> is now 64K for the new consumer.</li>
<li> The new consumer now exposes the configuration parameter <code>exclude.internal.topics</code> to restrict internal topics (such as the consumer offsets topic) from accidentally being included in regular expression subscriptions. By default, it is enabled.</li>
<li> The old Scala producer has been deprecated. Users should migrate their code to the Java producer included in the kafka-clients JAR as soon as possible. </li>
<li> The new consumer API has been marked stable. </li>
</ul>
<h4><a id="upgrade_9" href="#upgrade_9">Upgrading from 0.8.0, 0.8.1.X, or 0.8.2.X to 0.9.0.0</a></h4>
0.9.0.0 has <a href="#upgrade_9_breaking">potential breaking changes</a> (please review before upgrading) and an inter-broker protocol change from previous versions. This means that upgraded brokers and clients may not be compatible with older versions. It is important that you upgrade your Kafka cluster before upgrading your clients. If you are using MirrorMaker downstream clusters should be upgraded first as well.
<p><b>For a rolling upgrade:</b></p>
<ol>
<li> Update server.properties file on all brokers and add the following property: inter.broker.protocol.version=0.8.2.X </li>
<li> Upgrade the brokers. This can be done a broker at a time by simply bringing it down, updating the code, and restarting it. </li>
<li> Once the entire cluster is upgraded, bump the protocol version by editing inter.broker.protocol.version and setting it to 0.9.0.0.</li>
<li> Restart the brokers one by one for the new protocol version to take effect </li>
</ol>
<p><b>Note:</b> If you are willing to accept downtime, you can simply take all the brokers down, update the code and start all of them. They will start with the new protocol by default.
<p><b>Note:</b> Bumping the protocol version and restarting can be done any time after the brokers were upgraded. It does not have to be immediately after.
<h5><a id="upgrade_9_breaking" href="#upgrade_9_breaking">Potential breaking changes in 0.9.0.0</a></h5>
<ul>
<li> Java 1.6 is no longer supported. </li>
<li> Scala 2.9 is no longer supported. </li>
<li> Broker IDs above 1000 are now reserved by default to automatically assigned broker IDs. If your cluster has existing broker IDs above that threshold make sure to increase the reserved.broker.max.id broker configuration property accordingly. </li>
<li> Configuration parameter replica.lag.max.messages was removed. Partition leaders will no longer consider the number of lagging messages when deciding which replicas are in sync. </li>
<li> Configuration parameter replica.lag.time.max.ms now refers not just to the time passed since last fetch request from replica, but also to time since the replica last caught up. Replicas that are still fetching messages from leaders but did not catch up to the latest messages in replica.lag.time.max.ms will be considered out of sync. </li>
<li> Compacted topics no longer accept messages without key and an exception is thrown by the producer if this is attempted. In 0.8.x, a message without key would cause the log compaction thread to subsequently complain and quit (and stop compacting all compacted topics). </li>
<li> MirrorMaker no longer supports multiple target clusters. As a result it will only accept a single --consumer.config parameter. To mirror multiple source clusters, you will need at least one MirrorMaker instance per source cluster, each with its own consumer configuration. </li>
<li> Tools packaged under <em>org.apache.kafka.clients.tools.*</em> have been moved to <em>org.apache.kafka.tools.*</em>. All included scripts will still function as usual, only custom code directly importing these classes will be affected. </li>
<li> The default Kafka JVM performance options (KAFKA_JVM_PERFORMANCE_OPTS) have been changed in kafka-run-class.sh. </li>
<li> The kafka-topics.sh script (kafka.admin.TopicCommand) now exits with non-zero exit code on failure. </li>
<li> The kafka-topics.sh script (kafka.admin.TopicCommand) will now print a warning when topic names risk metric collisions due to the use of a '.' or '_' in the topic name, and error in the case of an actual collision. </li>
    <li> The kafka-console-producer.sh script (kafka.tools.ConsoleProducer) will use the Java producer instead of the old Scala producer by default, and users have to specify 'old-producer' to use the old producer. </li>
<li> By default, all command line tools will print all logging messages to stderr instead of stdout. </li>
</ul>
<h5><a id="upgrade_901_notable" href="#upgrade_901_notable">Notable changes in 0.9.0.1</a></h5>
<ul>
<li> The new broker id generation feature can be disabled by setting broker.id.generation.enable to false. </li>
<li> Configuration parameter log.cleaner.enable is now true by default. This means topics with a cleanup.policy=compact will now be compacted by default, and 128 MB of heap will be allocated to the cleaner process via log.cleaner.dedupe.buffer.size. You may want to review log.cleaner.dedupe.buffer.size and the other log.cleaner configuration values based on your usage of compacted topics. </li>
<li> Default value of configuration parameter fetch.min.bytes for the new consumer is now 1 by default. </li>
</ul>
<h5>Deprecations in 0.9.0.0</h5>
<ul>
<li> Altering topic configuration from the kafka-topics.sh script (kafka.admin.TopicCommand) has been deprecated. Going forward, please use the kafka-configs.sh script (kafka.admin.ConfigCommand) for this functionality. </li>
<li> The kafka-consumer-offset-checker.sh (kafka.tools.ConsumerOffsetChecker) has been deprecated. Going forward, please use kafka-consumer-groups.sh (kafka.admin.ConsumerGroupCommand) for this functionality. </li>
<li> The kafka.tools.ProducerPerformance class has been deprecated. Going forward, please use org.apache.kafka.tools.ProducerPerformance for this functionality (kafka-producer-perf-test.sh will also be changed to use the new class). </li>
<li> The producer config block.on.buffer.full has been deprecated and will be removed in future release. Currently its default value has been changed to false. The KafkaProducer will no longer throw BufferExhaustedException but instead will use max.block.ms value to block, after which it will throw a TimeoutException. If block.on.buffer.full property is set to true explicitly, it will set the max.block.ms to Long.MAX_VALUE and metadata.fetch.timeout.ms will not be honoured</li>
</ul>
<h4><a id="upgrade_82" href="#upgrade_82">Upgrading from 0.8.1 to 0.8.2</a></h4>
0.8.2 is fully compatible with 0.8.1. The upgrade can be done one broker at a time by simply bringing it down, updating the code, and restarting it.
<h4><a id="upgrade_81" href="#upgrade_81">Upgrading from 0.8.0 to 0.8.1</a></h4>
0.8.1 is fully compatible with 0.8. The upgrade can be done one broker at a time by simply bringing it down, updating the code, and restarting it.
<h4><a id="upgrade_7" href="#upgrade_7">Upgrading from 0.7</a></h4>
Release 0.7 is incompatible with newer releases. Major changes were made to the API, ZooKeeper data structures, protocol, and configuration in order to add replication (which was missing in 0.7). The upgrade from 0.7 to later versions requires a <a href="https://cwiki.apache.org/confluence/display/KAFKA/Migrating+from+0.7+to+0.8">special tool</a> for migration. This migration can be done without downtime.
</script>
<div class="p-upgrade"></div>
| {'content_hash': '26e52a6688bf81d235e89ff49ee99b2c', 'timestamp': '', 'source': 'github', 'line_count': 1173, 'max_line_length': 634, 'avg_line_length': 95.04177323103154, 'alnum_prop': 0.7356750744501453, 'repo_name': 'KevinLiLu/kafka', 'id': '4668e6380edc6416513129d660277ded7f51463c', 'size': '111484', 'binary': False, 'copies': '1', 'ref': 'refs/heads/trunk', 'path': 'docs/upgrade.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '29277'}, {'name': 'Dockerfile', 'bytes': '6044'}, {'name': 'HTML', 'bytes': '3739'}, {'name': 'Java', 'bytes': '17318873'}, {'name': 'Python', 'bytes': '851864'}, {'name': 'Scala', 'bytes': '6380036'}, {'name': 'Shell', 'bytes': '91444'}, {'name': 'XSLT', 'bytes': '7116'}]} |
using System;
using static System.Configuration.ConfigurationManager;
namespace Chalmers.ILL.Models
{
public class InstanceBasic
{
public string Title { get; set; }
public string Source { get; set; } = "FOLIO";
public string InstanceTypeId { get; set; } = AppSettings["instanceResourceTypeId"];
public bool DiscoverySuppress { get; set; } = true;
public string StatusId { get; set; } = AppSettings["instanceStatusId"];
public string ModeOfIssuanceId { get; set; } = AppSettings["instanceModesOfIssuance"];
public Identifier[] Identifiers { get; set; }
public string[] StatisticalCodeIds { get; set; } = new string[]
{
AppSettings["chillinStatisticalCodeId"]
};
public InstanceBasic(string title, string orderId)
{
if (string.IsNullOrEmpty(title))
{
throw new ArgumentNullException(nameof(title));
}
if (string.IsNullOrEmpty(orderId))
{
throw new ArgumentNullException(nameof(orderId));
}
Title = title;
Identifiers = new Identifier[]
{
new Identifier
{
Value = orderId,
IdentifierTypeId = AppSettings["instanceIdentifierTypeId"]
}
};
}
}
} | {'content_hash': '0df3b50522db6de3349832f5d66776e8', 'timestamp': '', 'source': 'github', 'line_count': 41, 'max_line_length': 94, 'avg_line_length': 34.68292682926829, 'alnum_prop': 0.5541490857946554, 'repo_name': 'ChalmersLibrary/Chillin', 'id': '58d027d0c1086b09fe63dadaa5dcb530bf7e96e1', 'size': '1424', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Chalmers.ILL/Models/InstanceBasic.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ASP.NET', 'bytes': '106'}, {'name': 'C#', 'bytes': '601699'}, {'name': 'CSS', 'bytes': '7348'}, {'name': 'HTML', 'bytes': '224013'}, {'name': 'JavaScript', 'bytes': '52001'}, {'name': 'Perl', 'bytes': '1964'}]} |
<?php
namespace backend\models\search;
use Yii;
use yii\base\Model;
use yii\data\ActiveDataProvider;
use common\models\Hairdresser;
/**
 * Search model backing the admin search form for `common\models\Hairdresser`.
 */
class HairdresserSearch extends Hairdresser
{
    /**
     * {@inheritdoc}
     */
    public function rules()
    {
        // Integer attributes get exact matching; everything else is filtered
        // with a LIKE condition and only needs to be "safe" for massive assignment.
        return [
            [['id', 'status'], 'integer'],
            [['name', 'logo_img', 'address', 'description', 'working_time', 'delivery', 'phone_1', 'phone_2', 'phone_3', 'slug', 'lat', 'lng'], 'safe'],
        ];
    }

    /**
     * {@inheritdoc}
     */
    public function scenarios()
    {
        // Use the base Model scenarios so the parent ActiveRecord scenarios
        // (and their attribute restrictions) do not apply to searching.
        return Model::scenarios();
    }

    /**
     * Builds a data provider with the search filters applied.
     *
     * @param array $params request parameters to load into the model
     *
     * @return ActiveDataProvider provider over the (possibly filtered) query
     */
    public function search($params)
    {
        $query = Hairdresser::find();

        $dataProvider = new ActiveDataProvider([
            'query' => $query,
        ]);

        if (!$this->load($params) || !$this->validate()) {
            // No usable filter input: return every record, unfiltered.
            return $dataProvider;
        }

        // Exact-match filters (skipped automatically when the value is empty).
        $query->andFilterWhere([
            'id' => $this->id,
            'status' => $this->status,
        ]);

        // Substring filters for all the free-text attributes.
        $likeAttributes = ['name', 'logo_img', 'address', 'description', 'working_time',
            'delivery', 'phone_1', 'phone_2', 'phone_3', 'slug', 'lat', 'lng'];
        foreach ($likeAttributes as $attribute) {
            $query->andFilterWhere(['like', $attribute, $this->$attribute]);
        }

        return $dataProvider;
    }
}
| {'content_hash': '2f9b6af7672bc683db481f16f1e7a4ca', 'timestamp': '', 'source': 'github', 'line_count': 74, 'max_line_length': 152, 'avg_line_length': 29.027027027027028, 'alnum_prop': 0.5409683426443203, 'repo_name': 'kartrez/pitomec', 'id': 'a601db56a7973c9f71a7a7297796a4796ef6a56f', 'size': '2148', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'backend/models/search/HairdresserSearch.php', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'ApacheConf', 'bytes': '224'}, {'name': 'Batchfile', 'bytes': '2069'}, {'name': 'CSS', 'bytes': '107813'}, {'name': 'JavaScript', 'bytes': '5739'}, {'name': 'PHP', 'bytes': '681238'}]} |
using Microsoft.EntityFrameworkCore;
using Piranha.Data;
using System;
using System.Data;
using System.Linq;
namespace Piranha.Repositories
{
    /// <summary>
    /// Repository for the Param entity with optional two-level caching:
    /// models are cached by id (in the base repository) and the
    /// key-to-id mapping is cached under "ParamKey_{key}".
    /// </summary>
    public class ParamRepository : BaseRepository<Param>, IParamRepository
    {
        /// <summary>
        /// Default constructor.
        /// </summary>
        /// <param name="db">The current db context</param>
        /// <param name="cache">The optional model cache</param>
        public ParamRepository(IDb db, ICache cache = null)
            : base(db, cache) { }

        /// <summary>
        /// Gets the model with the given key.
        /// </summary>
        /// <param name="key">The unique key</param>
        /// <returns>The model, or null if no param has the key</returns>
        public Param GetByKey(string key) {
            var id = cache != null ? cache.Get<Guid?>($"ParamKey_{key}") : null;
            Param model = null;

            if (id.HasValue) {
                model = GetById(id.Value);
            }

            // Fall back to the database when there was no cached key mapping,
            // or when the cached mapping is stale and the id no longer
            // resolves to a model. Previously a stale mapping made this
            // method return null forever for an existing key.
            if (model == null) {
                model = db.Params
                    .AsNoTracking()
                    .FirstOrDefault(p => p.Key == key);

                if (cache != null && model != null)
                    AddToCache(model);
            }
            return model;
        }

        #region Protected methods
        /// <summary>
        /// Adds a new model to the database.
        /// </summary>
        /// <param name="model">The model</param>
        protected override void Add(Param model) {
            PrepareInsert(model);

            db.Params.Add(model);
        }

        /// <summary>
        /// Updates the given model in the database.
        /// </summary>
        /// <param name="model">The model</param>
        protected override void Update(Param model) {
            PrepareUpdate(model);

            var param = db.Params.FirstOrDefault(p => p.Id == model.Id);
            if (param != null) {
                App.Mapper.Map<Param, Param>(model, param);
            }
        }

        /// <summary>
        /// Adds the given model to cache, both by id and by key mapping.
        /// </summary>
        /// <param name="model">The model</param>
        protected override void AddToCache(Param model) {
            cache.Set(model.Id.ToString(), model);
            cache.Set($"ParamKey_{model.Key}", model.Id);
        }
        #endregion
    }
}
| {'content_hash': '65b3e5bd6916aa78a2b1fd8f23479dbe', 'timestamp': '', 'source': 'github', 'line_count': 77, 'max_line_length': 80, 'avg_line_length': 31.0, 'alnum_prop': 0.4964390448261416, 'repo_name': 'GyreTechnologies/piranha.core', 'id': '0cbef96c02a97a3646772d8630d842930563386c', 'size': '2613', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'core/Piranha/Repositories/ParamRepository.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Batchfile', 'bytes': '138'}, {'name': 'C#', 'bytes': '480277'}, {'name': 'CSS', 'bytes': '232805'}, {'name': 'JavaScript', 'bytes': '512439'}, {'name': 'Shell', 'bytes': '933'}]} |
#ifndef SkCGUtils_DEFINED
#  define SkCGUtils_DEFINED
#  include "include/core/SkImage.h"
#  include "include/core/SkImageInfo.h"
#  include "include/core/SkPixmap.h"
#  include "include/core/SkSize.h"
#  if defined(SK_BUILD_FOR_MAC) || defined(SK_BUILD_FOR_IOS)
#    ifdef SK_BUILD_FOR_MAC
#      include <ApplicationServices/ApplicationServices.h>
#    endif
#    ifdef SK_BUILD_FOR_IOS
#      include <CoreGraphics/CoreGraphics.h>
#    endif
class SkBitmap;
class SkData;
class SkPixmap;
class SkStreamRewindable;
/**
 *  Create a CGContext that draws into the memory described by the pixmap.
 */
SK_API CGContextRef SkCreateCGContext(const SkPixmap&);
/**
 * Given a CGImage, allocate an SkBitmap and copy the image's pixels into it. If scaleToFit is not
 * null, use it to determine the size of the bitmap, and scale the image to fill the bitmap.
 * Otherwise use the image's width/height.
 *
 * On failure, return false, and leave bitmap unchanged.
 */
SK_API bool SkCreateBitmapFromCGImage(SkBitmap* dst, CGImageRef src);
/**
 *  Create an SkImage that shares/copies the pixels of the CGImage.
 *  Returns nullptr on failure.
 */
SK_API sk_sp<SkImage> SkMakeImageFromCGImage(CGImageRef);
/**
 * Copy the pixels from src into the memory specified by info/rowBytes/dstPixels. On failure,
 * return false (e.g. ImageInfo incompatible with src).
 */
SK_API bool SkCopyPixelsFromCGImage(const SkImageInfo& info, size_t rowBytes, void* dstPixels, CGImageRef src);
/**
 *  Convenience overload: copy the CGImage's pixels into the destination pixmap.
 */
static bool SkCopyPixelsFromCGImage(const SkPixmap& dst, CGImageRef src)
{
  return SkCopyPixelsFromCGImage(dst.info(), dst.rowBytes(), dst.writable_addr(), src);
}
/**
 *  Create an imageref from the specified bitmap using the specified colorspace.
 *  If space is NULL, then CGColorSpaceCreateDeviceRGB() is used.
 */
SK_API CGImageRef SkCreateCGImageRefWithColorspace(const SkBitmap& bm, CGColorSpaceRef space);
/**
 *  Create an imageref from the specified bitmap using the colorspace returned
 *  by CGColorSpaceCreateDeviceRGB()
 */
static CGImageRef SkCreateCGImageRef(const SkBitmap& bm)
{
  return SkCreateCGImageRefWithColorspace(bm, NULL);
}
/**
 *  Draw the bitmap into the specified CG context. The bitmap will be converted
 *  to a CGImage using the generic RGB colorspace. (x,y) specifies the position
 *  of the top-left corner of the bitmap. The bitmap is converted using the
 *  colorspace returned by CGColorSpaceCreateDeviceRGB()
 */
void SkCGDrawBitmap(CGContextRef, const SkBitmap&, float x, float y);
/**
 *  Return a provider that wraps the specified stream.
 *  When the provider is finally deleted, it will delete the stream.
 */
CGDataProviderRef SkCreateDataProviderFromStream(std::unique_ptr<SkStreamRewindable>);
/**
 *  Return a provider backed by the given SkData; the data is retained for
 *  the provider's lifetime.
 */
CGDataProviderRef SkCreateDataProviderFromData(sk_sp<SkData>);
#  endif
#endif
| {'content_hash': '01bf0c5476881956f02bf20271a57460', 'timestamp': '', 'source': 'github', 'line_count': 65, 'max_line_length': 111, 'avg_line_length': 40.06153846153846, 'alnum_prop': 0.7619047619047619, 'repo_name': 'satya-das/cppparser', 'id': '18d2e5c762c12ad8ff632f971429b4c7e139f3f7', 'size': '2747', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'test/e2e/test_master/skia/include/utils/mac/SkCGUtils.h', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '3858548'}, {'name': 'C++', 'bytes': '40366039'}, {'name': 'CMake', 'bytes': '5653'}, {'name': 'Lex', 'bytes': '39563'}, {'name': 'Objective-C', 'bytes': '10345580'}, {'name': 'Shell', 'bytes': '1365'}, {'name': 'Yacc', 'bytes': '103019'}]} |
/*!
 * angular-ui-uploader
 * https://github.com/angular-ui/ui-uploader
 * Version: 1.1.2 - 2015-10-27T03:09:52.784Z
 * License: MIT
 */
(function () {
'use strict';
/*
 * Author: Remy Alain Ticona Carbajal http://realtica.org
 * Description: The main objective of ng-uploader is to have a user control,
 * clean, simple, customizable, and above all very easy to implement.
 * Licence: MIT
 */
angular.module('ui.uploader', []).service('uiUploader', uiUploader);

uiUploader.$inject = ['$log'];

/**
 * Queue-based XHR multipart file uploader.
 *
 * Public API (returned object): addFiles, getFiles, files, startUpload,
 * removeFile, removeAll. Progress and completion are reported through the
 * optional callbacks in the options passed to startUpload().
 */
function uiUploader($log) {
    /*jshint validthis: true */
    var self = this;
    self.files = [];
    self.options = {};
    self.activeUploads = 0;
    self.uploadedFiles = 0;
    $log.info('uiUploader loaded');

    // Append the given FileList/array of File objects to the queue.
    function addFiles(files) {
        for (var i = 0; i < files.length; i++) {
            self.files.push(files[i]);
        }
    }

    // Return the live queue array (not a copy).
    function getFiles() {
        return self.files;
    }

    // Start (or resume) uploading queued files, honouring options.concurrency.
    function startUpload(options) {
        self.options = options;
        for (var i = 0; i < self.files.length; i++) {
            // '>=' rather than the original '==' so the limit still holds
            // when active uploads already exceed a (lowered) concurrency
            // setting instead of starting everything at once.
            if (self.activeUploads >= self.options.concurrency) {
                break;
            }
            if (self.files[i].active)
                continue;
            ajaxUpload(self.files[i], self.options.url, self.options.data);
        }
    }

    function removeFile(file) {
        self.files.splice(self.files.indexOf(file), 1);
    }

    function removeAll() {
        self.files.splice(0, self.files.length);
    }

    return {
        addFiles: addFiles,
        getFiles: getFiles,
        files: self.files,
        startUpload: startUpload,
        removeFile: removeFile,
        removeAll: removeAll
    };

    // Format a byte count as a human readable string, e.g. "1.5 MiB".
    function getHumanSize(bytes) {
        var sizes = ['n/a', 'bytes', 'KiB', 'MiB', 'GiB', 'TB', 'PB', 'EiB', 'ZiB', 'YiB'];
        var i = (bytes === 0) ? 0 : +Math.floor(Math.log(bytes) / Math.log(1024));
        return (bytes / Math.pow(1024, i)).toFixed(i ? 1 : 0) + ' ' + sizes[isNaN(bytes) ? 0 : i + 1];
    }

    function isFunction(entity) {
        return typeof(entity) === typeof(Function);
    }

    // POST a single file to `url` as multipart form data, wiring the
    // progress/completion callbacks found on self.options.
    function ajaxUpload(file, url, data) {
        // The original `'' || 'file'` always evaluated to 'file'.
        var xhr, formData, prop, key = 'file';
        data = data || {};
        self.activeUploads += 1;
        file.active = true;
        xhr = new window.XMLHttpRequest();

        // To account for sites that may require CORS
        if (data.withCredentials === true) {
            xhr.withCredentials = true;
        }

        formData = new window.FormData();
        xhr.open('POST', url);

        // Triggered when upload starts:
        xhr.upload.onloadstart = function() {
        };

        // Triggered many times during upload:
        xhr.upload.onprogress = function(event) {
            if (!event.lengthComputable) {
                return;
            }
            // Update file size because it might be bigger than reported by
            // the fileSize:
            file.loaded = event.loaded;
            file.humanSize = getHumanSize(event.loaded);
            if (isFunction(self.options.onProgress)) {
                self.options.onProgress(file);
            }
        };

        // Triggered when upload is completed:
        xhr.onload = function() {
            self.activeUploads -= 1;
            self.uploadedFiles += 1;
            startUpload(self.options);
            if (isFunction(self.options.onCompleted)) {
                self.options.onCompleted(file, xhr.responseText, xhr.status);
            }
            if (self.uploadedFiles === self.files.length) {
                self.uploadedFiles = 0;
                if (isFunction(self.options.onCompletedAll)) {
                    self.options.onCompletedAll(self.files);
                }
            }
        };

        // Triggered when upload fails:
        // NOTE(review): activeUploads is not decremented here, so a failed
        // upload permanently consumes a concurrency slot and the queue can
        // stall — confirm the intended error semantics before changing it.
        xhr.onerror = function(e) {
            if (isFunction(self.options.onError)) {
                self.options.onError(e);
            }
        };

        // Append additional data if provided (data is always an object
        // after the `data = data || {}` default above):
        for (prop in data) {
            if (data.hasOwnProperty(prop)) {
                formData.append(prop, data[prop]);
            }
        }

        // Append file data:
        formData.append(key, file, file.name);

        // Initiate upload:
        xhr.send(formData);
        return xhr;
    }
}
}());
package com.workplacesystems.queuj.process.java;
import com.workplacesystems.queuj.process.ProcessWrapper;
import com.workplacesystems.queuj.process.BatchProcessServer;
import com.workplacesystems.queuj.process.ForceProcessComplete;
import com.workplacesystems.queuj.process.ForceRescheduleException;
import com.workplacesystems.queuj.process.ProcessOutputable;
import com.workplacesystems.queuj.utils.QueujException;
/**
 * BatchProcessServer implementation whose work units are described by a
 * {@link JavaProcessSession} of {@link JavaProcessSection}s stored in the
 * process parameters under {@link #JAVA_PROCESS_SESSION}.
 */
public class JavaProcessServer extends BatchProcessServer {

    /** Parameter key under which the JavaProcessSession is stored. */
    public final static String JAVA_PROCESS_SESSION = "java_process_session";

    /** Delegates to the session; {@code firstSection} is not used here. */
    @Override
    protected boolean hasMoreSections(ProcessWrapper process, boolean firstSection) {
        return getJavaProcessSession(process).hasMoreSections();
    }

    /** True if the session defines a dedicated failure section. */
    @Override
    protected boolean hasFailureSection(ProcessWrapper process) {
        return getJavaProcessSession(process).hasFailureSection();
    }

    /**
     * Resets the current section unless the previous run failed, in which
     * case the section is left as-is so it can be retried.
     */
    @Override
    protected void resetSection(ProcessWrapper process, boolean previousRunFailed) {
        if (!previousRunFailed)
            getJavaProcessSession(process).resetCurrentSection();
    }

    /**
     * Runs either the failure section or the current section of the session.
     *
     * @param process the process being executed
     * @param failureRun whether to invoke the failure section instead of the
     *        current section
     * @return the section's result code; 0 on success, 1 on unexpected
     *         exception (error state)
     */
    @Override
    protected Integer runProcess(ProcessWrapper process, boolean failureRun) {
        try
        {
            JavaProcessSession<JavaProcessSection> jps = getJavaProcessSession(process);
            jps.clearRollbackSection();
            ProcessOutputable output = process.getProcessOutputable();
            if (failureRun)
                jps.getFailureSection().invokeSection(jps, output);
            else
            {
                Integer result_code = jps.getCurrentSection().invokeSection(jps, output);

                // If section completed succesfully so increment current section
                if (result_code == null || result_code.equals(BatchProcessServer.SUCCESS))
                    incrementCurrentSection(jps);

                // if a non-null result was returned, use that (ignore the cache thing below!)
                if (result_code != null)
                    return result_code;
            }
        }
        // Allow ProcessImpl exceptions to be rethrown
        catch (ForceProcessComplete fpc)
        {
            throw fpc;
        }
        catch (ForceRescheduleException fre)
        {
            throw fre;
        }
        catch (Exception e)
        {
            // any exception puts into error state
            new QueujException(e);
            return new Integer(1);
        }

        // default to ok
        return new Integer(0);
    }

    /**
     * Advances the session to its next section. Subclasses may override to
     * implement non-linear section progression.
     */
    protected void incrementCurrentSection(JavaProcessSession jps) {
        // by default, simple increment in JavaProcessSession
        jps.incrementCurrentSection();
    }

    /** Delegates rollback handling to the session. */
    @Override
    protected void handleCustomRollback(ProcessWrapper process) {
        JavaProcessSession jps = getJavaProcessSession(process);
        jps.handleRollback();
    }

    /** Fetches the session stored in the process parameters. */
    private JavaProcessSession<JavaProcessSection> getJavaProcessSession(ProcessWrapper process) {
        return (JavaProcessSession<JavaProcessSection>)getParameter(process, JAVA_PROCESS_SESSION);
    }
}
| {'content_hash': 'fe76a3434b541e2d504c6e59cb90f800', 'timestamp': '', 'source': 'github', 'line_count': 90, 'max_line_length': 99, 'avg_line_length': 35.17777777777778, 'alnum_prop': 0.6525584333543903, 'repo_name': 'workplacesystems/queuj', 'id': '038467ed93c7170ef36ebb711c1c617b8e80ac02', 'size': '3816', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/com/workplacesystems/queuj/process/java/JavaProcessServer.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '388095'}]} |
package com.plumdo.common.exception;
/**
 * Exception signalling that the requested operation is forbidden
 * (package-private; construct with a result code and message that are
 * passed through to {@link BaseException}).
 *
 * @author wengwenhui
 * @date 2018-04-02
 */
class ForbiddenException extends BaseException {

    private static final long serialVersionUID = 1L;

    ForbiddenException(String ret, String msg) {
        super(ret, msg);
    }
}
| {'content_hash': 'c48cd7e45cabeed95225cabe30bf6aa2', 'timestamp': '', 'source': 'github', 'line_count': 17, 'max_line_length': 52, 'avg_line_length': 16.764705882352942, 'alnum_prop': 0.6807017543859649, 'repo_name': 'wengwh/plumdo-work', 'id': '3c154e6fd33af0efdecd97dc5464c83337f681b7', 'size': '299', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'java/common-module/src/main/java/com/plumdo/common/exception/ForbiddenException.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '309079'}, {'name': 'Dockerfile', 'bytes': '359'}, {'name': 'HTML', 'bytes': '593068'}, {'name': 'Java', 'bytes': '1844178'}, {'name': 'SCSS', 'bytes': '262034'}, {'name': 'TypeScript', 'bytes': '23455'}]} |
package view;
import applist.App;
import javafx.application.Platform;
import javafx.geometry.Pos;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.ListView;
import javafx.scene.control.ProgressBar;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Priority;
import java.util.ArrayList;
import java.util.List;
/**
 * List entry that visualises the download/install/launch progress of one
 * {@link App} in the main window's download queue.
 *
 * The view itself implements {@link HidableProgressDialogWithEnqueuedNotification},
 * records the most recent {@link DownloadStatus}, and fans every status
 * callback out to a list of attached GUIs. A GUI attached late is replayed
 * all status transitions up to the current one (see {@link #addAttachedGui}).
 *
 * All UI mutations are marshalled onto the JavaFX thread via
 * Platform.runLater; status fields are updated before scheduling the UI
 * change so late-attaching GUIs see a consistent state.
 */
public class DownloadQueueEntryView extends AnchorPane implements HidableProgressDialogWithEnqueuedNotification {
    private MainWindow mainWindow;
    // The ListView this entry lives in ("parent" shadows Node#getParent, hence getParentCustom()).
    private ListView<DownloadQueueEntryView> parent;
    private ProgressBar progressBar;
    private Label progressLabel;
    private Button cancelButton;
    private App app;
    private final ImageView cancelButtonIcon = new ImageView(new Image(DownloadQueueEntryView.class.getResourceAsStream("cancel.png")));
    // volatile: written by worker threads, read when replaying status to newly attached GUIs
    private volatile DownloadStatus currentStatus;
    private double kilobytesDownloaded;
    private double totalKiloBytes;
    private final List<HidableProgressDialogWithEnqueuedNotification> attachedGUIs = new ArrayList<>();

    /**
     * Builds the entry, attaches a CLI progress mirror and adds the view to
     * the given ListView.
     */
    public DownloadQueueEntryView(MainWindow mainWindow, ListView<DownloadQueueEntryView> parent, App app) {
        // super(spacing);
        setMainWindow(mainWindow);
        setParent(parent);
        setApp(app);
        addAttachedGui(new CLIProgressUpdateDialog());
        buildViewAndAttachToParent();
    }

    /**
     * Creates the progress bar, labels and cancel button, wires their
     * listeners and inserts this view into the parent ListView.
     */
    private void buildViewAndAttachToParent() {
        // setAlignment(Pos.CENTER_LEFT);
        // setHgrow(this, Priority.ALWAYS);
        progressBar = new ProgressBar(-1);
        progressLabel = new Label();
        Label titleLabel = new Label(getApp().getName());
        Label spacerLabel = new Label(" - ");

        // Any text change can change the entry's width, so ask the main
        // window to re-measure the expanded queue pane.
        progressLabel.textProperty().addListener((observable, oldValue, newValue) -> getMainWindow().triggerUpdateOfDownloadQueuePaneWidthIfPaneIsExtended());
        titleLabel.textProperty().addListener((observable, oldValue, newValue) -> getMainWindow().triggerUpdateOfDownloadQueuePaneWidthIfPaneIsExtended());
        spacerLabel.textProperty().addListener((observable, oldValue, newValue) -> getMainWindow().triggerUpdateOfDownloadQueuePaneWidthIfPaneIsExtended());

        cancelButton = new Button("", cancelButtonIcon);
        cancelButton.getStyleClass().add("transparentButton");
        // Swap the icon between the blue and gray variant as the button is enabled/disabled.
        cancelButton.disableProperty().addListener((observable, oldValue, newValue) -> {
            if (newValue) {
                // disabled, select gray icon
                cancelButtonIcon.setImage(new Image(DownloadQueueEntryView.class.getResourceAsStream("cancel_gray.png")));
            } else {
                // enabled, select blue icon
                cancelButtonIcon.setImage(new Image(DownloadQueueEntryView.class.getResourceAsStream("cancel.png")));
            }
        });
        cancelButton.setOnAction((event -> getApp().cancelDownloadAndLaunch(this)));

        // The progress bar fills the whole entry; the labels/button overlay it.
        setBottomAnchor(progressBar, 0.0);
        setLeftAnchor(progressBar, 0.0);
        setRightAnchor(progressBar, 0.0);
        setTopAnchor(progressBar, 0.0);

        HBox subBox = new HBox(5, titleLabel, spacerLabel, progressLabel, cancelButton);
        HBox.setHgrow(subBox, Priority.ALWAYS);
        subBox.setAlignment(Pos.CENTER_RIGHT);
        setBottomAnchor(subBox, 0.0);
        setLeftAnchor(subBox, 0.0);
        setRightAnchor(subBox, 0.0);
        setTopAnchor(subBox, 0.0);

        this.getChildren().addAll(progressBar, subBox);
        getParentCustom().getItems().add(this);
    }

    /** Marks the entry DONE, removes it from the list and hides attached GUIs. */
    @Override
    public void hide() {
        currentStatus = DownloadStatus.DONE;
        Platform.runLater(() -> getParentCustom().getItems().remove(this));
        getMainWindow().triggerUpdateOfDownloadQueuePaneWidthIfPaneIsExtended();
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.hide();
        }
    }

    /** Shows the indeterminate "enqueued" state and forwards it to attached GUIs. */
    @Override
    public void enqueued() {
        currentStatus = DownloadStatus.ENQUEUED;
        Platform.runLater(() -> {
            progressBar.setProgress(-1);
            progressLabel.setText(MainWindow.getBundle().getString("progress.enqueued"));
        });
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.enqueued();
        }
    }

    /** Shows the indeterminate "preparing" state and forwards it to attached GUIs. */
    @Override
    public void preparePhaseStarted() {
        currentStatus = DownloadStatus.PREPARE_PHASE_STARTED;
        Platform.runLater(() -> {
            progressBar.setProgress(-1);
            progressLabel.setText(MainWindow.getBundle().getString("progress.preparing"));
        });
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.preparePhaseStarted();
        }
    }

    /** Shows the indeterminate "downloading" state and forwards it to attached GUIs. */
    @Override
    public void downloadStarted() {
        currentStatus = DownloadStatus.DOWNLOAD_STARTED;
        Platform.runLater(() -> {
            progressBar.setProgress(-1);
            progressLabel.setText(MainWindow.getBundle().getString("progress.downloading"));
        });
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.downloadStarted();
        }
    }

    /**
     * Updates the determinate progress bar. Does not change currentStatus;
     * the last progress values are kept for replay to late-attached GUIs.
     */
    @Override
    public void downloadProgressChanged(double kilobytesDownloaded, double totalKiloBytes) {
        this.kilobytesDownloaded = kilobytesDownloaded;
        this.totalKiloBytes = totalKiloBytes;
        Platform.runLater(() -> {
            progressBar.setProgress(kilobytesDownloaded / totalKiloBytes);
            progressLabel.setText(MainWindow.getBundle().getString("progress.downloading"));
        });
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.downloadProgressChanged(kilobytesDownloaded, totalKiloBytes);
        }
    }

    /** Shows the indeterminate "installing" state and forwards it to attached GUIs. */
    @Override
    public void installStarted() {
        currentStatus = DownloadStatus.INSTALL_STARTED;
        Platform.runLater(() -> {
            progressBar.setProgress(-1);
            progressLabel.setText(MainWindow.getBundle().getString("progress.installing"));
        });
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.installStarted();
        }
    }

    /** Shows the indeterminate "launching" state and forwards it to attached GUIs. */
    @Override
    public void launchStarted() {
        currentStatus = DownloadStatus.LAUNCH_STARTED;
        Platform.runLater(() -> {
            progressBar.setProgress(-1);
            progressLabel.setText(MainWindow.getBundle().getString("progress.launching"));
        });
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.launchStarted();
        }
    }

    /**
     * Shows the "cancel requested" state, disables the cancel button so it
     * cannot be pressed twice, and forwards the event to attached GUIs.
     */
    @Override
    public void cancelRequested() {
        currentStatus = DownloadStatus.CANCEL_REQUESTED;
        Platform.runLater(() -> {
            progressBar.setProgress(-1);
            progressLabel.setText(MainWindow.getBundle().getString("cancelRequested"));
            cancelButton.setDisable(true);
        });
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.cancelRequested();
        }
    }

    /** Removes the entry from the list after a cancellation completed. */
    @Override
    public void operationCanceled() {
        currentStatus = DownloadStatus.CANCELLED;
        Platform.runLater(() -> getParentCustom().getItems().remove(this));
        getMainWindow().triggerUpdateOfDownloadQueuePaneWidthIfPaneIsExtended();
        for (HidableProgressDialogWithEnqueuedNotification gui : attachedGUIs) {
            gui.operationCanceled();
        }
    }

    /** Delegates error display to the main window. */
    @Override
    public void showErrorMessage(String s) {
        getMainWindow().showErrorMessage(s);
    }

    public MainWindow getMainWindow() {
        return mainWindow;
    }

    private void setMainWindow(MainWindow mainWindow) {
        this.mainWindow = mainWindow;
    }

    /** The containing ListView (named "Custom" to avoid clashing with Node#getParent). */
    public ListView<DownloadQueueEntryView> getParentCustom() {
        return parent;
    }

    private void setParent(ListView<DownloadQueueEntryView> parent) {
        this.parent = parent;
    }

    public App getApp() {
        return app;
    }

    public void setApp(App app) {
        this.app = app;
    }

    public DownloadStatus getCurrentStatus() {
        return currentStatus;
    }

    public double getKilobytesDownloaded() {
        return kilobytesDownloaded;
    }

    public double getTotalKiloBytes() {
        return totalKiloBytes;
    }

    public boolean removeAttachedGui(HidableProgressDialogWithEnqueuedNotification guiToRemove) {
        return attachedGUIs.remove(guiToRemove);
    }

    public List<HidableProgressDialogWithEnqueuedNotification> getAttachedGUIs() {
        return attachedGUIs;
    }

    /**
     * Attaches a GUI and replays every status transition up to (and
     * including) the current one, in enum order, so the GUI catches up with
     * this entry's state. Progress values are replayed alongside
     * downloadStarted().
     */
    public void addAttachedGui(HidableProgressDialogWithEnqueuedNotification attachedGui) {
        this.attachedGUIs.add(attachedGui);
        if (getCurrentStatus() == null)
            return;
        if (DownloadStatus.ENQUEUED.ordinal() <= getCurrentStatus().ordinal())
            attachedGui.enqueued();
        if (DownloadStatus.PREPARE_PHASE_STARTED.ordinal() <= getCurrentStatus().ordinal())
            attachedGui.preparePhaseStarted();
        if (DownloadStatus.DOWNLOAD_STARTED.ordinal() <= getCurrentStatus().ordinal()) {
            attachedGui.downloadStarted();
            attachedGui.downloadProgressChanged(kilobytesDownloaded, totalKiloBytes);
        }
        if (DownloadStatus.INSTALL_STARTED.ordinal() <= getCurrentStatus().ordinal())
            attachedGui.installStarted();
        if (DownloadStatus.LAUNCH_STARTED.ordinal() <= getCurrentStatus().ordinal())
            attachedGui.launchStarted();
        if (DownloadStatus.CANCEL_REQUESTED.ordinal() <= getCurrentStatus().ordinal())
            attachedGui.cancelRequested();
        if (DownloadStatus.CANCELLED.ordinal() <= getCurrentStatus().ordinal())
            attachedGui.operationCanceled();
        if (DownloadStatus.DONE.ordinal() <= getCurrentStatus().ordinal())
            attachedGui.hide();
    }

    /** Lifecycle states in chronological order (ordinal order is relied on by addAttachedGui). */
    public enum DownloadStatus {
        ENQUEUED, PREPARE_PHASE_STARTED, DOWNLOAD_STARTED, INSTALL_STARTED, LAUNCH_STARTED, CANCEL_REQUESTED, CANCELLED, DONE
    }
}
| {'content_hash': '9d34f092b75a3cabfd9c57bae9b5f2e8', 'timestamp': '', 'source': 'github', 'line_count': 285, 'max_line_length': 158, 'avg_line_length': 35.44210526315789, 'alnum_prop': 0.6738936738936739, 'repo_name': 'vatbub/fokLauncher', 'id': '2a5e765a23d2ceaa1b49df6b718faef85cafd3f5', 'size': '10753', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/view/DownloadQueueEntryView.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '798'}, {'name': 'Java', 'bytes': '311868'}, {'name': 'Shell', 'bytes': '609'}]} |
FROM ubuntu:latest

# MAINTAINER is deprecated; LABEL is the supported replacement.
LABEL maintainer="Arthur Alvim <[email protected]>"

# avoid debconf and initrd prompts during package installation
ENV DEBIAN_FRONTEND noninteractive
ENV INITRD No

RUN locale-gen en_US en_US.UTF-8

# Install everything in a single layer and clean the apt caches in the
# same RUN: the original image updated/installed/cleaned across separate
# layers, so the removed lists still counted towards the image size.
RUN apt-get update \
    && apt-get install -y \
        build-essential \
        lsb-release \
        openssh-server \
        supervisor \
    && apt-get clean \
    && rm -rf /var/cache/apt/archives/* /var/lib/apt/lists/*

# sshd refuses to start without its runtime directory
RUN mkdir /var/run/sshd

# supervisor config (runs sshd in the foreground)
ADD supervisord.conf /etc/supervisor/conf.d/supervisord.conf

# set root password
# NOTE(review): hard-coded credentials are acceptable for a throwaway
# dev container only — never expose this image publicly.
RUN echo "root:root" | chpasswd

# expose SSH port
EXPOSE 22

# start supervisor
CMD ["/usr/bin/supervisord"]
| {'content_hash': '8729a024ec288afbdd381417c5d923f3', 'timestamp': '', 'source': 'github', 'line_count': 32, 'max_line_length': 60, 'avg_line_length': 21.625, 'alnum_prop': 0.7644508670520231, 'repo_name': 'arthuralvim/docker-ubuntu-ssh', 'id': 'e2e6ac2f09b49be06ccdd1c0d7de32430d301044', 'size': '745', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Dockerfile', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Shell', 'bytes': '745'}]} |
<?xml version="1.0" encoding="UTF-8"?>
<application id="beam_arithm">
<jobTemplates>
<!-- BEAM BandMaths operator job template -->
<jobTemplate id="expression">
<streamingExecutable>/application/expression/run.sh</streamingExecutable>
<defaultParameters>
<parameter id="expression">l1_flags.INVALID?0:radiance_13>15?0:100+radiance_9-(radiance_8+(radiance_10-radiance_8)*27.524/72.570)</parameter>
</defaultParameters>
</jobTemplate>
<!-- BEAM Level 3 processor job template -->
<jobTemplate id="binning">
<streamingExecutable>/application/binning/run.sh</streamingExecutable>
<defaultParameters>
<parameter id="cellsize">9.28</parameter>
<parameter id="bandname">out</parameter>
<parameter id="bitmask">l1_flags.INVALID?0:radiance_13>15?0:100+radiance_9-(radiance_8+(radiance_10-radiance_8)*27.524/72.570)</parameter>
<parameter id="bbox" title="Bounding Box" abstract="The bounding box parameter" scope="runtime" target="geo:box">-180,-90,180,90</parameter>
<parameter id="algorithm">Minimum/Maximum</parameter>
<parameter id="outputname">binned</parameter>
<parameter id="resampling">binning</parameter>
<parameter id="palette">#MCI_Palette
color0=0,0,0
color1=0,0,154
color2=54,99,250
color3=110,201,136
color4=166,245,8
color5=222,224,0
color6=234,136,0
color7=245,47,0
color8=255,255,255
numPoints=9
sample0=98.19878118960284
sample1=98.64947122314665
sample2=99.10016125669047
sample3=99.5508512902343
sample4=100.0015413237781
sample5=100.4522313573219
sample6=100.90292139086574
sample7=101.35361142440956
sample8=101.80430145795337</parameter>
<parameter id="band">1</parameter>
<parameter id="tailor">true</parameter>
</defaultParameters>
<defaultJobconf>
<property id="ciop.job.max.tasks">1</property>
</defaultJobconf>
</jobTemplate>
</jobTemplates>
<workflow id="beam_arithm" title="beam arithm" abstract="this is a tutorial on the use of beam on a Sandbox">
<workflowVersion>1.0</workflowVersion>
<node id="node_expression">
<job id="expression"></job>
<sources>
<source refid="cas:series">http://localhost/catalogue/sandbox/MER_RR__1P/description</source>
</sources>
<parameters>
</parameters>
</node>
<node id="node_binning">
<job id="binning"></job>
<sources>
<source refid="wf:node">node_expression</source>
</sources>
<parameters>
<parameter id="bitmask"/>
</parameters>
</node>
</workflow>
</application>
| {'content_hash': 'fb0ed0a13ea27ecea3a20d2cb616ea17', 'timestamp': '', 'source': 'github', 'line_count': 70, 'max_line_length': 149, 'avg_line_length': 36.15714285714286, 'alnum_prop': 0.7036744369814303, 'repo_name': 'e-ceo-challenge-demo/participant-1', 'id': '1cd5956132daa119f71bd0fc8602b58cc2e5f170', 'size': '2531', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'application.xml', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
// Iterator over a singleton store: there is exactly one element, so the
// iterator holds a reference to it, advancing is a no-op and every
// dereference yields the same element.
template <typename T>
struct SingletonIterator
{
    // Reference to the store's single element.
    T& data;

    // Advancing never moves — the singleton has only one position.
    void operator += (ptrdiff_t diff) {}

    T& operator* ()
    {
        return data;
    }
};
template <typename T>
class SingletonStore : BaseStore
{
public:
using TELEMENT = T;
void new_entity(EntityRef ref) override
{
}
T& get()
{
return data;
}
T& get(EntityRef ref)
{
return data;
}
T& set(EntityRef ref, const T& value)
{
data = value;
return data;
}
T& set(const T& value)
{
data = value;
return data;
}
auto iter() { return SingletonIterator<T>{ data }; }
private:
T data;
}; | {'content_hash': '32833d5e33bdb2d9d72da6038e6f61d6', 'timestamp': '', 'source': 'github', 'line_count': 48, 'max_line_length': 53, 'avg_line_length': 11.770833333333334, 'alnum_prop': 0.6265486725663717, 'repo_name': 'xunilrj/sandbox', 'id': '1e8fcb9f1df47acd22ab5ce2be07c5a32bcbc727', 'size': '565', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'sources/cpp/ecs/src/ecs/Stores/SingletonStore.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ANTLR', 'bytes': '235'}, {'name': 'ASP.NET', 'bytes': '110'}, {'name': 'Assembly', 'bytes': '28409'}, {'name': 'Asymptote', 'bytes': '22978'}, {'name': 'C', 'bytes': '1022035'}, {'name': 'C#', 'bytes': '474510'}, {'name': 'C++', 'bytes': '33387716'}, {'name': 'CMake', 'bytes': '1288737'}, {'name': 'CSS', 'bytes': '49690'}, {'name': 'Common Lisp', 'bytes': '858'}, {'name': 'Coq', 'bytes': '6200'}, {'name': 'Dockerfile', 'bytes': '2912'}, {'name': 'Elixir', 'bytes': '34'}, {'name': 'Erlang', 'bytes': '8204'}, {'name': 'F#', 'bytes': '33187'}, {'name': 'Fortran', 'bytes': '20472'}, {'name': 'GDB', 'bytes': '701'}, {'name': 'GLSL', 'bytes': '7478'}, {'name': 'Go', 'bytes': '8971'}, {'name': 'HTML', 'bytes': '6469462'}, {'name': 'Handlebars', 'bytes': '8236'}, {'name': 'Haskell', 'bytes': '18581'}, {'name': 'Java', 'bytes': '120539'}, {'name': 'JavaScript', 'bytes': '5055335'}, {'name': 'Jupyter Notebook', 'bytes': '1849172'}, {'name': 'LLVM', 'bytes': '43431'}, {'name': 'MATLAB', 'bytes': '462980'}, {'name': 'Makefile', 'bytes': '1622666'}, {'name': 'Objective-C', 'bytes': '2001'}, {'name': 'PostScript', 'bytes': '45490'}, {'name': 'PowerShell', 'bytes': '192867'}, {'name': 'Python', 'bytes': '726138'}, {'name': 'R', 'bytes': '31364'}, {'name': 'Roff', 'bytes': '5700'}, {'name': 'Ruby', 'bytes': '5865'}, {'name': 'Rust', 'bytes': '797104'}, {'name': 'Sage', 'bytes': '654'}, {'name': 'Scala', 'bytes': '42383'}, {'name': 'Shell', 'bytes': '154039'}, {'name': 'TLA', 'bytes': '16779'}, 
{'name': 'TSQL', 'bytes': '3412'}, {'name': 'TeX', 'bytes': '6989202'}, {'name': 'TypeScript', 'bytes': '8845'}, {'name': 'Visual Basic .NET', 'bytes': '1090'}, {'name': 'WebAssembly', 'bytes': '70321'}, {'name': 'q', 'bytes': '13889'}]} |
// Re-export the local-store initializer from the ember-flexberry-offline
// addon so the consuming app picks it up automatically.
export { default, initialize } from 'ember-flexberry-offline/initializers/local-store';
| {'content_hash': 'b46df907f411a35abbe120b4b5578958', 'timestamp': '', 'source': 'github', 'line_count': 1, 'max_line_length': 87, 'avg_line_length': 88.0, 'alnum_prop': 0.7954545454545454, 'repo_name': 'Flexberry/ember-flexberry-offline', 'id': 'b77d1bfdd4580c0e62cb79e6f10a0732cb79a567', 'size': '88', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'app/initializers/local-store.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '1680'}, {'name': 'JavaScript', 'bytes': '63542'}]} |
import logging
import SocketServer
import threading
class _MonitorTCPRequestHandler(SocketServer.BaseRequestHandler):
    """Echo handler: mirrors each received chunk back to the client.

    The connection is served until the server stops running, the peer
    disconnects (empty recv), or the peer sends the literal 'STOP'
    command (which is still echoed back before the loop exits).
    """

    def handle(self):
        data = True
        while self.server._running and data and data != 'STOP':
            data = self.request.recv(8).strip()
            # sendall() retries until the whole payload is written;
            # plain send() may silently drop the tail on partial sends.
            self.request.sendall(data)
class _ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    """TCP server handling each connection in its own thread."""

    # Run flag read by request handlers; toggled by the Server wrapper.
    _running = False
class Server(object):
    """Threaded TCP echo server wrapper with explicit start()/stop()."""

    def __init__(self, address):
        logging.info('starting server on %s', address)
        self._address = address
        self._server = _ThreadedTCPServer(self._address, _MonitorTCPRequestHandler)
        self._server._running = False
        # NOTE(review): this timeout only affects handle_request(); it has
        # no effect on serve_forever(), so it is effectively unused here.
        self._server.timeout = 10
        self._server_thread = threading.Thread(target=self._server.serve_forever)
        self._server_thread.daemon = True

    def start(self):
        """Begin serving in a background daemon thread."""
        self._server._running = True
        self._server_thread.start()

    def stop(self):
        """Stop serving and wait for the background thread to exit."""
        logging.info('Stopping server')
        self._server._running = False
        self._server.shutdown()
        self._server_thread.join()
        # Fixed log text (was the nonsensical 'Stopping stopped').
        logging.info('Server stopped')
| {'content_hash': 'c14923754466708aeccbd9aada526ea7', 'timestamp': '', 'source': 'github', 'line_count': 41, 'max_line_length': 78, 'avg_line_length': 24.70731707317073, 'alnum_prop': 0.7334649555774926, 'repo_name': 'hamaxx/tcphealthcheck', 'id': 'd94ed05888d0e9bcf436d3a9eaec9dd5899fcd03', 'size': '1013', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'server.py', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Python', 'bytes': '8572'}]} |
using AutoMapper;
using AutoMapperContracts;
using Iris.DomainClasses;
namespace Iris.ViewModels
{
    /// <summary>
    /// Row model for the slide-show administration data grid; mapped from
    /// <see cref="SlideShowImage"/> via AutoMapper.
    /// </summary>
    public class SlideShowDataGridViewModel : IHaveCustomMappings
    {
        public int Id { get; set; }

        public string Title { get; set; }

        public string Description { get; set; }

        public string Image { get; set; }

        public string Link { get; set; }

        // Position of the slide within the slide show.
        public int Order { get; set; }

        /// <summary>
        /// Registers the SlideShowImage -> SlideShowDataGridViewModel map
        /// with AutoMapper.
        /// </summary>
        public void CreateMappings(IConfiguration configuration)
        {
            configuration.CreateMap<SlideShowImage, SlideShowDataGridViewModel>();
        }
    }
}
| {'content_hash': 'aa91015d971bf05b125274704d405e9d', 'timestamp': '', 'source': 'github', 'line_count': 21, 'max_line_length': 82, 'avg_line_length': 28.476190476190474, 'alnum_prop': 0.6471571906354515, 'repo_name': 'MehdiSaeedifar/IrisStore', 'id': 'bcf358c2e5c9f6d9a8d5f631d580ff61027f76b4', 'size': '600', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Iris.ViewModels/SlideShowDataGridViewModel.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ASP', 'bytes': '26797'}, {'name': 'C#', 'bytes': '981220'}, {'name': 'CSS', 'bytes': '388488'}, {'name': 'Go', 'bytes': '8967'}, {'name': 'HTML', 'bytes': '353473'}, {'name': 'JavaScript', 'bytes': '2377214'}, {'name': 'PHP', 'bytes': '55071'}, {'name': 'Python', 'bytes': '7577'}]} |
package models;
import contracts.IBunny;
import contracts.IResource;
import contracts.IWarrior;
public class WarriorBunny
extends BaseBunny implements IBunny, IWarrior {
private int hp;
public WarriorBunny(int x, int y, int hp) {
super(x, y);
this.setHp(hp);
}
public int getHp() {
return hp;
}
public void setHp(int hp) {
this.hp = hp;
}
@Override
public void eat(IResource resource) {
this.setHp(this.getHp() + 10 * resource.getQuantity());
}
@Override
public int getSize() {
return this.getHp();
}
} | {'content_hash': '25923837f7b5dccf7cd272bf9d2ff63a', 'timestamp': '', 'source': 'github', 'line_count': 33, 'max_line_length': 59, 'avg_line_length': 17.060606060606062, 'alnum_prop': 0.6589698046181173, 'repo_name': 'TelerikAcademy/SchoolAcademy', 'id': 'c8c193965c7e247099ee5ea67f8165e28b3fd56c', 'size': '563', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '2015-11-Java-OOP/08. Polymorphism/demos/models/WarriorBunny.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ASP', 'bytes': '5695'}, {'name': 'Batchfile', 'bytes': '68'}, {'name': 'C#', 'bytes': '709009'}, {'name': 'CSS', 'bytes': '20455'}, {'name': 'CoffeeScript', 'bytes': '4618'}, {'name': 'HTML', 'bytes': '420435'}, {'name': 'Java', 'bytes': '101270'}, {'name': 'JavaScript', 'bytes': '718829'}, {'name': 'PowerShell', 'bytes': '498027'}]} |
using System.Collections.Generic;
using System.Linq;
using GF.FeatureWise.Services.Models;
namespace GF.FeatureWise.Services.Repositories
{
    /// <summary>
    /// Entity Framework backed repository for <see cref="Histogram"/> rows.
    /// </summary>
    public class HistogramRepository : IHistogramRepository
    {
        private readonly ApiDataContext context;

        public HistogramRepository(ApiDataContext context)
        {
            this.context = context;
        }

        /// <summary>
        /// Removes every histogram row with a raw TRUNCATE TABLE statement.
        /// NOTE(review): the table name is derived from the entity type name
        /// plus "s" (pluralizing convention) — this breaks if the entity is
        /// mapped to a differently-named table; confirm against the mapping.
        /// </summary>
        public void DeleteAll()
        {
            var objectContext = ((System.Data.Entity.Infrastructure.IObjectContextAdapter)context).ObjectContext;
            objectContext.ExecuteStoreCommand(string.Format("TRUNCATE TABLE [{0}]", typeof(Histogram).Name + "s"));
        }

        /// <summary>
        /// Persists a new histogram and returns it (with any store-generated
        /// values populated by SaveChanges).
        /// </summary>
        public Histogram Add(Histogram histogram)
        {
            context.Histograms.Add(histogram);
            context.SaveChanges();
            return histogram;
        }

        /// <summary>
        /// Returns all histograms ordered by feature name.
        /// </summary>
        public IEnumerable<Histogram> GetAll()
        {
            // Fix: order in the database instead of client side. The original
            // called AsEnumerable() before OrderBy, which materialized the
            // entire table in memory and then sorted it with LINQ-to-Objects.
            return context.Histograms.OrderBy(t => t.Feature).AsEnumerable();
        }
    }
}
var path = require('path'),
fs = require('fs'),
wrench = require('wrench'),
vm = require('vm'),
uglifyjs = require('uglify-js'),
// alloy requires
_ = require('../../lib/alloy/underscore'),
logger = require('../../logger'),
U = require('../../utils'),
tiapp = require('../../tiapp'),
CONST = require('../../common/constants'),
platforms = require('../../../platforms/index'),
// alloy compiler requires
CU = require('./compilerUtils'),
styler = require('./styler'),
sourceMapper = require('./sourceMapper'),
CompilerMakeFile = require('./CompilerMakeFile'),
BuildLog = require('./BuildLog'),
Orphanage = require('./Orphanage');
// Module-wide state shared by the compile command and its helper functions.
var alloyRoot = path.join(__dirname,'..','..'),
	// file-extension matchers for views, controllers, and models
	viewRegex = new RegExp('\\.' + CONST.FILE_EXT.VIEW + '$'),
	controllerRegex = new RegExp('\\.' + CONST.FILE_EXT.CONTROLLER + '$'),
	modelRegex = new RegExp('\\.' + CONST.FILE_EXT.MODEL + '$'),
	compileConfig = {},   // merged compile configuration (see CU.createCompileConfig)
	otherPlatforms,       // titanium platform folders other than the build target
	buildPlatform,        // platform being compiled (e.g. 'ios', 'android')
	titaniumFolder,       // Resources subfolder name for the build platform
	buildLog,             // BuildLog instance driving incremental-build decisions
	theme,                // active theme name, if any
	widgetIds = [];       // ids of widgets processed during this compile

// Benchmark bookkeeping consumed by BENCHMARK().
var times = {
	first: null,   // process.hrtime() captured at the first BENCHMARK() call
	last: null,    // total elapsed hrtime as of the previous measurement
	msgs: []       // accumulated '[Ns] description' benchmark messages
};
//////////////////////////////////////
////////// command function //////////
//////////////////////////////////////
/**
 * Alloy "compile" command entry point.
 *
 * Compiles an Alloy project's app directory (models, views, styles,
 * controllers, widgets, themes) into the Titanium Resources directory for a
 * single target platform, then runs the code optimizer. A target platform is
 * required, e.g.: alloy compile --config platform=ios
 *
 * @param {Array} args - positional CLI arguments; args[0] may be the project path
 * @param {Object} program - parsed CLI options (outputPath, config, platform)
 */
module.exports = function(args, program) {
	BENCHMARK();
	var alloyConfig = {},
		compilerMakeFile,
		paths = U.getAndValidateProjectPaths(
			program.outputPath || args[0] || process.cwd()
		);

	// Initialize modules used throughout the compile process
	buildLog = new BuildLog(paths.project);
	tiapp.init(path.join(paths.project, 'tiapp.xml'));

	// validate the current Titanium SDK version, exit on failure
	tiapp.validateSdkVersion();

	// construct compiler config from command line config parameters
	// and print the configuration data
	logger.debug('----- CONFIGURATION -----');
	if (program.config && _.isString(program.config)) {
		logger.debug('raw config = "' + program.config + '"');
		// --config is a comma-separated list of key=value pairs
		_.each(program.config.split(','), function(v) {
			var parts = v.split('=');
			alloyConfig[parts[0]] = parts[1];
			logger.debug(parts[0] + ' = ' + parts[1]);
		});
	}
	if (program.platform) {
		logger.debug('platform = ' + program.platform);
		alloyConfig.platform = program.platform;
	}
	if (!alloyConfig.deploytype) {
		alloyConfig.deploytype = 'development';
		logger.debug('deploytype = ' + alloyConfig.deploytype);
	}
	logger.debug('project path = ' + paths.project);
	logger.debug('app path = ' + paths.app);
	logger.debug('');

	// make sure a platform was specified
	buildPlatform = alloyConfig.platform;
	if (!buildPlatform) {
		U.die([
			'You must define a target platform for the alloy compile command',
			'	Ex. "alloy compile --config platform=ios"'
		]);
	}
	titaniumFolder = platforms[buildPlatform].titaniumFolder;
	otherPlatforms = _.without(CONST.PLATFORM_FOLDERS, titaniumFolder);

	// create compile config from paths and various alloy config files
	logger.debug('----- CONFIG.JSON -----');
	compileConfig = CU.createCompileConfig(paths.app, paths.project, alloyConfig, buildLog);
	theme = compileConfig.theme;
	// a theme change invalidates cached platform assets below
	buildLog.data.themeChanged = theme !== buildLog.data.theme;
	buildLog.data.theme = theme;
	logger.debug('');

	// wipe the controllers, models, and widgets
	logger.debug('----- CLEANING RESOURCES -----');
	var orphanage = new Orphanage(paths.project, buildPlatform, {
		theme: theme,
		adapters: compileConfig.adapters
	});
	orphanage.clean();
	logger.debug('');

	// process project makefiles
	compilerMakeFile = new CompilerMakeFile();
	var alloyJMK = path.resolve(path.join(paths.app, 'alloy.jmk'));
	if (path.existsSync(alloyJMK)) {
		logger.debug('Loading "alloy.jmk" compiler hooks...');
		var script = vm.createScript(fs.readFileSync(alloyJMK), 'alloy.jmk');

		// process alloy.jmk compile file
		try {
			script.runInNewContext(compilerMakeFile);
			compilerMakeFile.isActive = true;
		} catch(e) {
			logger.error(e.stack);
			U.die('Project build at "' + alloyJMK + '" generated an error during load.');
		}

		compilerMakeFile.trigger('pre:load', _.clone(compileConfig));
		logger.debug('');
	}

	// create generated controllers folder in resources
	logger.debug('----- BASE RUNTIME FILES -----');
	U.installPlugin(path.join(alloyRoot,'..'), paths.project);

	// copy in all lib resources from alloy module, skipping sync adapters
	// that this project does not use
	updateFilesWithBuildLog(
		path.join(alloyRoot, 'lib'),
		path.join(paths.resources, titaniumFolder),
		{
			rootDir: paths.project,
			exceptions: _.map(_.difference(CONST.ADAPTERS, compileConfig.adapters), function(a) {
				return path.join('alloy', 'sync', a + '.js');
			})
		}
	);
	updateFilesWithBuildLog(
		path.join(alloyRoot, 'common'),
		path.join(paths.resources, titaniumFolder, 'alloy'),
		{ rootDir: paths.project }
	);

	// create runtime folder structure for alloy
	_.each(['COMPONENT','WIDGET','RUNTIME_STYLE'], function(type) {
		var p = path.join(paths.resources, titaniumFolder, 'alloy', CONST.DIR[type]);
		wrench.mkdirSyncRecursive(p, 0755);
	});

	// Copy in all developer assets, libs, and additional resources
	_.each(['ASSETS','LIB','VENDOR'], function(type) {
		updateFilesWithBuildLog(
			path.join(paths.app, CONST.DIR[type]),
			path.join(paths.resources, titaniumFolder),
			{
				rootDir: paths.project,
				themeChanged: buildLog.data.themeChanged,
				filter: new RegExp('^(?:' + otherPlatforms.join('|') + ')[\\/\\\\]'),
				exceptions: otherPlatforms,
				createSourceMap: (type==='ASSETS') ? false : compileConfig.sourcemap,
				compileConfig: compileConfig,
				titaniumFolder: titaniumFolder
			}
		);
	});

	// copy in test specs if not in production
	if (alloyConfig.deploytype !== 'production') {
		updateFilesWithBuildLog(
			path.join(paths.app,'specs'),
			path.join(paths.resources, titaniumFolder, 'specs'),
			{ rootDir: paths.project }
		);
	}

	// check theme for assets
	if (theme) {
		var themeAssetsPath = path.join(paths.app,'themes',theme,'assets');
		if (path.existsSync(themeAssetsPath)) {
			updateFilesWithBuildLog(
				themeAssetsPath,
				path.join(paths.resources, titaniumFolder),
				{
					rootDir: paths.project,
					themeChanged: true,
					filter: new RegExp('^(?:' + otherPlatforms.join('|') + ')[\\/\\\\]'),
					exceptions: otherPlatforms,
					titaniumFolder: titaniumFolder
				}
			);
		}
	}
	logger.debug('');

	// trigger our custom compiler makefile
	if (compilerMakeFile.isActive) {
		compilerMakeFile.trigger('pre:compile', _.clone(compileConfig));
	}

	logger.info('----- MVC GENERATION -----');

	// create the global style, if it exists
	styler.setPlatform(buildPlatform);
	styler.loadGlobalStyles(paths.app, theme ? {theme:theme} : {});

	// Create collection of all widget and app paths
	var widgetDirs = U.getWidgetDirectories(paths.app);
	var viewCollection = widgetDirs;
	viewCollection.push({ dir: path.join(paths.project,CONST.ALLOY_DIR) });

	// Process all models
	var models = processModels(viewCollection);
	_.each(models, function(m) {
		CU.models.push(m.charAt(0).toLowerCase() + m.slice(1));
	});

	// Create a regex for determining which platform-specific
	// folders should be used in the compile process
	var filteredPlatforms = _.reject(CONST.PLATFORM_FOLDERS_ALLOY, function(p) {
		return p === buildPlatform;
	});
	filteredPlatforms = _.map(filteredPlatforms, function(p) { return p + '[\\\\\\/]'; });
	var filterRegex = new RegExp('^(?:(?!' + filteredPlatforms.join('|') + '))');

	// don't process XML/controller files inside .svn folders (ALOY-839)
	var excludeRegex = new RegExp('(?:^|[\\/\\\\])(?:' + CONST.EXCLUDED_FILES.join('|') + ')(?:$|[\\/\\\\])');

	// Process all views/controllers and generate their runtime
	// commonjs modules and source maps.
	// `tracker` guards against generating the same controller twice when a
	// platform-specific file shadows a base file.
	var tracker = {};
	_.each(viewCollection, function(collection) {
		// generate runtime controllers from views
		var theViewDir = path.join(collection.dir,CONST.DIR.VIEW);
		if (fs.existsSync(theViewDir)) {
			_.each(wrench.readdirSyncRecursive(theViewDir), function(view) {
				if (viewRegex.test(view) && filterRegex.test(view) && !excludeRegex.test(view)) {
					// make sure this controller is only generated once
					var theFile = view.substring(0, view.lastIndexOf('.'));
					var theKey = theFile.replace(new RegExp('^' + buildPlatform + '[\\/\\\\]'), '');
					var fp = path.join(collection.dir, theKey);
					if (tracker[fp]) { return; }

					// generate runtime controller
					logger.info('[' + view + '] ' + (collection.manifest ? collection.manifest.id +
						' ' : '') + 'view processing...');
					parseAlloyComponent(view, collection.dir, collection.manifest);
					tracker[fp] = true;
				}
			});
		}

		// generate runtime controllers from any controller code that has no
		// corresponding view markup
		var theControllerDir = path.join(collection.dir,CONST.DIR.CONTROLLER);
		if (fs.existsSync(theControllerDir)) {
			_.each(wrench.readdirSyncRecursive(theControllerDir), function(controller) {
				if (controllerRegex.test(controller) && filterRegex.test(controller) && !excludeRegex.test(controller)) {
					// make sure this controller is only generated once
					var theFile = controller.substring(0,controller.lastIndexOf('.'));
					var theKey = theFile.replace(new RegExp('^' + buildPlatform + '[\\/\\\\]'), '');
					var fp = path.join(collection.dir, theKey);
					if (tracker[fp]) { return; }

					// generate runtime controller
					logger.info('[' + controller + '] ' + (collection.manifest ?
						collection.manifest.id + ' ' : '') + 'controller processing...');
					parseAlloyComponent(controller, collection.dir, collection.manifest, true);
					tracker[fp] = true;
				}
			});
		}
	});
	logger.info('');

	generateAppJs(paths, compileConfig);

	// ALOY-905: workaround TiSDK < 3.2.0 iOS device build bug where it can't reference app.js
	// in platform-specific folders, so we just copy the platform-specific one to
	// the Resources folder.
	if (buildPlatform === 'ios' && tiapp.version.lt('3.2.0')) {
		U.copyFileSync(path.join(paths.resources, titaniumFolder, 'app.js'), path.join(paths.resources, 'app.js'));
	}

	// optimize code
	logger.info('----- OPTIMIZING -----');
	optimizeCompiledCode(alloyConfig, paths);

	// trigger our custom compiler makefile
	if (compilerMakeFile.isActive) {
		compilerMakeFile.trigger('post:compile', _.clone(compileConfig));
	}

	// write out the log for this build
	buildLog.write();

	BENCHMARK('TOTAL', true);
};
///////////////////////////////////////
////////// private functions //////////
///////////////////////////////////////
/**
 * Generates the platform-specific Resources/<platform>/app.js entry point
 * from the project's app/alloy.js plus the Alloy app.js template, producing a
 * source map alongside it. Skips regeneration when a cached copy exists and
 * the hash of app/alloy.js is unchanged since the last build.
 *
 * @param {Object} paths - validated project paths (app, resources, ...)
 * @param {Object} compileConfig - merged compile configuration
 */
function generateAppJs(paths, compileConfig) {
	var alloyJs = path.join(paths.app, 'alloy.js'),

		// info needed to generate app.js
		target = {
			filename: 'Resources/' + titaniumFolder + '/app.js',
			filepath: path.join(paths.resources, titaniumFolder, 'app.js'),
			template: path.join(alloyRoot, 'template', 'app.js')
		},

		// additional data used for source mapping
		data = {
			'__MAPMARKER_ALLOY_JS__': {
				filename: 'app/alloy.js',
				filepath: alloyJs
			}
		},

		// hash used to determine if we need to rebuild
		hash = U.createHash(alloyJs);

	// is it already generated from a prior compile?
	buildLog.data[buildPlatform] || (buildLog.data[buildPlatform] = {});
	if (fs.existsSync(target.filepath) && buildLog.data[buildPlatform][alloyJs] === hash) {
		logger.info('[app.js] using cached app.js...');

	// if not, generate the platform-specific app.js and save its hash
	} else {
		logger.info('[app.js] Titanium entry point processing...');
		sourceMapper.generateCodeAndSourceMap({
			target: target,
			data: data,
		}, compileConfig);
		buildLog.data[buildPlatform][alloyJs] = hash;
	}
	logger.info('');
}
/**
 * Compiles a single Alloy component (view + style + controller, or a
 * controller-only component) into a runtime CommonJS controller module and a
 * pre-processed runtime style module.
 *
 * Handles platform-specific file overrides, theme overrides, widget
 * packaging (when `manifest` is given), data-binding code generation from
 * styler.bindingsMap, and source-map generation for the emitted controller.
 *
 * @param {String} view - component file path relative to its views/controllers dir
 * @param {String} dir - root directory of the app or widget being processed
 * @param {Object} [manifest] - widget manifest; present only when compiling a widget
 * @param {Boolean} [noView] - true when compiling a controller with no view markup
 */
function parseAlloyComponent(view, dir, manifest, noView) {
	var parseType = noView ? 'controller' : 'view';

	// validate parameters
	if (!view) { U.die('Undefined ' + parseType + ' passed to parseAlloyComponent()'); }
	if (!dir) { U.die('Failed to parse ' + parseType + ' "' + view + '", no directory given'); }

	var dirRegex = new RegExp('^(?:' + CONST.PLATFORM_FOLDERS_ALLOY.join('|') + ')[\\\\\\/]*');
	var basename = path.basename(view, '.' + CONST.FILE_EXT[parseType.toUpperCase()]),
		dirname = path.dirname(view).replace(dirRegex,''),
		viewName = basename,
		// values interpolated into the component.js template
		template = {
			viewCode: '',
			modelVariable: CONST.BIND_MODEL_VAR,
			parentVariable: CONST.PARENT_SYMBOL_VAR,
			itemTemplateVariable: CONST.ITEM_TEMPLATE_VAR,
			controllerPath: (dirname ? path.join(dirname,viewName) : viewName).replace(/\\/g, '/'),
			preCode: '',
			postCode: '',
			Widget: !manifest ? '' : 'var ' + CONST.WIDGET_OBJECT +
				" = new (require('alloy/widget'))('" + manifest.id + "');this.__widgetId='" +
				manifest.id + "';",
			WPATH: !manifest ? '' : _.template(
				fs.readFileSync(path.join(alloyRoot,'template','wpath.js'),'utf8'),
				{ WIDGETID: manifest.id }
			),
			__MAPMARKER_CONTROLLER_CODE__: ''
		},
		widgetDir = dirname ? path.join(CONST.DIR.COMPONENT,dirname) : CONST.DIR.COMPONENT,
		widgetStyleDir = dirname ? path.join(CONST.DIR.RUNTIME_STYLE,dirname) :
			CONST.DIR.RUNTIME_STYLE,
		state = { parent: {}, styles: [] },
		files = {};

	// reset the bindings map
	styler.bindingsMap = {};
	CU.destroyCode = '';
	CU.postCode = '';
	CU[CONST.AUTOSTYLE_PROPERTY] = compileConfig[CONST.AUTOSTYLE_PROPERTY];
	CU.currentManifest = manifest;
	CU.currentDefaultId = viewName;

	// create a list of file paths
	var searchPaths = noView ? ['CONTROLLER'] : ['VIEW','STYLE','CONTROLLER'];
	_.each(searchPaths, function(fileType) {
		// get the path values for the file
		var fileTypeRoot = path.join(dir, CONST.DIR[fileType]);
		var filename = viewName + '.' + CONST.FILE_EXT[fileType];
		var filepath = dirname ? path.join(dirname, filename) : filename;

		// check for platform-specific versions of the file
		var baseFile = path.join(fileTypeRoot,filepath);
		if (buildPlatform) {
			var platformSpecificFile = path.join(fileTypeRoot,buildPlatform,filepath);
			if (path.existsSync(platformSpecificFile)) {
				if (fileType === 'STYLE') {
					// styles merge: base first, then platform-specific overrides
					files[fileType] = [
						{ file:baseFile },
						{ file:platformSpecificFile, platform:true }
					];
				} else {
					files[fileType] = platformSpecificFile;
				}
				return;
			}
		}
		files[fileType] = baseFile;
	});

	// output paths for the generated controller and runtime style modules
	_.each(['COMPONENT','RUNTIME_STYLE'], function(fileType) {
		files[fileType] = path.join(compileConfig.dir.resources, 'alloy', CONST.DIR[fileType]);
		if (dirname) { files[fileType] = path.join(files[fileType], dirname); }
		files[fileType] = path.join(files[fileType], viewName+'.js');
	});

	// we are processing a view, not just a controller
	if (!noView) {
		// validate view
		if (!path.existsSync(files.VIEW)) {
			logger.warn('No ' + CONST.FILE_EXT.VIEW + ' view file found for view ' + files.VIEW);
			return;
		}

		// load global style, if present
		state.styles = styler.globalStyle || [];

		// Load the style and update the state
		if (files.STYLE) {
			var theStyles = _.isArray(files.STYLE) ? files.STYLE : [{file:files.STYLE}];
			_.each(theStyles, function(style) {
				if (fs.existsSync(style.file)) {
					logger.info('  style:      "' +
						path.relative(path.join(dir,CONST.DIR.STYLE),style.file) + '"');
					state.styles = styler.loadAndSortStyle(style.file, {
						existingStyle: state.styles,
						platform: style.platform
					});
				}
			});
		}

		if (theme) {
			// if a theme is applied, override TSS definitions with those defined in the theme
			var themeStylesDir, theStyle, themeStylesFile, psThemeStylesFile;
			if(!manifest) {
				// theming a "normal" controller
				themeStylesDir = path.join(compileConfig.dir.themes,theme,'styles');
				theStyle = dirname ? path.join(dirname,viewName+'.tss') : viewName+'.tss';
				themeStylesFile = path.join(themeStylesDir,theStyle);
				psThemeStylesFile = path.join(themeStylesDir,buildPlatform,theStyle);
			} else {
				// theming a widget
				themeStylesDir = path.join(compileConfig.dir.themes,theme,'widgets',manifest.id,'styles');
				theStyle = dirname ? path.join(dirname,viewName+'.tss') : viewName+'.tss';
				themeStylesFile = path.join(themeStylesDir,theStyle);
				psThemeStylesFile = path.join(themeStylesDir,buildPlatform,theStyle);
			}

			if (path.existsSync(themeStylesFile)) {
				// load theme-specific styles, overriding default definitions
				logger.info('  theme:      "' + path.join(theme.toUpperCase(),theStyle) + '"');
				state.styles = styler.loadAndSortStyle(themeStylesFile, {
					existingStyle: state.styles,
					theme: true
				});
			}
			if (path.existsSync(psThemeStylesFile)) {
				// load theme- and platform-specific styles, overriding default definitions
				logger.info('  theme:      "' +
					path.join(theme.toUpperCase(), buildPlatform, theStyle) + '"');
				state.styles = styler.loadAndSortStyle(psThemeStylesFile, {
					existingStyle: state.styles,
					platform: true,
					theme: true
				});
			}
		}

		// Load view from file into an XML document root node
		var docRoot;
		try {
			logger.info('  view:       "' +
				path.relative(path.join(dir, CONST.DIR.VIEW), files.VIEW) + '"');
			docRoot = U.XML.getAlloyFromFile(files.VIEW);
		} catch (e) {
			U.die([
				e.stack,
				'Error parsing XML for view "' + view + '"'
			]);
		}

		// see if autoStyle is enabled for the view
		if (docRoot.hasAttribute(CONST.AUTOSTYLE_PROPERTY)) {
			CU[CONST.AUTOSTYLE_PROPERTY] =
				docRoot.getAttribute(CONST.AUTOSTYLE_PROPERTY) === 'true';
		}

		// make sure we have a Window, TabGroup, or SplitWindow
		var rootChildren = U.XML.getElementsFromNodes(docRoot.childNodes);
		if (viewName === 'index' && !dirname) {
			var valid = [
				'Ti.UI.Window',
				'Ti.UI.iPad.SplitWindow',
				'Ti.UI.TabGroup',
				'Ti.UI.iOS.NavigationWindow'
			].concat(CONST.MODEL_ELEMENTS);
			_.each(rootChildren, function(node) {
				var found = true;
				var args = CU.getParserArgs(node, {}, { doSetId: false });

				if (args.fullname === 'Alloy.Require') {
					var inspect = CU.inspectRequireNode(node);
					for (var j = 0; j < inspect.names.length; j++) {
						if (!_.contains(valid, inspect.names[j])) {
							found = false;
							break;
						}
					}
				} else {
					found = _.contains(valid, args.fullname);
				}

				if (!found) {
					U.die([
						'Compile failed. index.xml must have a top-level container element.',
						'Valid elements: [' + valid.join(',') + ']'
					]);
				}
			});
		}

		// process any model/collection nodes
		_.each(rootChildren, function(node, i) {
			var fullname = CU.getNodeFullname(node);
			var isModelElement = _.contains(CONST.MODEL_ELEMENTS,fullname);

			if (isModelElement) {
				var vCode = CU.generateNode(node, state, undefined, false, true);
				template.viewCode += vCode.content;
				template.preCode += vCode.pre;

				// remove the model/collection nodes when done
				docRoot.removeChild(node);
			}
		});

		// rebuild the children list since model elements have been removed
		rootChildren = U.XML.getElementsFromNodes(docRoot.childNodes);

		// process the UI nodes
		var hasUsedDefaultId = false;
		_.each(rootChildren, function(node, i) {
			// should we use the default id?
			var defaultId;
			if (!hasUsedDefaultId && CU.isNodeForCurrentPlatform(node)) {
				hasUsedDefaultId = true;
				defaultId = viewName;
			}

			// generate the code for this node
			var fullname = CU.getNodeFullname(node);
			template.viewCode += CU.generateNode(node, {
				parent:{},
				styles:state.styles,
				widgetId: manifest ? manifest.id : undefined
			}, defaultId, true);
		});
	}

	// process the controller code
	if (path.existsSync(files.CONTROLLER)) {
		logger.info('  controller: "' +
			path.relative(path.join(dir, CONST.DIR.CONTROLLER), files.CONTROLLER) + '"');
	}
	var cCode = CU.loadController(files.CONTROLLER);
	template.parentController = (cCode.parentControllerName !== '') ?
		cCode.parentControllerName : "'BaseController'";
	template.__MAPMARKER_CONTROLLER_CODE__ += cCode.controller;
	template.preCode += cCode.pre;

	// process the bindingsMap, if it contains any data bindings
	var bTemplate = "$.<%= id %>.<%= prop %>=_.isFunction(<%= model %>.transform)?";
	bTemplate += "<%= model %>.transform()['<%= attr %>']: _.template('<%= tplVal %>', {<%= mname %>: <%= model %>.toJSON()});";

	// for each model variable in the bindings map...
	_.each(styler.bindingsMap, function(mapping,modelVar) {
		// open the model binding handler
		var handlerVar = CU.generateUniqueId();
		template.viewCode += 'var ' + handlerVar + '=function() {';
		// unbind the handler in the generated destroy() code
		CU.destroyCode += modelVar + ".off('" + CONST.MODEL_BINDING_EVENTS + "'," +
			handlerVar + ");";

		// for each specific conditional within the bindings map....
		_.each(_.groupBy(mapping, function(b){return b.condition;}), function(bindings,condition) {
			var bCode = '';

			// for each binding belonging to this model/conditional pair...
			_.each(bindings, function(binding) {
				bCode += _.template(bTemplate, {
					id: binding.id,
					prop: binding.prop,
					model: modelVar,
					attr: binding.attr,
					mname: binding.mname,
					tplVal: binding.tplVal
				});
			});

			// if this is a legit conditional, wrap the binding code in it
			if (typeof condition !== 'undefined' && condition !== 'undefined') {
				bCode = 'if(' + condition + '){' + bCode + '}';
			}
			template.viewCode += bCode;
		});
		template.viewCode += "};";
		template.viewCode += modelVar + ".on('" + CONST.MODEL_BINDING_EVENTS + "'," +
			handlerVar + ");";
	});

	// add destroy() function to view for cleaning up bindings
	template.viewCode += 'exports.destroy=function(){' + CU.destroyCode + '};';

	// add any postCode after the controller code
	template.postCode += CU.postCode;

	// create generated controller module code for this view/controller or widget
	var controllerCode = template.__MAPMARKER_CONTROLLER_CODE__;
	delete template.__MAPMARKER_CONTROLLER_CODE__;
	var code = _.template(fs.readFileSync(
		path.join(compileConfig.dir.template, 'component.js'), 'utf8'), template);

	// prep the controller paths based on whether it's an app
	// controller or widget controller
	var targetFilepath = path.join(compileConfig.dir.resources, titaniumFolder,
		path.relative(compileConfig.dir.resources, files.COMPONENT));
	var runtimeStylePath = path.join(compileConfig.dir.resources, titaniumFolder,
		path.relative(compileConfig.dir.resources, files.RUNTIME_STYLE));
	if (manifest) {
		wrench.mkdirSyncRecursive(
			path.join(compileConfig.dir.resources, titaniumFolder, 'alloy', CONST.DIR.WIDGET,
				manifest.id, widgetDir),
			0755
		);
		wrench.mkdirSyncRecursive(
			path.join(compileConfig.dir.resources, titaniumFolder, 'alloy', CONST.DIR.WIDGET,
				manifest.id, widgetStyleDir),
			0755
		);

		// [ALOY-967] merge "i18n" dir in widget folder
		if (fs.existsSync(path.join(dir,CONST.DIR.I18N))) {
			CU.mergeI18n(path.join(dir,CONST.DIR.I18N), compileConfig.dir);
		}

		widgetIds.push(manifest.id);
		CU.copyWidgetResources(
			[path.join(dir,CONST.DIR.ASSETS), path.join(dir,CONST.DIR.LIB)],
			path.join(compileConfig.dir.resources, titaniumFolder),
			manifest.id,
			{
				filter: new RegExp('^(?:' + otherPlatforms.join('|') + ')[\\/\\\\]'),
				exceptions: otherPlatforms,
				titaniumFolder: titaniumFolder,
				theme: theme
			}
		);
		targetFilepath = path.join(
			compileConfig.dir.resources, titaniumFolder, 'alloy', CONST.DIR.WIDGET, manifest.id,
			widgetDir, viewName + '.js'
		);
		runtimeStylePath = path.join(
			compileConfig.dir.resources, titaniumFolder, 'alloy', CONST.DIR.WIDGET, manifest.id,
			widgetStyleDir, viewName + '.js'
		);
	}

	// generate the code and source map for the current controller
	sourceMapper.generateCodeAndSourceMap({
		target: {
			filename: path.relative(compileConfig.dir.project,files.COMPONENT),
			filepath: targetFilepath,
			templateContent: code
		},
		data: {
			__MAPMARKER_CONTROLLER_CODE__: {
				filename: path.relative(compileConfig.dir.project,files.CONTROLLER),
				fileContent: controllerCode
			}
		}
	}, compileConfig);

	// initiate runtime style module creation
	var relativeStylePath = path.relative(compileConfig.dir.project, runtimeStylePath);
	logger.info('  created:     "' + relativeStylePath + '"');

	// pre-process runtime controllers to save runtime performance
	var STYLE_PLACEHOLDER = '__STYLE_PLACEHOLDER__';
	var STYLE_REGEX = new RegExp('[\'"]' + STYLE_PLACEHOLDER + '[\'"]');
	var processedStyles = [];
	_.each(state.styles, function(s) {
		var o = {};

		// make sure this style entry applies to the current platform
		if (s && s.queries && s.queries.platform &&
			!_.contains(s.queries.platform, buildPlatform)) {
			return;
		}

		// get the runtime processed version of the JSON-safe style
		var processed = '{' + styler.processStyle(s.style, state) + '}';

		// create a temporary style object, sans style key
		_.each(s, function(v,k) {
			if (k === 'queries') {
				var queriesObj = {};

				// optimize style conditionals for runtime
				_.each(s[k], function(query, queryKey) {
					if (queryKey === 'platform') {
						// do nothing, we don't need the platform key anymore
					} else if (queryKey === 'formFactor') {
						queriesObj[queryKey] = 'is' + U.ucfirst(query);
					} else if (queryKey === 'if') {
						queriesObj[queryKey] = query;
					} else {
						logger.warn('Unknown device query "' + queryKey + '"');
					}
				});

				// add the queries object, if not empty
				if (!_.isEmpty(queriesObj)) {
					o[k] = queriesObj;
				}
			} else if (k !== 'style') {
				o[k] = v;
			}
		});

		// Create a full processed style string by inserting the processed style
		// into the JSON stringifed temporary style object
		o.style = STYLE_PLACEHOLDER;
		processedStyles.push(JSON.stringify(o).replace(STYLE_REGEX, processed));
	});

	// write out the pre-processed styles to runtime module files
	var styleCode = 'module.exports = [' + processedStyles.join(',') + '];';
	if (manifest) {
		styleCode += _.template(
			fs.readFileSync(path.join(alloyRoot,'template','wpath.js'), 'utf8'),
			{ WIDGETID: manifest.id }
		);
	}
	wrench.mkdirSyncRecursive(path.dirname(runtimeStylePath), 0755);
	fs.writeFileSync(runtimeStylePath, styleCode);
}
/**
 * Finds all migration files for a given model and returns them as an array
 * of stringified wrapper functions, sorted oldest-first by the timestamp
 * prefix of the filename (expected form: <timestamp>_<name>.<migration ext>).
 *
 * Returns [] when the migrations directory does not exist or cannot be read
 * (the catch-all below makes this a deliberate best-effort lookup).
 *
 * @param {String} name - model basename to find migrations for
 * @param {String} [inDir] - migrations directory; defaults to compileConfig.dir.migrations
 * @returns {Array} stringified migration wrapper functions, oldest first
 */
function findModelMigrations(name, inDir) {
	try {
		var migrationsDir = inDir || compileConfig.dir.migrations;
		var files = fs.readdirSync(migrationsDir);
		var part = '_'+name+'.'+CONST.FILE_EXT.MIGRATION;

		// look for our model
		files = _.reject(files, function(f) { return f.indexOf(part) === -1; });

		// sort them in the oldest order first
		// NOTE(review): the sort key drops one extra trailing character of the
		// timestamp (the additional -1 below) while the id extraction later
		// does not — looks like an off-by-one, but harmless for ordering as
		// long as timestamps are equal-length; confirm before changing.
		files = files.sort(function(a,b){
			var x = a.substring(0,a.length - part.length -1);
			var y = b.substring(0,b.length - part.length -1);
			if (x<y) { return -1; }
			if (x>y) { return 1; }
			return 0;
		});

		var codes = [];
		_.each(files,function(f) {
			var mf = path.join(migrationsDir,f);
			var m = fs.readFileSync(mf,'utf8');
			// wrap the migration body so it can be invoked with a migration
			// object at runtime, stamped with the model name and timestamp id
			var code = "(function(migration){\n " +
				"migration.name = '" + name + "';\n" +
				"migration.id = '" + f.substring(0,f.length-part.length).replace(/_/g,'') + "';\n" +
				m +
				"})";
			codes.push(code);
		});

		logger.info("Found " + codes.length + " migrations for model: " + name);
		return codes;
	} catch(E) {
		return [];
	}
}
/**
 * Generates runtime model modules for every model file found in the given
 * app/widget directories, combining each model definition with its
 * migrations via the model.js template. Widget models are written under the
 * widget's own runtime folder.
 *
 * @param {Array} dirs - objects of shape { dir, manifest? }; manifest present for widgets
 * @returns {Array} proper-cased basenames of all models that were generated
 */
function processModels(dirs) {
	var models = [];
	var modelTemplateFile = path.join(alloyRoot,'template','model.js');
	_.each(dirs, function(dirObj) {
		var modelDir = path.join(dirObj.dir,CONST.DIR.MODEL);
		if (!fs.existsSync(modelDir)) {
			return;
		}

		var migrationDir = path.join(dirObj.dir,CONST.DIR.MIGRATION);
		var manifest = dirObj.manifest;
		var isWidget = typeof manifest !== 'undefined' && manifest !== null;
		var pathPrefix = isWidget ? 'widgets/' + manifest.id + '/': '';
		_.each(fs.readdirSync(modelDir), function(file) {
			// skip anything that does not carry the model file extension
			if (!modelRegex.test(file)) {
				logger.warn('Non-model file "' + file + '" in ' + pathPrefix + 'models directory');
				return;
			}
			logger.info('[' + pathPrefix + 'models/' + file + '] model processing...');

			var fullpath = path.join(modelDir,file);
			var basename = path.basename(fullpath, '.'+CONST.FILE_EXT.MODEL);

			// generate model code based on model.js template and migrations
			var code = _.template(fs.readFileSync(modelTemplateFile,'utf8'), {
				basename: basename,
				modelJs: fs.readFileSync(fullpath,'utf8'),
				migrations: findModelMigrations(basename, migrationDir)
			});

			// write the model to the runtime file
			var casedBasename = U.properCase(basename);
			var modelRuntimeDir = path.join(compileConfig.dir.resources,
				titaniumFolder, 'alloy', 'models');
			if (isWidget) {
				modelRuntimeDir = path.join(compileConfig.dir.resources,
					titaniumFolder, 'alloy', 'widgets', manifest.id, 'models');
			}
			wrench.mkdirSyncRecursive(modelRuntimeDir, 0755);
			fs.writeFileSync(path.join(modelRuntimeDir,casedBasename+'.js'), code);
			models.push(casedBasename);
		});
	});

	return models;
}
// Thin wrapper around U.updateFiles() that stamps every copy operation with
// whether this is a fresh (non-incremental) build, per the current buildLog.
function updateFilesWithBuildLog(src, dst, opts) {
	var mergedOpts = _.extend({ isNew: buildLog.isNew }, opts);
	U.updateFiles(src, dst, mergedOpts);
}
/**
 * Post-processes every generated JS file in the Resources directory through
 * a series of uglify-js AST passes (builtins, optimizer, compress), then
 * re-serializes each file in place. Repeats until a pass no longer produces
 * new files to process (the builtins pass can add files mid-run).
 *
 * NOTE: declared with no parameters; the call site passes (alloyConfig,
 * paths), which are ignored here — this function reads the module-level
 * compileConfig/otherPlatforms state instead.
 */
function optimizeCompiledCode() {
	var mods = [
			'builtins',
			'optimizer',
			'compress'
		],
		modLocation = './ast/',
		lastFiles = [],
		files;

	// Get the list of JS files from the Resources directory
	// and exclude files that don't need to be optimized, or
	// have already been optimized.
	function getJsFiles() {
		var exceptions = [
			'app.js',
			'alloy/CFG.js',
			'alloy/controllers/',
			'alloy/styles/',
			'alloy/backbone.js',
			'alloy/constants.js',
			'alloy/underscore.js',
			'alloy/widget.js'
		];
		// the same exceptions also apply inside the platform-specific folder
		_.each(exceptions.slice(0), function(ex) {
			exceptions.push(path.join(titaniumFolder, ex));
		});

		var rx = new RegExp('^(?!' + otherPlatforms.join('|') + ').+\\.js$');
		return _.filter(wrench.readdirSyncRecursive(compileConfig.dir.resources), function(f) {
			// TODO: remove should.js check here once ALOY-921 is resolved
			// also remove check in sourceMapper.js exports.generateSourceMap()
			return rx.test(f) && !/(?:^|[\\\/])should\.js$/.test(f) && !_.find(exceptions, function(e) {
				return f.indexOf(e) === 0;
			});
		});
	}

	// keep iterating while new (not-yet-processed) files appear
	while((files = _.difference(getJsFiles(),lastFiles)).length > 0) {
		_.each(files, function(file) {
			// generate AST from file
			var fullpath = path.join(compileConfig.dir.resources,file);
			var ast;
			logger.info('- ' + file);
			try {
				ast = uglifyjs.parse(fs.readFileSync(fullpath,'utf8'), {
					filename: file
				});
			} catch (e) {
				U.die('Error generating AST for "' + fullpath + '"', e);
			}

			// process all AST operations
			_.each(mods, function(mod) {
				logger.trace('  processing "' + mod + '" module...');
				ast.figure_out_scope();
				// a pass may return a replacement AST or mutate in place
				ast = require(modLocation+mod).process(ast, compileConfig) || ast;
			});

			// Write out the optimized file
			var stream = uglifyjs.OutputStream(sourceMapper.OPTIONS_OUTPUT);
			ast.print(stream);
			fs.writeFileSync(fullpath, stream.toString());
		});

		// Combine lastFiles and files, so on the next iteration we can make sure that the
		// list of files to be processed has not grown, like in the case of builtins.
		lastFiles = _.union(lastFiles, files);
	}
}
/**
 * Records elapsed time for a compile phase. The first call only arms the
 * timer; each later call appends a '[Ns] description' entry to times.msgs
 * measuring the delta since the previous call. When isFinished is truthy,
 * the entry holds the total elapsed time instead, and the full benchmark
 * report is emitted through the logger.
 *
 * @param {String} [desc] - label for this measurement
 * @param {Boolean} [isFinished] - true on the final call to print the report
 */
function BENCHMARK(desc, isFinished) {
	var precision = Math.pow(10, 5);
	var label = desc || '<no description>';

	// first call just starts the clock; nothing to report yet
	if (times.first === null) {
		times.first = process.hrtime();
		return;
	}

	var toSeconds = function(hrtime) {
		return hrtime[0] + hrtime[1] / 1000000000;
	};

	var totalSinceStart = process.hrtime(times.first);
	var previousTotal = times.last ? toSeconds(times.last) : 0;
	var delta = toSeconds(totalSinceStart) - previousTotal;
	times.last = totalSinceStart;

	var seconds = isFinished ? toSeconds(totalSinceStart) : delta;
	var rounded = Math.round(seconds * precision) / precision;
	times.msgs.push('[' + rounded + 's] ' + label);

	if (isFinished) {
		logger.trace(' ');
		logger.trace('Benchmarking');
		logger.trace('------------');
		logger.trace(times.msgs);
		logger.info('');
		logger.info('Alloy compiled in ' + rounded + 's');
	}
}
| {'content_hash': 'ee4bf58112b6a5f9b52276ce68a76d01', 'timestamp': '', 'source': 'github', 'line_count': 953, 'max_line_length': 125, 'avg_line_length': 33.29171038824764, 'alnum_prop': 0.659060106533867, 'repo_name': 'mcvendrell/alloy', 'id': 'a8153d9f4088783e3fe5108497e35a25e15cc7e4', 'size': '31727', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Alloy/commands/compile/index.js', 'mode': '33261', 'license': 'apache-2.0', 'language': []} |
// Signature shared by the RSP pipeline stage functions below.
typedef void (*pipeline_function)(struct rsp *rsp);
// Instruction cache fetch stage.
//
// Reads the instruction word at $PC from IMEM (rsp->mem + 0x1000) together
// with its predecoded opcode from the opcode cache, latches them into the
// IF/RD latch, and advances $PC by 4 (wrapping within the 4KiB IMEM space
// via the 0xFFC mask).
static inline void rsp_if_stage(struct rsp *rsp) {
  struct rsp_ifrd_latch *ifrd_latch = &rsp->pipeline.ifrd_latch;
  uint32_t pc = ifrd_latch->pc;
  uint32_t iw;

  // Bug fix: the original wrote `assert(cond || "msg")` — a string literal
  // is always truthy, so the assertion could never fire. The idiomatic form
  // that keeps the message visible in the failure output is `cond && "msg"`.
  assert(!(pc & 0x1000) && "RSP $PC points past IMEM.");

  ifrd_latch->pc = (pc + 4) & 0xFFC;
  memcpy(&iw, rsp->mem + 0x1000 + pc, sizeof(iw));

  ifrd_latch->common.pc = pc;
  ifrd_latch->opcode = rsp->opcode_cache[pc >> 2];
  ifrd_latch->iw = iw;
}
// Register fetch and decode stage.
//
// Moves the fetched instruction word and predecoded opcode from the IF/RD
// latch into the RD/EX latch, and performs load-use hazard detection: when
// the previous instruction was a load whose destination register is needed
// as rs or rt by the current instruction, the current instruction is
// replaced with a NOP (SLL, iw = 0) and 1 is returned so the caller can
// stall the front end for one cycle. Returns 0 when no stall is needed.
static inline int rsp_rd_stage(struct rsp *rsp) {
  struct rsp_rdex_latch *rdex_latch = &rsp->pipeline.rdex_latch;
  struct rsp_ifrd_latch *ifrd_latch = &rsp->pipeline.ifrd_latch;

  // flags of the instruction currently leaving this stage (the prior one)
  uint32_t previous_insn_flags = rdex_latch->opcode.flags;
  uint32_t iw = ifrd_latch->iw;

  rdex_latch->common = ifrd_latch->common;
  rdex_latch->opcode = ifrd_latch->opcode;
  rdex_latch->iw = iw;

  // Check for load-use stalls.
  if (previous_insn_flags & OPCODE_INFO_LOAD) {
    const struct rsp_opcode *opcode = &rdex_latch->opcode;
    unsigned dest = rsp->pipeline.exdf_latch.result.dest;
    unsigned rs = GET_RS(iw);
    unsigned rt = GET_RT(iw);

    // dest == 0 ($zero) never creates a hazard; otherwise stall only if the
    // new instruction actually reads the loaded register.
    if (unlikely(dest && (
      (dest == rs && (opcode->flags & OPCODE_INFO_NEEDRS)) ||
      (dest == rt && (opcode->flags & OPCODE_INFO_NEEDRT))
    ))) {
      // inject a NOP bubble (SLL r0, r0, 0)
      static const struct rsp_opcode rsp_rf_kill_op = {RSP_OPCODE_SLL, 0x0};

      rdex_latch->opcode = rsp_rf_kill_op;
      rdex_latch->iw = 0x00000000U;

      return 1;
    }
  }

  return 0;
}
// Execution stage.
//
// Executes one scalar instruction: reads the source registers (with
// forwarding from the DF/WB latch) and dispatches to the opcode handler.
cen64_flatten static inline void rsp_ex_stage(struct rsp *rsp) {
  struct rsp_dfwb_latch *dfwb_latch = &rsp->pipeline.dfwb_latch;
  struct rsp_exdf_latch *exdf_latch = &rsp->pipeline.exdf_latch;
  struct rsp_rdex_latch *rdex_latch = &rsp->pipeline.rdex_latch;

  uint32_t rs_reg, rt_reg, temp;
  unsigned rs, rt;
  uint32_t iw;

  exdf_latch->common = rdex_latch->common;

  /* Vector instructions are handled by rsp_v_ex_stage instead. */
  if (rdex_latch->opcode.flags & OPCODE_INFO_VECTOR)
    return;

  iw = rdex_latch->iw;
  rs = GET_RS(iw);
  rt = GET_RT(iw);

  /* Forward results from DF/WB: temporarily patch the in-flight result
   * into the register file so the reads below observe it, re-pin $r0 to
   * zero, then restore the old value after reading. */
  temp = rsp->regs[dfwb_latch->result.dest];
  rsp->regs[dfwb_latch->result.dest] = dfwb_latch->result.result;
  rsp->regs[RSP_REGISTER_R0] = 0x00000000U;

  rs_reg = rsp->regs[rs];
  rt_reg = rsp->regs[rt];

  rsp->regs[dfwb_latch->result.dest] = temp;

  // Finally, execute the instruction.
#ifdef PRINT_EXEC
  debug("%.8X: %s\n", rdex_latch->common.pc,
    rsp_opcode_mnemonics[rdex_latch->opcode.id]);
#endif

  /* Fix: 'return <expression>;' inside a void function is a constraint
   * violation in standard C (C99 6.8.6.4); invoke the handler and fall
   * off the end instead. Behavior is unchanged. */
  rsp_function_table[rdex_latch->opcode.id](
    rsp, iw, rs_reg, rt_reg);
}
// Execution stage (vector).
//
// Executes one vector (CP2) instruction: loads the vs operand as-is, the
// vt operand shuffled per the element specifier, dispatches to the vector
// handler, and writes the result back to vd.
cen64_flatten static inline void rsp_v_ex_stage(struct rsp *rsp) {
  struct rsp_rdex_latch *rdex_latch = &rsp->pipeline.rdex_latch;
  rsp_vect_t result, vs_data, vt_shuffled, zero;
  unsigned vs, vt, vd, e;
  uint32_t iw;

  /* Scalar instructions are handled by rsp_ex_stage instead. */
  if (!(rdex_latch->opcode.flags & OPCODE_INFO_VECTOR))
    return;

  iw = rdex_latch->iw;
  vs = GET_VS(iw);
  vt = GET_VT(iw);
  vd = GET_VD(iw);
  e  = GET_E (iw);

  vs_data = rsp_vect_load_unshuffled_operand(rsp->cp2.regs[vs].e);
  vt_shuffled = rsp_vect_load_and_shuffle_operand(rsp->cp2.regs[vt].e, e);
  zero = rsp_vzero();

  // Finally, execute the instruction.
#ifdef PRINT_EXEC
  debug("%.8X: %s\n", rdex_latch->common.pc,
    rsp_vector_opcode_mnemonics[rdex_latch->opcode.id]);
#endif

  result = rsp_vector_function_table[rdex_latch->opcode.id](
    rsp, iw, vt_shuffled, vs_data, zero);

  rsp_vect_write_operand(rsp->cp2.regs[vd].e, result);
}
// Data cache fetch stage.
//
// Commits the memory request produced by EX (if any) against DMEM.
// Vector accesses go through a CP2 load/store callback; scalar accesses
// read-modify-write one 32-bit DMEM word using the masks computed in EX.
cen64_flatten static inline void rsp_df_stage(struct rsp *rsp) {
  struct rsp_dfwb_latch *dfwb_latch = &rsp->pipeline.dfwb_latch;
  struct rsp_exdf_latch *exdf_latch = &rsp->pipeline.exdf_latch;
  const struct rsp_mem_request *request = &exdf_latch->request;
  uint32_t addr;

  /* Always propagate the EX result toward WB, even with no memory op. */
  dfwb_latch->common = exdf_latch->common;
  dfwb_latch->result = exdf_latch->result;

  if (request->type == RSP_MEM_REQUEST_NONE)
    return;

  /* Wrap the address within the 4KB DMEM. */
  addr = request->addr & 0xFFF;

  // Vector unit DMEM access.
  if (request->type != RSP_MEM_REQUEST_INT_MEM) {
    uint16_t *regp = rsp->cp2.regs[request->packet.p_vect.dest].e;
    unsigned element = request->packet.p_vect.element;
    rsp_vect_t reg, dqm;

    /* dqm appears to be a per-lane mask for the load/store callback —
     * its exact semantics live in vldst_func; confirm there. */
    reg = rsp_vect_load_unshuffled_operand(regp);
    dqm = rsp_vect_load_unshuffled_operand(exdf_latch->
      request.packet.p_vect.vdqm.e);

    // Make sure the vector data doesn't get
    // written into the scalar part of the RF.
    dfwb_latch->result.dest = 0;

    exdf_latch->request.packet.p_vect.vldst_func(
      rsp, addr, element, regp, reg, dqm);
  }

  // Scalar unit DMEM access.
  else {
    uint32_t rdqm = request->packet.p_int.rdqm;  /* read mask */
    uint32_t wdqm = request->packet.p_int.wdqm;  /* write mask */
    uint32_t data = request->packet.p_int.data;
    unsigned rshift = request->packet.p_int.rshift;
    uint32_t word;

    /* DMEM words are stored byteswapped; swap on read and on write. */
    memcpy(&word, rsp->mem + addr, sizeof(word));
    word = byteswap_32(word);

    /* Load path: arithmetic shift gives sign extension; rdqm trims it. */
    dfwb_latch->result.result = rdqm & (((int32_t) word) >> rshift);

    /* Store path: merge new data into the word under the write mask
     * (wdqm == 0 for pure loads, leaving the word unchanged). */
    word = byteswap_32((word & ~wdqm) | (data & wdqm));
    memcpy(rsp->mem + addr, &word, sizeof(word));
  }
}
// Writeback stage.
//
// Commits the DF/WB result to the scalar register file. A write to $r0 is
// harmless because the EX stage re-zeroes $r0 before every register read.
static inline void rsp_wb_stage(struct rsp *rsp) {
  rsp->regs[rsp->pipeline.dfwb_latch.result.dest] =
    rsp->pipeline.dfwb_latch.result.result;
}
// Advances the processor pipeline by one clock.
//
// Stages run back-to-front so each one consumes the latch its upstream
// stage wrote on the previous cycle.
void rsp_cycle_(struct rsp *rsp) {
  rsp_wb_stage(rsp);
  rsp_df_stage(rsp);

  /* Reset the EX/DF defaults (no writeback target, no memory request)
   * before EX runs, so an instruction that sets neither stays inert. */
  rsp->pipeline.exdf_latch.result.dest = RSP_REGISTER_R0;
  rsp->pipeline.exdf_latch.request.type = RSP_MEM_REQUEST_NONE;

  rsp_v_ex_stage(rsp);
  rsp_ex_stage(rsp);

  /* A load-use stall in RD squashes this cycle's instruction fetch. */
  if (likely(!rsp_rd_stage(rsp)))
    rsp_if_stage(rsp);
}
// Initializes the pipeline with default values.
//
// Zero-filling leaves every latch and memory request in an inert state.
void rsp_pipeline_init(struct rsp_pipeline *pipeline) {
  memset(pipeline, 0, sizeof(*pipeline));
}
| {'content_hash': '52c7334ddcf89aa4583cbc42f5b25d68', 'timestamp': '', 'source': 'github', 'line_count': 204, 'max_line_length': 76, 'avg_line_length': 28.53921568627451, 'alnum_prop': 0.6568189625558227, 'repo_name': 'glaubitz/cen64-debian', 'id': '9377c789c05a262004694dd52b856d7f70190feb', 'size': '6358', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'rsp/pipeline.c', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Assembly', 'bytes': '20405'}, {'name': 'C', 'bytes': '884648'}, {'name': 'C++', 'bytes': '83096'}, {'name': 'CMake', 'bytes': '18903'}, {'name': 'Objective-C', 'bytes': '2614'}, {'name': 'QMake', 'bytes': '1085'}]} |
package methods_objects;
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.*;
/**
* @author Sergey
* @since 10.11.2017
* <p>
* This is app show us how can speed up our app.
* Time is executing this app 9-10s.
* App is in single thread executing 25-28s.
*/
public class ThreadPool implements Executor {
Queue<Runnable> ourTasks = new ArrayDeque<>();
/**
* This is isRunning informs main thread need stopping
*/
boolean isRunning = true;
/**
* Constructor
*
* @param nThreads quantity started threads
*/
public ThreadPool(int nThreads) {
for (int i = 0; i < nThreads; i++) {
new Thread(new InnerThread()).start();
}
}
@Override
public void execute(Runnable command) {
if (ourTasks.add(command)) {
synchronized (ourTasks) {
ourTasks.notifyAll();
}
}
;
}
/**
* He is stopping all threads if queue is empty and tasks will not be more.
*
* @throws InterruptedException
*/
public void shutdown() throws InterruptedException {
while (!ourTasks.isEmpty()) {
Thread.currentThread().sleep(1000);
}
isRunning = false;
synchronized (ourTasks) {
ourTasks.notifyAll();
}
}
/**
* Class inner thread
*/
private class InnerThread implements Runnable {
@Override
public void run() {
while (isRunning) {
if (ourTasks.isEmpty()) {
synchronized (ourTasks) {
try {
ourTasks.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} else {
Runnable r = ourTasks.poll();
if (r != null) {
r.run();
}
}
}
}
}
}
| {'content_hash': 'ed78d8584e2a2a58f55605b524855b4d', 'timestamp': '', 'source': 'github', 'line_count': 86, 'max_line_length': 79, 'avg_line_length': 24.046511627906977, 'alnum_prop': 0.4874274661508704, 'repo_name': 'SergeyI88/InduikovS', 'id': 'c49004494c88e7c24410680c231e75da8f35dafd', 'size': '2068', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'chapter_005_concurrency/src/main/java/methods_objects/ThreadPool.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '158348'}]} |
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js canvas - interactive - voxel painter</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
font-family: Monospace;
background-color: #f0f0f0;
margin: 0px;
overflow: hidden;
}
</style>
</head>
<body>
<script src="../build/three.js"></script>
<script src="js/renderers/Projector.js"></script>
<script src="js/renderers/CanvasRenderer.js"></script>
<script>
// Scene-wide state shared by the handlers below.
var container;
var camera, scene, renderer;
// Invisible ground plane that the raycaster can always hit.
var plane;
var mouse, raycaster, isShiftDown = false;
// One shared geometry/material pair for every voxel placed.
var cubeGeometry = new THREE.BoxGeometry( 50, 50, 50 );
var cubeMaterial = new THREE.MeshLambertMaterial( { color: 0x00ff80, overdraw: 0.5 } );
// Everything the picking ray may intersect: the plane plus all voxels.
var objects = [];
init();
render();
// Builds the scene — camera, grid, picking plane, lights, renderer — and
// wires up the DOM event listeners. Fixes: removed an unused wireframe
// material that was allocated and never referenced, and the duplicate
// 'var directionalLight' redeclaration.
function init() {

	container = document.createElement( 'div' );
	document.body.appendChild( container );

	var info = document.createElement( 'div' );
	info.style.position = 'absolute';
	info.style.top = '10px';
	info.style.width = '100%';
	info.style.textAlign = 'center';
	info.innerHTML = '<a href="http://threejs.org" target="_blank">three.js</a> - voxel painter<br><strong>click</strong>: add voxel, <strong>shift + click</strong>: remove voxel, <a href="javascript:save()">save .png</a>';
	container.appendChild( info );

	camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 10000 );
	camera.position.set( 500, 800, 1300 );
	camera.lookAt( new THREE.Vector3() );

	scene = new THREE.Scene();

	// Grid
	var gridHelper = new THREE.GridHelper( 1000, 20 );
	scene.add( gridHelper );

	// Picking setup: an invisible plane gives the ray something to hit
	// when no voxel is under the cursor.
	raycaster = new THREE.Raycaster();
	mouse = new THREE.Vector2();

	var geometry = new THREE.PlaneBufferGeometry( 1000, 1000 );
	geometry.rotateX( - Math.PI / 2 );

	plane = new THREE.Mesh( geometry, new THREE.MeshBasicMaterial( { visible: false } ) );
	scene.add( plane );
	objects.push( plane );

	// Lights
	var ambientLight = new THREE.AmbientLight( 0x606060 );
	scene.add( ambientLight );

	var directionalLight = new THREE.DirectionalLight( 0xffffff );
	directionalLight.position.x = Math.random() - 0.5;
	directionalLight.position.y = Math.random() - 0.5;
	directionalLight.position.z = Math.random() - 0.5;
	directionalLight.position.normalize();
	scene.add( directionalLight );

	// Second, dimmer fill light; reuse the variable instead of redeclaring.
	directionalLight = new THREE.DirectionalLight( 0x808080 );
	directionalLight.position.x = Math.random() - 0.5;
	directionalLight.position.y = Math.random() - 0.5;
	directionalLight.position.z = Math.random() - 0.5;
	directionalLight.position.normalize();
	scene.add( directionalLight );

	renderer = new THREE.CanvasRenderer();
	renderer.setClearColor( 0xf0f0f0 );
	renderer.setPixelRatio( window.devicePixelRatio );
	renderer.setSize( window.innerWidth, window.innerHeight );
	container.appendChild( renderer.domElement );

	document.addEventListener( 'mousedown', onDocumentMouseDown, false );
	document.addEventListener( 'keydown', onDocumentKeyDown, false );
	document.addEventListener( 'keyup', onDocumentKeyUp, false );

	window.addEventListener( 'resize', onWindowResize, false );

}
// Keeps the camera aspect ratio and canvas size in sync with the window.
function onWindowResize() {
	var width = window.innerWidth;
	var height = window.innerHeight;

	camera.aspect = width / height;
	camera.updateProjectionMatrix();

	renderer.setSize( width, height );
	render();
}
// Click handler: adds a voxel at the picked cell, or — with shift held —
// removes the picked voxel. Re-renders only when something was hit.
function onDocumentMouseDown( event ) {
	event.preventDefault();

	// Convert the click to normalized device coordinates in [-1, 1].
	mouse.x = ( event.clientX / renderer.domElement.clientWidth ) * 2 - 1;
	mouse.y = - ( event.clientY / renderer.domElement.clientHeight ) * 2 + 1;

	raycaster.setFromCamera( mouse, camera );
	var hits = raycaster.intersectObjects( objects );
	if ( hits.length === 0 ) return;

	var hit = hits[ 0 ];

	if ( isShiftDown ) {
		// Delete mode — but never remove the picking plane itself.
		if ( hit.object !== plane ) {
			scene.remove( hit.object );
			objects.splice( objects.indexOf( hit.object ), 1 );
		}
	} else {
		// Create mode: offset along the hit face's normal, then snap to
		// the 50-unit grid, centered within the cell.
		var voxel = new THREE.Mesh( cubeGeometry, cubeMaterial );
		voxel.position.copy( hit.point ).add( hit.face.normal );
		voxel.position.divideScalar( 50 ).floor().multiplyScalar( 50 ).addScalar( 25 );
		scene.add( voxel );
		objects.push( voxel );
	}

	render();
}
// Records the shift key (keyCode 16) being pressed.
function onDocumentKeyDown( event ) {
	if ( event.keyCode === 16 ) {
		isShiftDown = true;
	}
}
// Records the shift key (keyCode 16) being released.
function onDocumentKeyUp( event ) {
	if ( event.keyCode === 16 ) {
		isShiftDown = false;
	}
}
// Opens the current canvas contents as a PNG in a new window.
// Returns false so an href-triggered call cancels navigation.
function save() {
	var dataUrl = renderer.domElement.toDataURL('image/png');
	window.open( dataUrl, 'mywindow' );
	return false;
}
// Re-renders the scene on demand (there is no animation loop).
function render() {
renderer.render( scene, camera );
}
</script>
</body>
</html>
| {'content_hash': '572809411790fd0fb768311d652eeb91', 'timestamp': '', 'source': 'github', 'line_count': 201, 'max_line_length': 223, 'avg_line_length': 25.109452736318406, 'alnum_prop': 0.6524668119675054, 'repo_name': 'ondys/three.js', 'id': 'c5f8b59aa5799a06460fbc132f58fb0cda1c4f76', 'size': '5047', 'binary': False, 'copies': '2', 'ref': 'refs/heads/dev', 'path': 'examples/canvas_interactive_voxelpainter.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Batchfile', 'bytes': '139'}, {'name': 'C', 'bytes': '80088'}, {'name': 'C++', 'bytes': '116991'}, {'name': 'CSS', 'bytes': '18464'}, {'name': 'GLSL', 'bytes': '89790'}, {'name': 'HTML', 'bytes': '39435'}, {'name': 'JavaScript', 'bytes': '4672414'}, {'name': 'MAXScript', 'bytes': '75494'}, {'name': 'Python', 'bytes': '417674'}, {'name': 'Shell', 'bytes': '9783'}]} |
<?php
/*
Plugin Name: Users Only
Plugin URI: https://github.com/jacobbuck/wp-users-only
Description: Restrict a website to logged in users only, and disable the dashboard for non-admin user types.
Version: 2.2.2
Author: Jacob Buck
Author URI: http://jacobbuck.co.nz/
*/
// Load the plugin's component classes.
require('class.users_only.php');
require('class.users_only_settings.php');
require('class.users_only_shortcodes.php');
// Register each component's hooks with WordPress.
Users_Only::initialize();
Users_Only_Settings::initialize();
Users_Only_Shortcodes::initialize(); | {'content_hash': 'abcac04b30b05da7e4b597c70021a1e3', 'timestamp': '', 'source': 'github', 'line_count': 17, 'max_line_length': 108, 'avg_line_length': 28.705882352941178, 'alnum_prop': 0.7561475409836066, 'repo_name': 'jacobbuck/wp-users-only', 'id': '7f2a7a39a9f5d54d99af9e96605f84580cbda9d9', 'size': '488', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'users-only.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'PHP', 'bytes': '13124'}]} |
{% extends 'frontend/core.html' %}
{% load bootstrap3 %}
{% block nav %}
<div class="navbar navbar-default navbar-fixed-top">
<div class="container">
<div class="navbar-header">
<a href="{% url 'index' %}" class="navbar-brand">SGK</a>
<button class="navbar-toggle" type="button" data-toggle="collapse" data-target="#navbar-main">
{% bootstrap_icon "th" %}
</button>
</div>
<div class="navbar-collapse collapse" id="navbar-main">
{% if user.is_authenticated %}
<ul class="nav navbar-nav">
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">Gestión <span class="caret"></span></a>
<ul class="dropdown-menu">
<li><a tabindex="1" href="{% url 'turno_list' %}">Turnos</a></li>
<li class="divider"></li>
<li><a tabindex="2" href="{% url 'paciente_list' %}">Paciente</a></li>
<li><a tabindex="3" href="{% url 'persona_list' %}">Personas</a></li>
<li><a tabindex="4" href="../profesionales/">Profesionales</a></li>
</ul>
</li>
<li>
<a href="{% url 'turno_create' %}">Nuevo turno</a>
</li>
<li>
<a href="{% url 'paciente_create' %}">Nuevo paciente</a>
</li>
</ul>
{% endif %}
<div class="nav navbar-nav navbar-right">
{% block header %}
{% if user.is_authenticated %}
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">
Hola<strong> {{ user.username }} </strong><span class="caret"></span>
</a>
<ul class="dropdown-menu">
<li><a href="{% url 'auth_password_change' %}">Cambiar contraseña</a></li>
<li><a href="#" onclick="closeSession()">Cerrar sesión</a></li>
</ul>
</li>
{% else %}
<li><a href="{% url 'auth_login' %}">Iniciar sesión</a></li>
{% endif %}
{% endblock %}
</div>
</div>
</div>
</div>
{% endblock %}
| {'content_hash': '27fafe19fdd3517dda6497b15ef17314', 'timestamp': '', 'source': 'github', 'line_count': 55, 'max_line_length': 118, 'avg_line_length': 44.8, 'alnum_prop': 0.42613636363636365, 'repo_name': 'matuu/sgk', 'id': '33adbad4aa0fce95e6a10e718738fe5433434588', 'size': '2468', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'sgk/frontend/templates/frontend/nav.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '329843'}, {'name': 'HTML', 'bytes': '187248'}, {'name': 'JavaScript', 'bytes': '89413'}, {'name': 'Python', 'bytes': '236775'}]} |
package org.dyn4j.sandbox.panels;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.text.DecimalFormat;
import java.text.MessageFormat;
import javax.swing.BorderFactory;
import javax.swing.GroupLayout;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFormattedTextField;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JToggleButton;
import javax.swing.border.TitledBorder;
import org.dyn4j.dynamics.joint.Joint;
import org.dyn4j.dynamics.joint.WeldJoint;
import org.dyn4j.geometry.Vector2;
import org.dyn4j.sandbox.SandboxBody;
import org.dyn4j.sandbox.icons.Icons;
import org.dyn4j.sandbox.listeners.SelectTextFocusListener;
import org.dyn4j.sandbox.resources.Messages;
import org.dyn4j.sandbox.utilities.ControlUtilities;
/**
* Panel used to create or edit an weld joint.
* @author William Bittle
* @version 1.0.1
* @since 1.0.0
*/
public class WeldJointPanel extends JointPanel implements InputPanel, ActionListener, ItemListener {
/** The version id */
private static final long serialVersionUID = 8812128051146951491L;
/** The body 1 drop down label */
private JLabel lblBody1;
/** The body 2 drop down label */
private JLabel lblBody2;
/** The body 1 drop down */
private JComboBox cmbBody1;
/** The body 2 drop down */
private JComboBox cmbBody2;
// reference angle
/** The reference angle label */
private JLabel lblReferenceAngle;
/** The reference angle text field */
private JFormattedTextField txtReferenceAngle;
/** The reference angle auto compute button */
private JToggleButton tglReferenceAngle;
/** The button used to reset the reference angle (only used in edit mode) */
private JButton btnResetReferenceAngle;
// anchor points
/** The anchor label */
private JLabel lblAnchor;
/** The x label for the anchor point */
private JLabel lblX1;
/** The y label for the anchor point */
private JLabel lblY1;
/** The anchor's x text field */
private JFormattedTextField txtX1;
/** The anchor's y text field */
private JFormattedTextField txtY1;
/** The button to set anchor1 to body1's center of mass */
private JButton btnUseCenter1;
/** The button to set anchor2 to body2's center of mass */
private JButton btnUseCenter2;
// frequency and damping ratio
/** The frequency label */
private JLabel lblFrequency;
/** The ratio label */
private JLabel lblRatio;
/** The frequency text field */
private JFormattedTextField txtFrequency;
/** The ratio text field */
private JFormattedTextField txtRatio;
/**
* Full constructor.
* @param joint the original joint; null if creating
* @param bodies the list of bodies to choose from
* @param edit true if the joint is being edited
*/
public WeldJointPanel(WeldJoint joint, SandboxBody[] bodies, boolean edit) {
super();
// get initial values
String name = (String)joint.getUserData();
boolean collision = joint.isCollisionAllowed();
SandboxBody b1 = (SandboxBody)joint.getBody1();
SandboxBody b2 = (SandboxBody)joint.getBody2();
Vector2 an = joint.getAnchor1();
double f = joint.getFrequency();
double r = joint.getDampingRatio();
double ref = joint.getReferenceAngle();
// set the super classes defaults
this.txtName.setText(name);
this.txtName.setColumns(15);
this.chkCollision.setSelected(collision);
this.lblBody1 = new JLabel(Messages.getString("panel.joint.body1"), Icons.INFO, JLabel.LEFT);
this.lblBody2 = new JLabel(Messages.getString("panel.joint.body2"), Icons.INFO, JLabel.LEFT);
this.lblBody1.setToolTipText(Messages.getString("panel.joint.body1.tooltip"));
this.lblBody2.setToolTipText(Messages.getString("panel.joint.body2.tooltip"));
this.cmbBody1 = new JComboBox(bodies);
this.cmbBody2 = new JComboBox(bodies);
this.lblAnchor = new JLabel(Messages.getString("panel.joint.anchor"), Icons.INFO, JLabel.LEFT);
this.lblAnchor.setToolTipText(Messages.getString("panel.joint.weld.anchor.tooltip"));
this.lblX1 = new JLabel(Messages.getString("x"));
this.lblY1 = new JLabel(Messages.getString("y"));
this.txtX1 = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.anchor.format")));
this.txtX1.addFocusListener(new SelectTextFocusListener(this.txtX1));
this.txtX1.setColumns(7);
this.txtY1 = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.anchor.format")));
this.txtY1.addFocusListener(new SelectTextFocusListener(this.txtY1));
this.txtY1.setColumns(7);
this.btnUseCenter1 = new JButton(Messages.getString("panel.joint.useCenter"));
this.btnUseCenter1.setToolTipText(Messages.getString("panel.joint.useCenter.tooltip"));
this.btnUseCenter1.setActionCommand("use-com1");
this.btnUseCenter1.addActionListener(this);
this.btnUseCenter2 = new JButton(Messages.getString("panel.joint.useCenter"));
this.btnUseCenter2.setToolTipText(Messages.getString("panel.joint.useCenter.tooltip"));
this.btnUseCenter2.setActionCommand("use-com2");
this.btnUseCenter2.addActionListener(this);
this.lblReferenceAngle = new JLabel(Messages.getString("panel.joint.referenceAngle"), Icons.INFO, JLabel.LEFT);
this.lblReferenceAngle.setToolTipText(MessageFormat.format(Messages.getString("panel.joint.referenceAngle.tooltip"), Messages.getString("unit.rotation")));
this.txtReferenceAngle = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.referenceAngle.format")));
this.txtReferenceAngle.addFocusListener(new SelectTextFocusListener(this.txtReferenceAngle));
this.txtReferenceAngle.setValue(Math.toDegrees(ref));
this.tglReferenceAngle = new JToggleButton(Messages.getString("panel.joint.referenceAngle.autoCompute"));
this.tglReferenceAngle.setToolTipText(Messages.getString("panel.joint.referenceAngle.autoCompute.tooltip"));
this.tglReferenceAngle.setActionCommand("toggle-auto-compute");
this.tglReferenceAngle.setSelected(true);
this.btnResetReferenceAngle = new JButton(Messages.getString("panel.joint.referenceAngle.reset"));
this.btnResetReferenceAngle.setToolTipText(Messages.getString("panel.joint.referenceAngle.reset.tooltip"));
this.btnResetReferenceAngle.setActionCommand("reset-reference-angle");
this.lblFrequency = new JLabel(Messages.getString("panel.joint.weld.frequency"), Icons.INFO, JLabel.LEFT);
this.lblFrequency.setToolTipText(MessageFormat.format(Messages.getString("panel.joint.weld.frequency.tooltip"), Messages.getString("unit.inverseTime"), Messages.getString("unit.time")));
this.txtFrequency = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.weld.frequency.format")));
this.txtFrequency.addFocusListener(new SelectTextFocusListener(this.txtFrequency));
this.lblRatio = new JLabel(Messages.getString("panel.joint.weld.dampingRatio"), Icons.INFO, JLabel.LEFT);
this.lblRatio.setToolTipText(Messages.getString("panel.joint.weld.dampingRatio.tooltip"));
this.txtRatio = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.weld.dampingRatio.format")));
this.txtRatio.addFocusListener(new SelectTextFocusListener(this.txtRatio));
// set defaults
this.cmbBody1.setSelectedItem(b1);
this.cmbBody2.setSelectedItem(b2);
this.txtX1.setValue(an.x);
this.txtY1.setValue(an.y);
this.txtFrequency.setValue(f);
this.txtRatio.setValue(r);
// setup edit mode if necessary
if (edit) {
// disable/hide certain controls
this.cmbBody1.setEnabled(false);
this.cmbBody2.setEnabled(false);
this.txtX1.setEnabled(false);
this.txtY1.setEnabled(false);
this.btnUseCenter1.setEnabled(false);
this.btnUseCenter2.setEnabled(false);
this.tglReferenceAngle.setVisible(false);
} else {
this.btnResetReferenceAngle.setVisible(false);
}
// add listeners after all the values have been set
// this will preserve the initial values
this.cmbBody1.addItemListener(this);
this.cmbBody2.addItemListener(this);
this.tglReferenceAngle.addActionListener(this);
this.btnResetReferenceAngle.addActionListener(this);
// setup the sections
GroupLayout layout;
// setup the general section
JPanel pnlGeneral = new JPanel();
TitledBorder border = BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), Messages.getString("panel.section.general"));
border.setTitlePosition(TitledBorder.TOP);
pnlGeneral.setBorder(border);
layout = new GroupLayout(pnlGeneral);
pnlGeneral.setLayout(layout);
layout.setAutoCreateContainerGaps(true);
layout.setAutoCreateGaps(true);
layout.setHorizontalGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup()
.addComponent(this.lblName)
.addComponent(this.lblCollision)
.addComponent(this.lblBody1)
.addComponent(this.lblBody2)
.addComponent(this.lblAnchor)
.addComponent(this.lblReferenceAngle))
.addGroup(layout.createParallelGroup()
.addComponent(this.txtName)
.addComponent(this.chkCollision)
.addGroup(layout.createSequentialGroup()
.addComponent(this.cmbBody1)
.addComponent(this.btnUseCenter1))
.addGroup(layout.createSequentialGroup()
.addComponent(this.cmbBody2)
.addComponent(this.btnUseCenter2))
.addGroup(layout.createSequentialGroup()
.addComponent(this.txtX1)
.addComponent(this.lblX1)
.addComponent(this.txtY1)
.addComponent(this.lblY1))
.addGroup(layout.createSequentialGroup()
.addComponent(this.txtReferenceAngle)
.addComponent(this.tglReferenceAngle)
.addComponent(this.btnResetReferenceAngle))));
layout.setVerticalGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblName)
.addComponent(this.txtName, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblCollision)
.addComponent(this.chkCollision, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblBody1)
.addComponent(this.cmbBody1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(this.btnUseCenter1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblBody2)
.addComponent(this.cmbBody2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(this.btnUseCenter2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblAnchor)
.addComponent(this.txtX1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(this.lblX1)
.addComponent(this.txtY1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(this.lblY1))
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblReferenceAngle)
.addComponent(this.txtReferenceAngle, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(this.tglReferenceAngle, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addComponent(this.btnResetReferenceAngle, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)));
// setup the spring/damper secion
JPanel pnlSpringDamper = new JPanel();
border = BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), Messages.getString("panel.joint.section.springDamper"));
border.setTitlePosition(TitledBorder.TOP);
pnlSpringDamper.setBorder(border);
layout = new GroupLayout(pnlSpringDamper);
pnlSpringDamper.setLayout(layout);
layout.setAutoCreateContainerGaps(true);
layout.setAutoCreateGaps(true);
layout.setHorizontalGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup()
.addComponent(this.lblFrequency)
.addComponent(this.lblRatio))
.addGroup(layout.createParallelGroup()
.addComponent(this.txtFrequency)
.addComponent(this.txtRatio)));
layout.setVerticalGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblFrequency)
.addComponent(this.txtFrequency, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
.addComponent(this.lblRatio)
.addComponent(this.txtRatio, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)));
// setup the layout of the sections
layout = new GroupLayout(this);
this.setLayout(layout);
layout.setAutoCreateContainerGaps(true);
layout.setAutoCreateGaps(true);
layout.setHorizontalGroup(layout.createParallelGroup()
.addComponent(pnlGeneral)
.addComponent(pnlSpringDamper));
layout.setVerticalGroup(layout.createSequentialGroup()
.addComponent(pnlGeneral)
.addComponent(pnlSpringDamper));
}
/**
* Returns the computed reference angle between the two bodies.
* @return double
*/
private double computeReferenceAngle() {
double r1 = ((SandboxBody)this.cmbBody1.getSelectedItem()).getTransform().getRotation();
double r2 = ((SandboxBody)this.cmbBody2.getSelectedItem()).getTransform().getRotation();
return r1 - r2;
}
/* (non-Javadoc)
* @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
*/
@Override
public void actionPerformed(ActionEvent e) {
if ("use-com1".equals(e.getActionCommand())) {
Vector2 c = ((SandboxBody)this.cmbBody1.getSelectedItem()).getWorldCenter();
this.txtX1.setValue(c.x);
this.txtY1.setValue(c.y);
} else if ("use-com2".equals(e.getActionCommand())) {
Vector2 c = ((SandboxBody)this.cmbBody2.getSelectedItem()).getWorldCenter();
this.txtX1.setValue(c.x);
this.txtY1.setValue(c.y);
} else if ("reset-reference-angle".equals(e.getActionCommand())) {
this.txtReferenceAngle.setValue(Math.toDegrees(this.computeReferenceAngle()));
} else if ("toggle-auto-compute".equals(e.getActionCommand())) {
// if the state of the toggle button changes, check if its selected now, if so
// then recompute the reference angle
if (this.tglReferenceAngle.isSelected()) {
this.txtReferenceAngle.setValue(Math.toDegrees(this.computeReferenceAngle()));
}
}
}
/* (non-Javadoc)
* @see java.awt.event.ItemListener#itemStateChanged(java.awt.event.ItemEvent)
*/
@Override
public void itemStateChanged(ItemEvent e) {
// when the items change in either drop down, check if the auto compute button is
// selected, if so, then compute the reference angle
if (this.tglReferenceAngle.isSelected()) {
this.txtReferenceAngle.setValue(Math.toDegrees(this.computeReferenceAngle()));
}
}
/* (non-Javadoc)
* @see org.dyn4j.sandbox.panels.JointPanel#setJoint(org.dyn4j.dynamics.joint.Joint)
*/
@Override
public void setJoint(Joint joint) {
if (joint instanceof WeldJoint) {
WeldJoint wj = (WeldJoint)joint;
// set the super class properties
wj.setUserData(this.txtName.getText());
wj.setCollisionAllowed(this.chkCollision.isSelected());
wj.setFrequency(ControlUtilities.getDoubleValue(this.txtFrequency));
wj.setDampingRatio(ControlUtilities.getDoubleValue(this.txtRatio));
wj.setReferenceAngle(Math.toRadians(ControlUtilities.getDoubleValue(this.txtReferenceAngle)));
}
}
/* (non-Javadoc)
* @see org.dyn4j.sandbox.panels.JointPanel#getJoint()
*/
@Override
public Joint getJoint() {
// get the selected bodies
SandboxBody body1 = (SandboxBody)this.cmbBody1.getSelectedItem();
SandboxBody body2 = (SandboxBody)this.cmbBody2.getSelectedItem();
// get the anchor points
Vector2 a = new Vector2(
ControlUtilities.getDoubleValue(this.txtX1),
ControlUtilities.getDoubleValue(this.txtY1));
WeldJoint wj = new WeldJoint(body1, body2, a);
// set the super class properties
wj.setUserData(this.txtName.getText());
wj.setCollisionAllowed(this.chkCollision.isSelected());
wj.setFrequency(ControlUtilities.getDoubleValue(this.txtFrequency));
wj.setDampingRatio(ControlUtilities.getDoubleValue(this.txtRatio));
wj.setReferenceAngle(Math.toRadians(ControlUtilities.getDoubleValue(this.txtReferenceAngle)));
return wj;
}
/* (non-Javadoc)
* @see org.dyn4j.sandbox.panels.InputPanel#isValidInput()
*/
@Override
public boolean isValidInput() {
    // a non-empty name is required
    String jointName = this.txtName.getText();
    if (jointName == null || jointName.isEmpty()) return false;
    // a body cannot be welded to itself
    if (this.cmbBody1.getSelectedItem() == this.cmbBody2.getSelectedItem()) return false;
    // the damping ratio must be in [0, 1]
    double dampingRatio = ControlUtilities.getDoubleValue(this.txtRatio);
    if (dampingRatio < 0.0 || dampingRatio > 1.0) return false;
    // the frequency must be non-negative
    double frequency = ControlUtilities.getDoubleValue(this.txtFrequency);
    return frequency >= 0.0;
}
/* (non-Javadoc)
* @see org.dyn4j.sandbox.panels.InputPanel#showInvalidInputMessage(java.awt.Window)
*/
@Override
public void showInvalidInputMessage(Window owner) {
    // Note: unlike isValidInput(), every failing rule is reported, so the
    // user sees all problems in one pass.
    String jointName = this.txtName.getText();
    if (jointName == null || jointName.isEmpty()) {
        this.showError(owner, "panel.joint.missingName");
    }
    // a body cannot be welded to itself
    if (this.cmbBody1.getSelectedItem() == this.cmbBody2.getSelectedItem()) {
        this.showError(owner, "panel.joint.sameBody");
    }
    // the damping ratio must be in [0, 1]
    double dampingRatio = ControlUtilities.getDoubleValue(this.txtRatio);
    if (dampingRatio < 0.0 || dampingRatio > 1.0) {
        this.showError(owner, "panel.joint.invalidDampingRatio");
    }
    // the frequency must be non-negative
    double frequency = ControlUtilities.getDoubleValue(this.txtFrequency);
    if (frequency < 0.0) {
        this.showError(owner, "panel.joint.invalidFrequency");
    }
}

/**
 * Shows a localized error dialog.
 * @param owner the dialog owner window
 * @param messageKey the resource bundle key of the error message
 */
private void showError(Window owner, String messageKey) {
    JOptionPane.showMessageDialog(owner, Messages.getString(messageKey), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
}
}
| {'content_hash': '521c0196ad755a0ae4a85206e53b0771', 'timestamp': '', 'source': 'github', 'line_count': 472, 'max_line_length': 188, 'avg_line_length': 39.95974576271186, 'alnum_prop': 0.7571178622554477, 'repo_name': 'dmitrykolesnikovich/dyn4j', 'id': '0b02d3517e203ca5ef4f1056454957d3f3302436', 'size': '20458', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'sandbox/src/org/dyn4j/sandbox/panels/WeldJointPanel.java', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'HTML', 'bytes': '53251'}, {'name': 'Java', 'bytes': '4204231'}]} |
#ifndef OGLR_RENDERER_SHADOW_SHADOWRENDERER_H_
#define OGLR_RENDERER_SHADOW_SHADOWRENDERER_H_
#include "renderer/IRenderer.h"
#include "opengl/texture/FrameBuffer.h"
#include "opengl/texture/Texture2D.h"
#include "opengl/shader/ShaderProgram.h"
#include <glm/glm.hpp>
#include <memory>
#include <vector>
namespace oglr {
/**
 * Renderer that produces a shadow map: scene depth is rendered into a
 * dedicated framebuffer/texture using a depth-only shader.
 *
 * NOTE(review): only the declaration is visible here; how the resulting
 * shadow map is consumed is defined in the implementation file.
 */
class ShadowRenderer : public IRenderer
{
public:
    ShadowRenderer();

    /** IRenderer entry point; renders the given scene for the given view. */
    void render(OpenglContext& context, SceneView& view, Scene& scene) override;

private:
    /** The depth framebuffer used to render the shadowmap. */
    FrameBuffer m_depthFramebuffer;
    /** Depth texture — presumably the attachment of m_depthFramebuffer that
        holds the shadow map; confirm in the .cpp. */
    Texture2D m_depthTexture;
    /** A depth only shader to fill the shadowmap. */
    std::unique_ptr<ShaderProgram> m_depthOnlyShader;
};
}
#endif // OGLR_RENDERER_SHADOW_SHADOWRENDERER_H_ | {'content_hash': 'd5b8a12674efcbfead79e33da5820a78', 'timestamp': '', 'source': 'github', 'line_count': 36, 'max_line_length': 77, 'avg_line_length': 21.72222222222222, 'alnum_prop': 0.7429667519181585, 'repo_name': 'mlaveaux/OpenGLRenderer', 'id': 'dfc767173e28c16a88afb3058c3f84d71ccc9f84', 'size': '1380', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/src/renderer/shadow/ShadowRenderer.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '21973'}, {'name': 'C++', 'bytes': '180003'}, {'name': 'CMake', 'bytes': '13658'}, {'name': 'GLSL', 'bytes': '16422'}]} |
package org.innovateuk.ifs.workflow.audit;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.innovateuk.ifs.commons.util.AuditableEntity;
import org.innovateuk.ifs.workflow.domain.Process;
import javax.persistence.*;
/**
* Records a {@link Process} state change.
*
* @see AuditableEntity
* @see Process
*/
@Entity
public class ProcessHistory extends AuditableEntity {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    /** The process whose state change this history entry records. */
    @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.PERSIST, optional = false)
    @JoinColumn(name="process_id", referencedColumnName = "id")
    private Process process;

    /** Snapshot of the process's state name at the time this entry was created. */
    private String processStateName;

    /** Default constructor (required by JPA for entity instantiation). */
    public ProcessHistory() {
        this.process = null;
        this.processStateName = null;
    }

    /**
     * Creates a history entry capturing the current state of the given process.
     *
     * @param process the process whose state is being recorded; must not be null
     * @throws NullPointerException if {@code process} is null
     */
    public ProcessHistory(Process process) {
        if (process == null) throw new NullPointerException("process cannot be null");
        this.process = process;
        // capture the name now, so the entry is stable even if the process moves on
        this.processStateName = process.getProcessState().getStateName();
    }

    public Process getProcess() {
        return process;
    }

    public void setProcess(Process process) {
        this.process = process;
    }

    // package-private accessor — presumably for persistence/tests only; confirm callers
    String getProcessStateName() {
        return processStateName;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ProcessHistory that = (ProcessHistory) o;
        return new EqualsBuilder()
                .append(id, that.id)
                .append(process, that.process)
                .append(processStateName, that.processStateName)
                .isEquals();
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder(17, 37)
                .append(id)
                .append(process)
                .append(processStateName)
                .toHashCode();
    }
}
layout: post
date: 2017-06-18
title: "Naeem Khan VENICE Short Sleeves Court Train Ballgown"
category: Naeem Khan
tags: [Naeem Khan,Ballgown,Queen Anne,Court Train,Short Sleeves]
---
### Naeem Khan VENICE
Just **$329.99**
### Short Sleeves Court Train Ballgown
<table><tr><td>BRANDS</td><td>Naeem Khan</td></tr><tr><td>Silhouette</td><td>Ballgown</td></tr><tr><td>Neckline</td><td>Queen Anne</td></tr><tr><td>Hemline/Train</td><td>Court Train</td></tr><tr><td>Sleeve</td><td>Short Sleeves</td></tr></table>
<a href="https://www.readybrides.com/en/naeem-khan/9963-naeem-khan-venice.html"><img src="//img.readybrides.com/22772/naeem-khan-venice.jpg" alt="Naeem Khan VENICE" style="width:100%;" /></a>
<!-- break --><a href="https://www.readybrides.com/en/naeem-khan/9963-naeem-khan-venice.html"><img src="//img.readybrides.com/22771/naeem-khan-venice.jpg" alt="Naeem Khan VENICE" style="width:100%;" /></a>
Buy it: [https://www.readybrides.com/en/naeem-khan/9963-naeem-khan-venice.html](https://www.readybrides.com/en/naeem-khan/9963-naeem-khan-venice.html)
| {'content_hash': 'cff582b58378effcea390aa64a1b9979', 'timestamp': '', 'source': 'github', 'line_count': 14, 'max_line_length': 245, 'avg_line_length': 75.64285714285714, 'alnum_prop': 0.7129367327667611, 'repo_name': 'HOLEIN/HOLEIN.github.io', 'id': 'b0f3a5a2c0a155451b0a050e1e4453fa68d32fba', 'size': '1063', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '_posts/2017-06-18-Naeem-Khan-VENICE-Short-Sleeves-Court-Train-Ballgown.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '83876'}, {'name': 'HTML', 'bytes': '14547'}, {'name': 'Ruby', 'bytes': '897'}]} |
'''OpenGL extension VERSION.GLX_1_0
This module customises the behaviour of the
OpenGL.raw.GLX.VERSION.GLX_1_0 to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/VERSION/GLX_1_0.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLX import _types, _glgets
from OpenGL.raw.GLX.VERSION.GLX_1_0 import *
from OpenGL.raw.GLX.VERSION.GLX_1_0 import _EXTENSION_NAME
def glInitGlx10VERSION():
    '''Return boolean indicating whether this extension is available'''
    # `extensions` is already imported at module level; no local re-import needed.
    return extensions.hasGLExtension(_EXTENSION_NAME)
### END AUTOGENERATED SECTION | {'content_hash': '1296138e8a9a259e05b02c4a4fff5d53', 'timestamp': '', 'source': 'github', 'line_count': 23, 'max_line_length': 71, 'avg_line_length': 32.21739130434783, 'alnum_prop': 0.7800269905533064, 'repo_name': 'alexus37/AugmentedRealityChess', 'id': '2ac67d4129201e743bb39a3d9b8d33c12178f1d9', 'size': '741', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GLX/VERSION/GLX_1_0.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '158062'}, {'name': 'C++', 'bytes': '267993'}, {'name': 'CMake', 'bytes': '11319'}, {'name': 'Fortran', 'bytes': '3707'}, {'name': 'Makefile', 'bytes': '14618'}, {'name': 'Python', 'bytes': '12813086'}, {'name': 'Roff', 'bytes': '3310'}, {'name': 'Shell', 'bytes': '3855'}]} |
CONTRIBUTING
============
Reporting a Bug
---------------
Please report bugs!
Before submitting a bug:
- Check the [tracker][1] to see if anyone has already reported the bug.
If your problem definitely looks like a new bug, report it using the
official bug [tracker][1] and follow some basic rules:
- Use the title field to clearly describe the issue;
- Describe the steps needed to reproduce the bug with short code
examples (providing a unit test that illustrates the bug is best);
- Give as much detail as possible about your environment (OS, PHP
version, Symfony version, enabled extensions, ...);
- _(optional)_ Attach a patch.
Submitting a Patch
------------------
### Step 1: Setup your Environment ###
#### Install the Software Stack ####
Before beginning work, setup a friendly environment with the
following software:
- Git;
- PHP version 5.3 or above
#### Configure Git ####
Set up your user information with your real name and a working email
address:
```bash
$ git config --global user.name "Your Name"
$ git config --global user.email [email protected]
```
If you are new to Git, you are highly recommended to read the excellent
and free [ProGit][2] book.
Windows users: when installing Git, the installer will ask what to do
with line endings, and suggests replacing all LF with CRLF. This is the
wrong setting if you wish to contribute! Selecting
the as-is method is your best choice, as Git will convert your line
feeds to the ones in the repository. If you have already installed Git,
you can check the value of this setting by typing:
```bash
$ git config core.autocrlf
```
This will return either "false", "input" or "true"; "true" and "false"
being the wrong values. Change it to "input" by typing:
```bash
$ git config --global core.autocrlf input
```
Replace --global with --local if you want to set it only for the active
repository.
#### Get the Source Code ####
Get the source code:
- Create a [GitHub][3] account and sign in;
- Fork the [repository][4] (click on the "Fork" button);
- After the "forking action" has completed, clone your fork locally
(this will create a `gaufrette-extras-bundle` directory):
```bash
$ git clone [email protected]:USERNAME/gaufrette-extras-bundle-bundle.git
```
- Add the upstream repository as a remote
```bash
$ cd gaufrette-extras-bundle
$ git remote add upstream git://github.com/course-hero/gaufrette-extras-bundle.git
```
#### Install All Dependencies Using Composer ####
Install all needed dependencies:
- Install [composer][5];
- Run
```bash
$ cd gaufrette-extras-bundle
$ php composer.phar install
```
Change `composer.phar` as needed to point to wherever composer was
installed.
### Step 2: Work on your Patch ###
#### The License ####
Before you start, you must know that all patches you are going to submit
must be released under the [Apache v2][6] license.
#### Choose the Right Branch ####
Before working on a patch, you must determine on which branch you need
to work. The branch should be based on the `master` branch if you want
to add a new feature. If you want to fix a bug, use the oldest but
still maintained version where the bug likely happens.
All bug fixes merged into maintenance branches are also merged into more
recent branches on a regular basis. For instance, if you submit a patch
for the `x` branch, the patch will also be applied by the core team on
the `master` branch.
#### Create a Topic Branch ####
Each time you want to work on a patch for a bug or on an enhancement,
create a topic branch:
```bash
$ git checkout -b BRANCH_NAME master
```
Or, if you want to provide a bugfix for the `x` branch, first track the
remote `x` branch locally:
```bash
$ git checkout -t origin/x
```
Then create a new branch off the `x` branch to work on the bugfix:
```bash
$ git checkout -b BRANCH_NAME x
```
Use a descriptive name for your branch (`ticket_xxx` where `xxx` is the
ticket number is a good convention for bug fixes).
The above checkout commands automatically switch the code to the newly
created branch (check the branch you are working on with `git branch`).
#### Work on your Patch ####
Work on the code as much as you want and commit as much as you want; but
keep in mind the following:
- Read about the Symfony [conventions][7] and follow the coding
[standards][8] (use `git diff --check` to check for trailing spaces
-- also read the tip below);
- Add unit tests to prove that the bug is fixed or that the new feature
actually works;
- Try hard to not break backward compatibility (if you must do so, try
to provide a compatibility layer to support the old way) -- patches
that break backward compatibility have less chance to be merged;
- Do atomic and logically separate commits (use the power of `git
rebase` to have a clean and logical history);
- Squash irrelevant commits that are just about fixing code standards or
fixing typos in your own code;
- Never fix coding standards in some existing code as it makes the code
review more difficult;
- Write good commit messages (see the tip below).
A good commit message is composed of a summary (the first line),
optionally followed by a blank line and a more detailed description.
Use a verb (`fixed`, `added`, ...) to start the summary and don't add a
period at the end. The summary line should be less than 50 characters ideally,
72 characters at maximum. Each line should be wrapped at 72 characters. Here
is an example:
```
Added CONTRIBUTING.md
The CONTRIBUTING.md file contains all of the information that a user or
developer might need to contribute to the project. While it is
primarily written for developers who need technical knowledge about how
to contribute code to the project, it also covers the procedures needed
by regular users who simply integrate it with their
projects.
```
#### Prepare your Patch for Submission ####
When your patch is not about a bug fix (when you add a new feature or
change an existing one, for instance), it must also include the
following:
- An explanation of the changes in the [CHANGELOG][9] file (the `[BC
BREAK]` or the `[DEPRECATION]` prefix must be used when relevant);
- An explanation on how to upgrade an existing application in the
relevant [UPGRADE][10] file if the changes break backward
compatibility.
### Step 3: Submit your Patch ###
Whenever you feel that your patch is ready for submission, follow the
following steps:
#### Rebase your Patch ####
Before submitting your patch, update your branch (needed if it takes you
a while to finish your changes):
```bash
$ git checkout master
$ git fetch upstream
$ git merge upstream/master
$ git checkout BRANCH_NAME
$ git rebase master
```
Replace `master` with the branch you selected previously (e.g. `x`) if
you are working on a bugfix.
When doing the `rebase` command, you might have to fix merge conflicts.
`git status` will show you the _unmerged_ files. Resolve all the
conflicts, then continue the rebase.
```bash
$ git add ... # add resolved files
$ git rebase --continue
```
Check that all tests still pass and push your branch remotely.
```bash
$ git push --force origin BRANCH_NAME
```
#### Make a Pull Request ####
You can now make a pull request on the [gaufrette-extras-bundle][4] GitHub
repository.
Take care to point your pull request towards `gaufrette-extras-bundle:X` if
you want the core team to pull a bugfix based on the `X` branch.
The pull request description must include the following checklist at the
top to ensure that contributions may be reviewed without needless
feedback loops and that your contributions can be included as quickly as possible:
```
| Q | A
| ------------- | ---
| Bug fix? | [yes|no]
| New feature? | [yes|no]
| BC breaks? | [yes|no]
| Deprecations? | [yes|no]
| Tests pass? | [yes|no]
| Fixed tickets | [comma separated list of tickets fixed by the PR]
| License | Apache v2
| Doc PR | [The reference to the documentation PR if any]
```
An example submission could now look as follows:
```
| Q | A
| ------------- | ---
| Bug fix? | yes
| New feature? | no
| BC breaks? | no
| Deprecations? | no
| Tests pass? | yes
| Fixed tickets | #12
| License | Apache v2
| Doc PR |
```
The whole table must be included (do *not* remove lines that you think
are not relevant). For simple typos, minor changes in the PHPDocs, or
changes in translation files, use the shorter version of the checklist:
```
| Q | A
| ------------- | ---
| Fixed tickets | #12
| License | Apache v2
```
Some answers to the questions trigger some more requirements:
- If you answer yes to "Bug fix?", check if the bug is already listed in
the [issue tracker][1] and reference it/them in "Fixed tickets";
- If you answer yes to "New feature?", you must submit a pull request to
the documentation and reference it under the "Doc PR" section;
- If you answer yes to "BC breaks?", the patch must contain updates to
the relevant [CHANGELOG][9] and [UPGRADE][10] files;
- If you answer yes to "Deprecations?", the patch must contain updates
to the relevant [CHANGELOG][9] and [UPGRADE][10] files;
- If you answer no to "Tests pass", you must add an item to a todo-list
with the actions that must be done to fix the tests;
- If the "license" is not Apache v2, just don't submit the pull request
as it won't be accepted anyway.
If some of the previous requirements are not met, create a todo-list and
add relevant items:
```
- [ ] fix the tests as they have not been updated yet
- [ ] submit changes to the documentation
- [ ] document the BC breaks
```
If the code is not finished yet because you don't have time to finish it
or because you want early feedback on your work, add an item to the
todo-list:
```
- [ ] finish the code
- [ ] gather feedback for my changes
```
As long as you have items in the todo-list, please prefix the pull
request title with `[WIP]`.
In the pull request description, give as much detail as possible about
your changes (don't hesitate to give code examples to illustrate your
points). If your pull request is about adding a new feature or
modifying an existing one, explain the rationale for the changes. The
pull request description helps the code review and it serves as a
reference when the code is merged (the pull request description and all
its associated comments are part of the merge commit message).
#### Rework your Patch ####
Based on the feedback on the pull request, you might need to rework your
patch. Before resubmitting the patch, rebase with `upstream/master` or
`upstream/X`, don't merge; and force the push to the origin:
```bash
$ git rebase -f upstream/master
$ git push --force origin BRANCH_NAME
```
When doing a `push --force`, always specify the branch name explicitly
to avoid messing other branches in the repo (`--force` tells Git that
you really want to mess with things so do it carefully).
Often, moderators will ask you to "squash" your commits. This means you
will convert many commits into one commit. To do this, use the rebase
command:
```bash
$ git rebase -i upstream/master
$ git push --force origin BRANCH_NAME
```
After you type this commit, an editor will popup showing a list of
commits:
```
pick 1a31be6 first commit
pick 7fc64b4 second commit
pick 7d33018 third commit
```
To squash all commits into the first one, remove the word `pick` before
the second and third commits, and replace it by the word `squash` or
just `s`. When you save, Git will start rebasing, and if successful,
will ask you to edit the commit message, which by default is a listing
of the commit messages of all the commits. When you are finished,
execute the push command.
[1]: https://github.com/course-hero/gaufrette-extras-bundle/issues
[2]: http://git-scm.com/book
[3]: https://github.com
[4]: https://github.com/course-hero/gaufrette-extras-bundle/
[5]: https://getcomposer.org/download/
[6]: http://www.apache.org/licenses/LICENSE-2.0.txt
[7]: http://symfony.com/doc/current/contributing/code/conventions.html
[8]: http://symfony.com/doc/current/contributing/code/standards.html
[9]: CHANGELOG.md
[10]: UPGRADE.md
| {'content_hash': '3e1d7efcdaf07f6390c6304baaee9ef5', 'timestamp': '', 'source': 'github', 'line_count': 383, 'max_line_length': 89, 'avg_line_length': 31.845953002610965, 'alnum_prop': 0.7264081331474953, 'repo_name': 'course-hero/gaufrette-extras-bundle', 'id': '5c4bbd8baf95860ca08d83b6f1246141393573b8', 'size': '12197', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'CONTRIBUTING.md', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'PHP', 'bytes': '6323'}, {'name': 'Shell', 'bytes': '1036'}]} |
<?php
/**
 * Danish (da) language strings for the discussion plugin.
 *
 * @license GPL 2 (http://www.gnu.org/licenses/gpl.html)
 *
 * @author Esther Brunner <[email protected]>
 * @author iDo <[email protected]>
 * @author Jens Hyllegaard <[email protected]>
 */

// character set and text direction of this translation
$lang['encoding'] = 'utf-8';
$lang['direction'] = 'ltr';

// admin menu entry
$lang['menu'] = 'Moderér diskussioner';

// general labels
$lang['discussion'] = 'Diskussion';
$lang['comment'] = 'Kommentar';
$lang['comments'] = 'Kommentarer';
// NOTE(review): same text as 'comments' — verify against the template usage
$lang['nocomments'] = 'Kommentarer';
$lang['url'] = 'Hjemmeside';
$lang['address'] = 'Adresse';
$lang['date'] = 'Dato';

// comment form
$lang['entercomment'] = 'Skriv din kommentar';
$lang['wikisyntax'] = 'wiki syntax er tilladt';

// navigation between discussions
$lang['older'] = 'Ældre diskussioner';
$lang['newer'] = 'Nyere diskussioner';

// buttons
$lang['btn_reply'] = 'Svar';
$lang['btn_hide'] = 'Skjul';
$lang['btn_show'] = 'Vis';
$lang['btn_change'] = 'Gem';

// status / moderation messages
$lang['statuschanged'] = 'Diskussionsstatus ændret';
$lang['wordblock'] = 'Din kommentar blev blokeret som spam.';
$lang['moderation'] = 'Din kommentar blev tilføjet og afventer moderators accept.';
$lang['mail_newcomment'] = 'Ny kommentar';
$lang['subscribe'] = 'Tilmeld til kommentarer';
$lang['noguests'] = 'Du kunne skrive en kommentar hvis du var logget ind.';

// discussion states
$lang['status'] = 'Status';
$lang['off'] = 'deaktiveret';
$lang['open'] = 'åben';
$lang['closed'] = 'lukket';

// threads and replies
$lang['reply'] = 'Svar';
$lang['replies'] = 'Svar';
$lang['newthread'] = 'Ny tråd';
$lang['toggle_display'] = 'Vis/skjul';
package com.zfxf.douniu.activity.askstock;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.text.TextUtils;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.zfxf.douniu.R;
import com.zfxf.douniu.activity.ActivityAdvisorHome;
import com.zfxf.douniu.bean.IndexResult;
import com.zfxf.douniu.internet.NewsInternetRequest;
import com.zfxf.douniu.utils.CommonUtils;
import com.zfxf.douniu.utils.Constants;
import com.zfxf.douniu.utils.SpTools;
import butterknife.BindView;
import butterknife.ButterKnife;
import jp.wasabeef.glide.transformations.CropCircleTransformation;
/**
 * Answer detail screen of the "micro Q&A" (ask-stock) feature: shows one
 * question with the analyst's answer, and lets the user like the answer,
 * open the analyst's home page, or ask a follow-up question.
 *
 * @author IMXU
 * @time 2017/5/3 13:39
 * @des Micro Q&A — answer detail
 * Email: [email protected]
 */
public class ActivityAnswerDetail extends FragmentActivity implements View.OnClickListener{
    @BindView(R.id.iv_base_back)
    ImageView back;
    @BindView(R.id.iv_base_edit)
    ImageView edit;
    @BindView(R.id.iv_base_share)
    ImageView share;
    @BindView(R.id.tv_base_title)
    TextView title;
    @BindView(R.id.tv_answer_content)
    TextView content;      // the question text
    @BindView(R.id.tv_answer_time)
    TextView askTime;      // when the question was asked
    @BindView(R.id.tv_answer_name)
    TextView name;         // the answerer's (analyst's) name
    @BindView(R.id.tv_answer_answer_time)
    TextView answerTime;   // when the answer was posted
    @BindView(R.id.tv_answer_zan)
    TextView zan;          // like count
    @BindView(R.id.tv_answer_detail_answer_content)
    TextView answer_content;  // the answer text
    @BindView(R.id.tv_answer_detail_homepage)
    TextView homepage;     // "home page" action
    @BindView(R.id.tv_answer_detail_ask)
    TextView ask;          // "ask stock" action
    @BindView(R.id.ll_answer_zan)
    LinearLayout ll_zan;   // tap target wrapping the like counter
    @BindView(R.id.iv_answer_detail_img)
    ImageView mImageView;  // the answerer's avatar

    // id of the answer being shown, passed in via the launching Intent
    private String mId;
    // answerer (analyst) id, populated from the server response
    private String sx_id;
    // fee for asking this analyst, populated from the server response
    private String sx_fee;
    // "0" = not yet liked by this user, "1" = already liked
    private String mIsZan;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_answer_detail);
        ButterKnife.bind(this);
        title.setText("回答详情");
        edit.setVisibility(View.INVISIBLE);
        share.setVisibility(View.INVISIBLE);
        mId = getIntent().getStringExtra("id");
        initdata();
        initListener();
    }

    /** Kicks off the initial data load. */
    private void initdata() {
        visitInternet();
    }

    /**
     * Fetches the answer detail from the server and binds it to the views.
     * Shows a progress dialog while the request is in flight.
     */
    private void visitInternet() {
        CommonUtils.showProgressDialog(this,"加载中……");
        NewsInternetRequest.getAnswerDetailInformation(mId, new NewsInternetRequest.ForResultAnswerIndexListener() {
            @Override
            public void onResponseMessage(IndexResult result) {
                content.setText(result.context_info.zc_context);
                askTime.setText(result.context_info.zc_response_date);
                // circular avatar
                Glide.with(ActivityAnswerDetail.this).load(result.context_info.sx_url)
                        .bitmapTransform(new CropCircleTransformation(ActivityAnswerDetail.this))
                        .placeholder(R.drawable.home_adviosr_img).into(mImageView);
                name.setText(result.context_info.sx_ud_nickname);
                answerTime.setText(result.context_info.sx_answer_date);
                zan.setText("赞("+result.context_info.sx_count+")");
                answer_content.setText(result.context_info.sx_pl);
                sx_id = result.context_info.sx_ub_id;
                sx_fee = result.context_info.sx_fee;
                mIsZan = result.context_info.is_zan;
                // mark the item as read
                SpTools.setBoolean(ActivityAnswerDetail.this, Constants.read,true);
                CommonUtils.dismissProgressDialog();
            }
        });
    }

    /** Wires up the click handlers for back, home page, ask, and like. */
    private void initListener() {
        back.setOnClickListener(this);
        homepage.setOnClickListener(this);
        ask.setOnClickListener(this);
        ll_zan.setOnClickListener(this);
    }

    Intent intent;

    @Override
    public void onClick(View v) {
        switch (v.getId()){
            case R.id.iv_base_back:
                finishAll();
                finish();
                break;
            case R.id.tv_answer_detail_homepage:
                // open the analyst's home page
                intent = new Intent(CommonUtils.getContext(), ActivityAdvisorHome.class);
                intent.putExtra("id",Integer.parseInt(sx_id));
                startActivity(intent);
                overridePendingTransition(0,0);
                break;
            case R.id.ll_answer_zan:
                if(mIsZan.equals("0")){
                    // not liked yet: submit the like to the server and
                    // update the counter with the returned total
                    NewsInternetRequest.dianZanAnswer(Integer.parseInt(mId),new NewsInternetRequest.ForResultListener() {
                        @Override
                        public void onResponseMessage(String count) {
                            if(!TextUtils.isEmpty(count)){
                                zan.setText("赞("+count+")");
                                mIsZan = "1";
                            }
                        }
                    });
                }else{
                    // already liked — just notify the user
                    CommonUtils.toastMessage("您已经点过赞了");
                }
                break;
            case R.id.tv_answer_detail_ask:
                // open the "ask stock" screen pre-filled with this analyst
                intent = new Intent(CommonUtils.getContext(), ActivityAskStock.class);
                intent.putExtra("name",name.getText().toString());
                intent.putExtra("fee",sx_fee);
                intent.putExtra("sx_id",sx_id);
                startActivity(intent);
                overridePendingTransition(0,0);
                break;
        }
        // drop the reference once the click has been handled
        intent = null;
    }

    // NOTE(review): no-op — presumably intended to clean up related
    // activities before finishing; confirm whether this is dead code.
    private void finishAll() {
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
        // make sure a lingering progress dialog does not outlive the screen
        CommonUtils.dismissProgressDialog();
    }
}
| {'content_hash': '5615dc6f0dc9a9f77433d8ab1d1ec6ee', 'timestamp': '', 'source': 'github', 'line_count': 167, 'max_line_length': 121, 'avg_line_length': 34.50898203592814, 'alnum_prop': 0.6135693215339233, 'repo_name': 'chsimon99/myDouNiu', 'id': '43dd3e187b7744ba38ae95d4c89d27311221cf09', 'size': '5907', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'app/src/main/java/com/zfxf/douniu/activity/askstock/ActivityAnswerDetail.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '1951660'}]} |
module Permissify
  module Generators
    # Installs the Permissify helper files and view templates into the
    # host Rails application.
    class ViewsGenerator < Rails::Generators::Base
      source_root File.expand_path('../template', __FILE__)

      # Copies the permissions/roles helpers and their view directories
      # into the application.
      def generate_views
        %w(permissions roles).each do |resource|
          copy_file "#{resource}_helper.rb", "app/helpers/#{resource}_helper.rb"
        end
        %w(permissions roles).each do |resource|
          directory resource, "app/views/#{resource}"
        end
      end
    end
  end
end
| {'content_hash': '222f621f680e46e2b41b454bd585a554', 'timestamp': '', 'source': 'github', 'line_count': 14, 'max_line_length': 78, 'avg_line_length': 32.214285714285715, 'alnum_prop': 0.6651884700665188, 'repo_name': 'rickfix/permissify', 'id': '47b7090446c384dc6d2ae2a9c5f2931339d876b3', 'size': '451', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/generators/permissify/views/views_generator.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Ruby', 'bytes': '60703'}]} |
[](https://ci.appveyor.com/project/supergibbs/automaticsharp/branch/master)
[](https://www.nuget.org/packages/AutomaticSharp)
# AutomaticSharp
A C# client for Automatic's REST API.
## Usage
### Authentication
The [Automatic OAuth2 Workflow](https://developer.automatic.com/api-reference/#oauth-workflow) can be completed using the `Microsoft.AspNet.Authentication.OAuth` framework.
In a modern webapp you can add this to your `startup.cs`
```c#
app.UseAutomaticAuthentication(options =>
{
//Add Automatic API key
options.ClientId = Configuration["automatic:clientid"];
options.ClientSecret = Configuration["automatic:clientsecret"];
//Add desired scopes
options.AddScope(AutomaticScope.Public);
options.AddScope(AutomaticScope.UserProfile);
options.AddScope(AutomaticScope.Location);
options.AddScope(AutomaticScope.VehicleEvents);
options.AddScope(AutomaticScope.VehicleProfile);
options.AddScope(AutomaticScope.Trip);
options.AddScope(AutomaticScope.Behavior);
});
```
### Getting Data
Using the Client you can query for data from Automatic's REST API. More examples can be found in the demo application which can be [viewed here](http://automaticsharp.azurewebsites.net/).
```c#
var client = new Client(access_token);
var vehicles = (await client.GetVehiclesAsync()).Results;
```
| {'content_hash': '42da4f2a980def0512fbaf888e93532f', 'timestamp': '', 'source': 'github', 'line_count': 39, 'max_line_length': 187, 'avg_line_length': 40.48717948717949, 'alnum_prop': 0.7232425585813806, 'repo_name': 'TrueCar/AutomaticSharp', 'id': '8eca30bc25e1d4b37cee53cb7ad25ae047f6f20f', 'size': '1579', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C#', 'bytes': '82941'}, {'name': 'CSS', 'bytes': '289'}, {'name': 'JavaScript', 'bytes': '2882'}, {'name': 'PowerShell', 'bytes': '468'}]} |
package org.kaaproject.kaa.client.persistence;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
public class FilePersistentStorage implements PersistentStorage {

    /** Opens the file at {@code path} for reading. */
    @Override
    public InputStream openForRead(String path) throws IOException {
        return new FileInputStream(new File(path));
    }

    /**
     * Opens the file at {@code path} for writing, creating any missing parent
     * directories first so the write does not fail on a fresh install.
     */
    @Override
    public OutputStream openForWrite(String path) throws IOException {
        File target = new File(path);
        File parent = target.getParentFile();
        if (parent != null && !parent.exists()) {
            parent.mkdirs();
        }
        return new FileOutputStream(target);
    }

    /** Returns whether a file (or directory) exists at {@code path}. */
    @Override
    public boolean exists(String path) {
        return new File(path).exists();
    }

    /** Renames {@code oldPath} to {@code newPath}; returns whether the rename succeeded. */
    @Override
    public boolean renameTo(String oldPath, String newPath) throws IOException {
        return new File(oldPath).renameTo(new File(newPath));
    }
}
| {'content_hash': '3825f2ff0caa6635af3c31783e878ccf', 'timestamp': '', 'source': 'github', 'line_count': 40, 'max_line_length': 80, 'avg_line_length': 25.65, 'alnum_prop': 0.6734892787524367, 'repo_name': 'vzhukovskyi/kaa', 'id': 'c68366871d53bb945abab0048c40c933f4a88613', 'size': '1627', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'client/client-multi/client-java-core/src/main/java/org/kaaproject/kaa/client/persistence/FilePersistentStorage.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Arduino', 'bytes': '22520'}, {'name': 'C', 'bytes': '1018980'}, {'name': 'C++', 'bytes': '1255365'}, {'name': 'CMake', 'bytes': '54170'}, {'name': 'CSS', 'bytes': '18207'}, {'name': 'HTML', 'bytes': '4788'}, {'name': 'Java', 'bytes': '13789776'}, {'name': 'Makefile', 'bytes': '1467'}, {'name': 'Python', 'bytes': '128276'}, {'name': 'Shell', 'bytes': '153256'}, {'name': 'Thrift', 'bytes': '20997'}, {'name': 'XSLT', 'bytes': '4062'}]} |
namespace musik {
    namespace cube {
        /* Modal overlay that lets the user pick a new key binding for a single
         * hotkey. The chosen key is reported to the caller via a Callback. */
        class ReassignHotkeyOverlay:
            public cursespp::OverlayBase,
            public sigslot::has_slots<>
        {
            public:
                /* Invoked with the newly entered key string when the user confirms. */
                using Callback = std::function<void(std::string)>;

                /* Creates and displays the overlay for the given hotkey id. */
                static void Show(Hotkeys::Id id, Callback callback);

                virtual void Layout();
                virtual bool KeyPress(const std::string& key);

            private:
                /* Private: instances are created only through Show(). */
                ReassignHotkeyOverlay(Hotkeys::Id id, Callback callback);

                void RecalculateSize();
                void InitViews();

                Hotkeys::Id id;            /* hotkey being reassigned */
                Callback callback;         /* receives the chosen key string */
                int width, height, x, y;   /* computed overlay geometry */
                std::shared_ptr<cursespp::TextLabel> titleLabel, hotkeyLabel;
                std::shared_ptr<cursespp::TextInput> hotkeyInput;
                std::shared_ptr<cursespp::ShortcutsWindow> shortcuts;
        };
    }
}
| {'content_hash': '8101fea419fdbb6aba220cf2e0fd8f4d', 'timestamp': '', 'source': 'github', 'line_count': 29, 'max_line_length': 73, 'avg_line_length': 30.24137931034483, 'alnum_prop': 0.5701254275940707, 'repo_name': 'clangen/musikcube', 'id': '98d100fef73f26537ae74d4864229208dea14dab', 'size': '2839', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/musikcube/app/overlay/ReassignHotkeyOverlay.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Batchfile', 'bytes': '3733'}, {'name': 'C', 'bytes': '1105752'}, {'name': 'C++', 'bytes': '6345032'}, {'name': 'CMake', 'bytes': '99578'}, {'name': 'CSS', 'bytes': '5821'}, {'name': 'HTML', 'bytes': '1512'}, {'name': 'Java', 'bytes': '3869'}, {'name': 'JavaScript', 'bytes': '9001'}, {'name': 'Kotlin', 'bytes': '537477'}, {'name': 'Objective-C', 'bytes': '14694'}, {'name': 'Objective-C++', 'bytes': '7124'}, {'name': 'Python', 'bytes': '946'}, {'name': 'Shell', 'bytes': '43348'}]} |
<?php

// Form view for the Responsables model. Renders the create/update form split
// into two Bootstrap tabs ("Datos Principales" and "Datos de Adscripcion"),
// each of which delegates its fields to a partial view.

use yii\helpers\Html;
use yii\widgets\ActiveForm;
use kartik\tabs\TabsX; // NOTE(review): imported but unused in this view — confirm before removing.

/* @var $this yii\web\View */
/* @var $model app\models\Responsables */
/* @var $form yii\widgets\ActiveForm */
?>

<div class="container">
    <?php $form = ActiveForm::begin(); ?>
    <div class="tabbable">
        <!-- Tab headers -->
        <ul class="nav nav-tabs" id="myTab">
            <li class="active">
                <a data-toggle="tab" href="#home">
                    <i class="green ace-icon fa fa-cog "></i>
                    Datos Principales
                </a>
            </li>
            <li>
                <a data-toggle="tab" href="#messages">
                    <i class="blue ace-icon fa fa-code-fork "></i>
                    Datos de Adscripcion
                </a>
            </li>
        </ul>
        <!-- Tab bodies: each partial receives the model and the shared ActiveForm. -->
        <div class="tab-content">
            <div id="home" class="tab-pane fade in active">
                <?php
                // Primary data fields.
                echo Yii::$app->controller->renderPartial('basicos', ['model' => $model, 'form' => $form]);
                ?>
            </div>
            <div id="messages" class="tab-pane fade">
                <?php
                // Additional / affiliation data fields.
                echo Yii::$app->controller->renderPartial('adicionales', ['model' => $model, 'form' => $form]);
                ?>
            </div>
        </div>
    </div>
    <div class="form-group">
        <?= Html::submitButton('<i class="ace-icon fa fa-floppy-o bigger-120 blue"></i> Guardar', ['class' => 'btn btn-white btn-info btn-bold']) ?>
    </div>
    <?php ActiveForm::end(); ?>
</div>
| {'content_hash': 'aac016cf600d872400eabceaca232304', 'timestamp': '', 'source': 'github', 'line_count': 69, 'max_line_length': 148, 'avg_line_length': 25.985507246376812, 'alnum_prop': 0.4026770775237033, 'repo_name': 'hernangutier/bm', 'id': '6cc7f068e1e7d114d3846e3643af1fba44587580', 'size': '1793', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'views/responsables/_form.php', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Batchfile', 'bytes': '1030'}, {'name': 'HTML', 'bytes': '3595'}, {'name': 'JavaScript', 'bytes': '583185'}, {'name': 'PHP', 'bytes': '821650'}]} |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.docgen;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.docgen.annot.DocCategory;
import com.google.devtools.build.docgen.starlark.StarlarkBuiltinDoc;
import com.google.devtools.build.docgen.starlark.StarlarkDocUtils;
import com.google.devtools.build.docgen.starlark.StarlarkMethodDoc;
import com.google.devtools.build.lib.util.Classpath.ClassPathException;
import java.io.File;
import java.io.IOException;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
import net.starlark.java.annot.StarlarkBuiltin;
/** A class to assemble documentation for Starlark. */
public final class StarlarkDocumentationProcessor {
private static final ImmutableList<Category> GLOBAL_CATEGORIES =
ImmutableList.<Category>of(Category.NONE, Category.TOP_LEVEL_TYPE);
private StarlarkDocumentationProcessor() {}
/** Generates the Starlark documentation to the given output directory. */
public static void generateDocumentation(String outputDir, String... args)
throws IOException, ClassPathException {
parseOptions(args);
Map<String, StarlarkBuiltinDoc> modules =
new TreeMap<>(StarlarkDocumentationCollector.getAllModules());
// Generate the top level module first in the doc
StarlarkBuiltinDoc topLevelModule =
modules.remove(StarlarkDocumentationCollector.getTopLevelModule().name());
writePage(outputDir, topLevelModule);
// Use a LinkedHashMap to preserve ordering of categories, as the output iterates over
// this map's entry set to determine category ordering.
Map<Category, List<StarlarkBuiltinDoc>> modulesByCategory = new LinkedHashMap<>();
for (Category c : Category.values()) {
modulesByCategory.put(c, new ArrayList<StarlarkBuiltinDoc>());
}
modulesByCategory.get(Category.of(topLevelModule.getAnnotation())).add(topLevelModule);
for (StarlarkBuiltinDoc module : modules.values()) {
if (module.getAnnotation().documented()) {
writePage(outputDir, module);
modulesByCategory.get(Category.of(module.getAnnotation())).add(module);
}
}
Collator us = Collator.getInstance(Locale.US);
for (List<StarlarkBuiltinDoc> module : modulesByCategory.values()) {
Collections.sort(module, (doc1, doc2) -> us.compare(doc1.getTitle(), doc2.getTitle()));
}
writeCategoryPage(Category.CORE, outputDir, modulesByCategory);
writeCategoryPage(Category.CONFIGURATION_FRAGMENT, outputDir, modulesByCategory);
writeCategoryPage(Category.BUILTIN, outputDir, modulesByCategory);
writeCategoryPage(Category.PROVIDER, outputDir, modulesByCategory);
writeNavPage(outputDir, modulesByCategory.get(Category.TOP_LEVEL_TYPE));
// In the code, there are two StarlarkModuleCategory instances that have no heading:
// TOP_LEVEL_TYPE and NONE.
// TOP_LEVEL_TYPE also contains the "global" module.
// We remove both categories and the "global" module from the map and display them manually:
// - Methods in the "global" module are displayed under "Global Methods and Constants".
// - Modules in both categories are displayed under "Global Modules" (except for the global
// module itself).
List<String> globalFunctions = new ArrayList<>();
List<String> globalConstants = new ArrayList<>();
StarlarkBuiltinDoc globalModule = findGlobalModule(modulesByCategory);
for (StarlarkMethodDoc method : globalModule.getMethods()) {
if (method.documented()) {
if (method.isCallable()) {
globalFunctions.add(method.getName());
} else {
globalConstants.add(method.getName());
}
}
}
List<StarlarkBuiltinDoc> globalModules = new ArrayList<>();
for (Category globalCategory : GLOBAL_CATEGORIES) {
for (StarlarkBuiltinDoc module : modulesByCategory.remove(globalCategory)) {
if (!module.getName().equals(globalModule.getName())) {
globalModules.add(module);
}
}
}
Collections.sort(globalModules, (doc1, doc2) -> us.compare(doc1.getName(), doc2.getName()));
writeOverviewPage(
outputDir,
globalModule.getName(),
globalFunctions,
globalConstants,
globalModules,
modulesByCategory);
}
private static StarlarkBuiltinDoc findGlobalModule(
Map<Category, List<StarlarkBuiltinDoc>> modulesByCategory) {
List<StarlarkBuiltinDoc> topLevelModules = modulesByCategory.get(Category.TOP_LEVEL_TYPE);
String globalModuleName = StarlarkDocumentationCollector.getTopLevelModule().name();
for (StarlarkBuiltinDoc module : topLevelModules) {
if (module.getName().equals(globalModuleName)) {
return module;
}
}
throw new IllegalStateException("No globals module in the top level category.");
}
private static void writePage(String outputDir, StarlarkBuiltinDoc module) throws IOException {
File starlarkDocPath = new File(outputDir + "/" + module.getName() + ".html");
Page page = TemplateEngine.newPage(DocgenConsts.STARLARK_LIBRARY_TEMPLATE);
page.add("module", module);
page.write(starlarkDocPath);
}
private static void writeCategoryPage(
Category category, String outputDir, Map<Category, List<StarlarkBuiltinDoc>> modules)
throws IOException {
File starlarkDocPath =
new File(String.format("%s/skylark-%s.html", outputDir, category.getTemplateIdentifier()));
Page page = TemplateEngine.newPage(DocgenConsts.STARLARK_MODULE_CATEGORY_TEMPLATE);
page.add("category", category);
page.add("modules", modules.get(category));
page.add("description", StarlarkDocUtils.substituteVariables(category.description));
page.write(starlarkDocPath);
}
private static void writeNavPage(String outputDir, List<StarlarkBuiltinDoc> navModules)
throws IOException {
File navFile = new File(outputDir + "/skylark-nav.html");
Page page = TemplateEngine.newPage(DocgenConsts.STARLARK_NAV_TEMPLATE);
page.add("modules", navModules);
page.write(navFile);
}
private static void writeOverviewPage(
String outputDir,
String globalModuleName,
List<String> globalFunctions,
List<String> globalConstants,
List<StarlarkBuiltinDoc> globalModules,
Map<Category, List<StarlarkBuiltinDoc>> modulesPerCategory)
throws IOException {
File starlarkDocPath = new File(outputDir + "/skylark-overview.html");
Page page = TemplateEngine.newPage(DocgenConsts.STARLARK_OVERVIEW_TEMPLATE);
page.add("global_name", globalModuleName);
page.add("global_functions", globalFunctions);
page.add("global_constants", globalConstants);
page.add("global_modules", globalModules);
page.add("modules", modulesPerCategory);
page.write(starlarkDocPath);
}
private static void parseOptions(String... args) {
for (String arg : args) {
if (arg.startsWith("--be_root=")) {
DocgenConsts.BeDocsRoot = arg.split("--be_root=", 2)[1];
}
if (arg.startsWith("--doc_extension=")) {
DocgenConsts.documentationExtension = arg.split("--doc_extension=", 2)[1];
}
}
}
/**
* An enumeration of categories used to organize the API index. Instances of this class are
* accessed by templates, using reflection.
*/
public enum Category {
CONFIGURATION_FRAGMENT(
"Configuration Fragments",
"Configuration fragments give rules access to "
+ "language-specific parts of <a href=\"configuration.html\">"
+ "configuration</a>. "
+ "<p>Rule implementations can get them using "
+ "<code><a href=\"ctx.html#fragments\">ctx."
+ "fragments</a>.<i>[fragment name]</i></code>"),
PROVIDER(
"Providers",
"This section lists providers available on built-in rules. See the "
+ "<a href='../rules.$DOC_EXT#providers'>Rules page</a> for more on providers."),
BUILTIN("Built-in Types", "This section lists types of Starlark objects."),
// Used for top-level modules of functions in the global namespace. Such modules will always
// be usable solely by accessing their members, via modulename.funcname() or
// modulename.constantname.
// Examples: attr, cc_common, config, java_common
TOP_LEVEL_TYPE(null, null),
CORE(
"Core Starlark data types",
"This section lists the data types of the <a"
+ " href='https://github.com/bazelbuild/starlark/blob/master/spec.md#built-in-constants-and-functions'>Starlark"
+ " core language</a>."),
// Legacy uncategorized type; these are treated like TOP_LEVEL_TYPE in documentation.
NONE(null, null);
// Maps (essentially free-form) strings in annotations to permitted categories.
static Category of(StarlarkBuiltin annot) {
switch (annot.category()) {
case DocCategory.CONFIGURATION_FRAGMENT:
return CONFIGURATION_FRAGMENT;
case DocCategory.PROVIDER:
return PROVIDER;
case DocCategory.BUILTIN:
return BUILTIN;
case DocCategory.TOP_LEVEL_TYPE:
return TOP_LEVEL_TYPE;
case DocCategory.NONE:
return NONE;
case "core": // interpreter built-ins (e.g. int)
return CORE;
case "": // no annotation
return TOP_LEVEL_TYPE;
default:
throw new IllegalStateException(
String.format(
"docgen does not recognize DocCategory '%s' for StarlarkBuiltin '%s'",
annot.category(), annot.name()));
}
}
private Category(String title, String description) {
this.title = title;
this.description = description;
}
private final String title;
private final String description;
public String getTitle() {
return title;
}
public String getDescription() {
return description;
}
public String getTemplateIdentifier() {
return name().toLowerCase().replace("_", "-");
}
}
}
| {'content_hash': 'a91ee32b0e4b209c28b61fbba30890ba', 'timestamp': '', 'source': 'github', 'line_count': 270, 'max_line_length': 124, 'avg_line_length': 40.022222222222226, 'alnum_prop': 0.69702017397742, 'repo_name': 'davidzchen/bazel', 'id': '49ebafe1af5d8f70b2e69b79b82dc7818ad32ee5', 'size': '10806', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/com/google/devtools/build/docgen/StarlarkDocumentationProcessor.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '2997'}, {'name': 'C', 'bytes': '25475'}, {'name': 'C++', 'bytes': '1545155'}, {'name': 'Dockerfile', 'bytes': '1195'}, {'name': 'HTML', 'bytes': '21431'}, {'name': 'Java', 'bytes': '36685603'}, {'name': 'Makefile', 'bytes': '248'}, {'name': 'Objective-C', 'bytes': '10369'}, {'name': 'Objective-C++', 'bytes': '1043'}, {'name': 'PowerShell', 'bytes': '15431'}, {'name': 'Python', 'bytes': '2588113'}, {'name': 'Ruby', 'bytes': '152'}, {'name': 'Shell', 'bytes': '2062992'}, {'name': 'Smarty', 'bytes': '18679'}, {'name': 'Starlark', 'bytes': '45680'}]} |
/**
* Copyright 2015 StreamSets Inc.
*
* Licensed under the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.util;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Abstraction over an external operating-system process: starting it, reading
 * its captured output, and controlling its lifecycle.
 *
 * <p>Interface members are implicitly public, so the redundant modifiers were
 * removed; a stray space before a semicolon was also fixed.
 */
public interface SystemProcess {

  /** Starts the process with the inherited environment. */
  void start() throws IOException;

  /** Starts the process with the given environment variables. */
  void start(Map<String, String> env) throws IOException;

  /** Returns a printable representation of the command being executed. */
  String getCommand();

  /** Returns true if the process has started and has not yet terminated. */
  boolean isAlive();

  /** Releases any resources held on behalf of the process. */
  void cleanup();

  /** Returns all captured output lines. */
  Collection<String> getAllOutput();

  /** Returns all captured error lines. */
  Collection<String> getAllError();

  /**
   * Returns captured output lines; presumably only those not returned by a
   * previous call, in contrast to {@link #getAllOutput()} — confirm against
   * implementations.
   */
  Collection<String> getOutput();

  /** Returns captured error lines; see {@link #getOutput()} for the presumed contrast. */
  Collection<String> getError();

  /**
   * Requests termination, forcibly killing the process if it is still alive
   * after the given timeout (units defined by the implementation — presumably
   * milliseconds; confirm).
   */
  void kill(long timeoutBeforeForceKill);

  /** Returns the exit code of the terminated process. */
  int exitValue();

  /** Waits up to the given timeout for termination; returns true if the process exited. */
  boolean waitFor(long timeout, TimeUnit unit);
}
| {'content_hash': 'eabd65a6210ac0531bc3a241c57e81cc', 'timestamp': '', 'source': 'github', 'line_count': 53, 'max_line_length': 75, 'avg_line_length': 29.07547169811321, 'alnum_prop': 0.754055807916937, 'repo_name': 'z123/datacollector', 'id': '9ff3e74ebaf0850e91302b6ee7ccdab894c805d5', 'size': '2139', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'bootstrap/src/main/java/com/streamsets/pipeline/util/SystemProcess.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ANTLR', 'bytes': '89702'}, {'name': 'CSS', 'bytes': '124298'}, {'name': 'Groovy', 'bytes': '11876'}, {'name': 'HTML', 'bytes': '494482'}, {'name': 'Java', 'bytes': '17299310'}, {'name': 'JavaScript', 'bytes': '1013775'}, {'name': 'Protocol Buffer', 'bytes': '3463'}, {'name': 'Python', 'bytes': '16912'}, {'name': 'Scala', 'bytes': '6805'}, {'name': 'Shell', 'bytes': '28783'}]} |
// UMD-style bootstrap: resolve the assert library and the tokenizer under
// CommonJS, AMD, or plain browser globals, then hand both to the test suite.
(function (run) {
    var hasCommonJs = typeof module !== 'undefined' && module && module.exports;
    if (hasCommonJs) {
        // Node / CommonJS
        run(require('assert'), require('../tokenizer.js'));
    } else if (typeof define === 'function' && define.amd) {
        // Require.js & AMD
        define(['chai', 'retoken'], function (chai, retoken) {
            run(chai.assert, retoken);
        });
    } else {
        // Browser globals: register the suite, then start mocha manually.
        run(window.assert, window.retoken);
        mocha.checkLeaks();
        mocha.run();
    }
})(function (assert, tokenizer) {
    describe('--', function () {
        describe('.toArray', function () {
            it('should convert the tokenizer to an array', function () {
                var tok = tokenizer(' ').push('a b c').extract(2);
                var asArray = tok.toArray();
                assert.ok(asArray instanceof Array);
                assert.equal(asArray.length, tok.length);
                assert.equal(asArray[0], 'a');
                assert.equal(asArray[1], 'b');
                assert.equal(asArray[2], 'c');
            });
        });
    });
});
| {'content_hash': '4b5d73a4472d9d90bd06fccf657d8ded', 'timestamp': '', 'source': 'github', 'line_count': 38, 'max_line_length': 79, 'avg_line_length': 24.789473684210527, 'alnum_prop': 0.5445859872611465, 'repo_name': 'dicksont/retoken', 'id': 'ff1a655d615cdce6b7f3c2728bde4d01fa7f181b', 'size': '2069', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'test/test_toarray.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '4014'}, {'name': 'JavaScript', 'bytes': '37368'}]} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>coqeal: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.6.1 / coqeal - 1.0.2</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
coqeal
<small>
1.0.2
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-10-26 02:16:09 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-26 02:16:09 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-ocamlbuild base OCamlbuild binary and libraries distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.6.1 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.02.3 The OCaml compiler (virtual package)
ocaml-base-compiler 4.02.3 Official 4.02.3 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "Cyril Cohen <[email protected]>"
homepage: "https://github.com/coq-community/coqeal"
dev-repo: "git+https://github.com/coq-community/coqeal.git"
bug-reports: "https://github.com/coq-community/coqeal/issues"
license: "MIT"
synopsis: "CoqEAL - The Coq Effective Algebra Library"
description: """
This Coq library contains a subset of the work that was developed in the context
of the ForMath EU FP7 project (2009-2013). It has two parts:
- theory, which contains developments in algebra and optimized algorithms on mathcomp data structures.
- refinements, which is a framework to ease change of data representations during a proof."""
build: [make "-j%{jobs}%"]
install: [make "install"]
depends: [
"coq" {>= "8.7" & < "8.12~"}
"coq-bignums" {>= "8.7" & < "8.12~"}
"coq-paramcoq" {>= "1.1.1"}
"coq-mathcomp-multinomials" {>= "1.4" & < "1.5~"}
"coq-mathcomp-algebra" {>= "1.9.0" & < "1.10~"}
]
tags: [
"category:Computer Science/Decision Procedures and Certified Algorithms/Correctness proofs of algorithms"
"keyword:effective algebra"
"keyword:elementary divisor rings"
"keyword:Smith normal form"
"keyword:mathematical components"
"keyword:Bareiss"
"keyword:Karatsuba multiplication"
"keyword:refinements"
"logpath:CoqEAL"
]
authors: [
"Guillaume Cano"
"Cyril Cohen"
"Maxime Dénès"
"Anders Mörtberg"
"Vincent Siles"
]
url {
src: "https://github.com/coq-community/coqeal/archive/refs/tags/1.0.2.tar.gz"
checksum: "sha512=bb9bb39026064ef627c07f2f7aaa2c13cac5d6cbc36efe2eaedff231784d23d3787ecf339bea423a182bbb5f829f55fcd3a8648ffdefdc87f7bb2e3c4cf74aef"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-coqeal.1.0.2 coq.8.6.1</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.6.1).
The following dependencies couldn't be met:
- coq-coqeal -> coq >= 8.7 -> ocaml >= 4.05.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-coqeal.1.0.2</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {'content_hash': '6e339e959e8445d7cc3ae1dca967aa39', 'timestamp': '', 'source': 'github', 'line_count': 187, 'max_line_length': 159, 'avg_line_length': 42.86096256684492, 'alnum_prop': 0.5689332501559575, 'repo_name': 'coq-bench/coq-bench.github.io', 'id': '08495b8743dc5b5f282a6d07a4757bcd123aea33', 'size': '8043', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'clean/Linux-x86_64-4.02.3-2.0.6/released/8.6.1/coqeal/1.0.2.html', 'mode': '33188', 'license': 'mit', 'language': []} |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.