text
stringlengths 2
100k
| meta
dict |
---|---|
package pbservice
// Reply status values used by the primary/backup key/value service RPCs.
const (
OK = "OK"
ErrNoKey = "ErrNoKey"
ErrWrongServer = "ErrWrongServer"
)
// Err is the status string carried in every RPC reply (one of the
// constants above).
type Err string
// Put or Append
// PutAppendArgs carries the arguments for a Put or Append RPC.
type PutAppendArgs struct {
Key string
Value string
// You'll have to add definitions here.
// Field names must start with capital letters,
// otherwise RPC will break.
}
// PutAppendReply reports the outcome of a Put or Append RPC.
type PutAppendReply struct {
Err Err
}
// GetArgs carries the key to look up in a Get RPC.
type GetArgs struct {
Key string
// You'll have to add definitions here.
}
// GetReply reports the outcome of a Get RPC and, on success, the value
// stored under the requested key.
type GetReply struct {
Err Err
Value string
}
// Your RPC definitions here.
| {
"pile_set_name": "Github"
} |
<?php defined('SYSPATH') OR die('No direct access allowed.');
// Dutch translation string: "Query methods cannot be used via ORM".
$lang['query_methods_not_allowed'] = 'Query methods kunnen niet gebruikt worden via ORM'; | {
"pile_set_name": "Github"
} |
# created by tools/tclZIC.tcl - do not edit
set TZData(:America/Lima) {
{-9223372036854775808 -18492 0 LMT}
{-2524503108 -18516 0 LMT}
{-1938538284 -14400 0 -05}
{-1002052800 -18000 0 -05}
{-986756400 -14400 1 -05}
{-971035200 -18000 0 -05}
{-955306800 -14400 1 -05}
{-939585600 -18000 0 -05}
{512712000 -18000 0 -05}
{544248000 -18000 0 -05}
{638942400 -18000 0 -05}
{765172800 -18000 0 -05}
}
| {
"pile_set_name": "Github"
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Παράρτημα A. Γλώσσες - Κατάλογος με κωδικούς κατά ISO 639</title>
<link rel="stylesheet" type="text/css" href="OmegaT.css">
<meta name="generator" content="DocBook XSL Stylesheets V1.79.1">
<link rel="home" href="index.html" title="OmegaT - Εγχειρίδιο χρήστη">
<link rel="up" href="index.html" title="OmegaT - Εγχειρίδιο χρήστη">
<link rel="prev" href="chapter.misc.html" title="Κεφάλαιο 20. Διάφορα θέματα">
<link rel="next" href="appendix.keyboard.html" title="Παράρτημα B. Συντμεύσεις πληκτρολογίου στο πρόγραμμα επεξεργασίας (editor)">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<div class="navheader">
<table width="100%" summary="Navigation header">
<tr>
<th colspan="3" align="center">Παράρτημα A. Γλώσσες - Κατάλογος με κωδικούς κατά ISO 639
</th>
</tr>
<tr>
<td width="20%" align="left"><a accesskey="p" href="chapter.misc.html">Προηγ</a>
</td>
<th width="60%" align="center"> </th>
<td width="20%" align="right"> <a accesskey="n" href="appendix.keyboard.html">Επόμενο</a></td>
</tr>
</table>
<hr>
</div>
<div class="appendix">
<div class="titlepage">
<div>
<div>
<h1 class="title"><a name="appendix.languages"></a>Παράρτημα A. Γλώσσες - Κατάλογος με κωδικούς κατά ISO 639<a class="indexterm" name="d0e8641"></a>
<a class="indexterm" name="d0e8645"></a>
<a class="indexterm" name="d0e8649"></a>
</h1>
</div>
</div>
</div>
<p>Παρακαλώ ελέγξτε τους<a class="ulink" href="http://www.sil.org/ISO639-3/codes.asp" target="_top"> Πίνακες με τους κωδικούς κατά ISO 639</a> για περισσότερες και πιο πρόσφατες πληροφορίες σχετικά με τους κωδικούς γλώσσας.
</p>
<div class="table"><a name="table.language.code.list"></a><p class="title"><b>Πίνακας A.1. Κατάλογος κωδικών γλώσσας κατά ISO 639-1/639-2</b></p>
<div class="table-contents">
<table class="table" summary="Κατάλογος κωδικών γλώσσας κατά ISO 639-1/639-2" border="1">
<colgroup>
<col align="left">
<col align="left">
<col align="left">
</colgroup>
<thead>
<tr>
<th align="left">Όνομα γλώσσας</th>
<th align="left">ISO 639-1</th>
<th align="left">ISO 639-2</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Αμπχαζιανά</td>
<td align="left">ab</td>
<td align="left">abk</td>
</tr>
<tr>
<td align="left">Αφάρ</td>
<td align="left">aa</td>
<td align="left">aar</td>
</tr>
<tr>
<td align="left">Αφρικάνς, Afrikaans</td>
<td align="left">af</td>
<td align="left">afr</td>
</tr>
<tr>
<td align="left">Ακάν</td>
<td align="left">ak</td>
<td align="left">aka</td>
</tr>
<tr>
<td align="left">Αλβανικά</td>
<td align="left">sq</td>
<td align="left">sqi</td>
</tr>
<tr>
<td align="left">Αμχαρικά</td>
<td align="left">am</td>
<td align="left">amh</td>
</tr>
<tr>
<td align="left">Αραβικά</td>
<td align="left">ar</td>
<td align="left">ara</td>
</tr>
<tr>
<td align="left">Αραγονέζικα</td>
<td align="left">an</td>
<td align="left">arg</td>
</tr>
<tr>
<td align="left">Αρμενικά</td>
<td align="left">hy</td>
<td align="left">hye</td>
</tr>
<tr>
<td align="left">Ασσαμέζικα, Assamese</td>
<td align="left">as</td>
<td align="left">asm</td>
</tr>
<tr>
<td align="left">Αβαρικά, Avaric</td>
<td align="left">av</td>
<td align="left">ava</td>
</tr>
<tr>
<td align="left">Αβεστανικά, Avestan</td>
<td align="left">ae</td>
<td align="left">ave</td>
</tr>
<tr>
<td align="left">Αϋμάρα, Aymara</td>
<td align="left">ay</td>
<td align="left">aym</td>
</tr>
<tr>
<td align="left">Αζέρικα</td>
<td align="left">az</td>
<td align="left">aze</td>
</tr>
<tr>
<td align="left">Μπαμπάρα</td>
<td align="left">bm</td>
<td align="left">bam</td>
</tr>
<tr>
<td align="left">Μπασκίρ, Bashkir</td>
<td align="left">ba</td>
<td align="left">bak</td>
</tr>
<tr>
<td align="left">Βασκικά</td>
<td align="left">eu</td>
<td align="left">eus</td>
</tr>
<tr>
<td align="left">Λευκορωσικά</td>
<td align="left">be</td>
<td align="left">bel</td>
</tr>
<tr>
<td align="left">Μπενγκαλέζικα</td>
<td align="left">bn</td>
<td align="left">ben</td>
</tr>
<tr>
<td align="left">Μπιχάρι, Bihari</td>
<td align="left">bh</td>
<td align="left">bih</td>
</tr>
<tr>
<td align="left">Μπισλάμα</td>
<td align="left">bi</td>
<td align="left">bis</td>
</tr>
<tr>
<td align="left">Βοσνιακά</td>
<td align="left">bs</td>
<td align="left">bos</td>
</tr>
<tr>
<td align="left">Βρετονικά, Breton</td>
<td align="left">br</td>
<td align="left">bre</td>
</tr>
<tr>
<td align="left">Βουλγαρικά</td>
<td align="left">bg</td>
<td align="left">bul</td>
</tr>
<tr>
<td align="left">Μπουρμέζικα, Burmese</td>
<td align="left">my</td>
<td align="left">mya</td>
</tr>
<tr>
<td align="left">Καταλανικά</td>
<td align="left">ca</td>
<td align="left">cat</td>
</tr>
<tr>
<td align="left">Τσαμόρο, Chamorro</td>
<td align="left">ch</td>
<td align="left">cha</td>
</tr>
<tr>
<td align="left">Τσετσενικά</td>
<td align="left">ce</td>
<td align="left">che</td>
</tr>
<tr>
<td align="left">Τσιτσέβα, Τσέβα, Νυάντζα</td>
<td align="left">ny</td>
<td align="left">nya</td>
</tr>
<tr>
<td align="left">Κινεζικά</td>
<td align="left">zh</td>
<td align="left">zho</td>
</tr>
<tr>
<td align="left">Τσουβάς, Chuvash</td>
<td align="left">cv</td>
<td align="left">chv</td>
</tr>
<tr>
<td align="left">Cornish</td>
<td align="left">kw</td>
<td align="left">cor</td>
</tr>
<tr>
<td align="left">Κορσικανικά</td>
<td align="left">co</td>
<td align="left">cos</td>
</tr>
<tr>
<td align="left">Cree</td>
<td align="left">cr</td>
<td align="left">cre</td>
</tr>
<tr>
<td align="left">Κροατικά</td>
<td align="left">hr</td>
<td align="left">hrv</td>
</tr>
<tr>
<td align="left">Τσεχικά</td>
<td align="left">cs</td>
<td align="left">ces</td>
</tr>
<tr>
<td align="left">Δανέζικα</td>
<td align="left">da</td>
<td align="left">dan</td>
</tr>
<tr>
<td align="left">Ντιβέχι, Ντιβέχι, Μαλδιβιανά</td>
<td align="left">dv</td>
<td align="left">div</td>
</tr>
<tr>
<td align="left">Ολανδικά</td>
<td align="left">nl</td>
<td align="left">nld</td>
</tr>
<tr>
<td align="left">Τζόνγκα, Dzongkha</td>
<td align="left">dz</td>
<td align="left">dzo</td>
</tr>
<tr>
<td align="left">Αγγλικά</td>
<td align="left">en</td>
<td align="left">eng</td>
</tr>
<tr>
<td align="left">Εσπεράντο</td>
<td align="left">eo</td>
<td align="left">epo</td>
</tr>
<tr>
<td align="left">Εσθονικά</td>
<td align="left">et</td>
<td align="left">est</td>
</tr>
<tr>
<td align="left">Ewe</td>
<td align="left">ee</td>
<td align="left">ewe</td>
</tr>
<tr>
<td align="left">Faroese</td>
<td align="left">fo</td>
<td align="left">fao</td>
</tr>
<tr>
<td align="left">Fijian</td>
<td align="left">fj</td>
<td align="left">fij</td>
</tr>
<tr>
<td align="left">Φινλανδικά</td>
<td align="left">fi</td>
<td align="left">fin</td>
</tr>
<tr>
<td align="left">Γαλλικά</td>
<td align="left">fr</td>
<td align="left">fra</td>
</tr>
<tr>
<td align="left">Φούλα, Φούλα, Πουλάαρ, Πουλάρ</td>
<td align="left">ff</td>
<td align="left">ful</td>
</tr>
<tr>
<td align="left">Γαλικιανά</td>
<td align="left">gl</td>
<td align="left">glg</td>
</tr>
<tr>
<td align="left">Γεωργιανά</td>
<td align="left">ka</td>
<td align="left">kat</td>
</tr>
<tr>
<td align="left">Γερμανικά</td>
<td align="left">de</td>
<td align="left">deu</td>
</tr>
<tr>
<td align="left">Ελληνικά, Νέα</td>
<td align="left">el</td>
<td align="left">ell</td>
</tr>
<tr>
<td align="left">Γκουαρανί</td>
<td align="left">gn</td>
<td align="left">grn</td>
</tr>
<tr>
<td align="left">Γκουτζαράτι</td>
<td align="left">gu</td>
<td align="left">guj</td>
</tr>
<tr>
<td align="left">Αϊτιανά, Κρεολικά Αϊτιανά</td>
<td align="left">ht</td>
<td align="left">hat</td>
</tr>
<tr>
<td align="left">Χάουζα, Hausa</td>
<td align="left">ha</td>
<td align="left">hau</td>
</tr>
<tr>
<td align="left">Εβραϊκά (σύγχρονα)</td>
<td align="left">he</td>
<td align="left">heb</td>
</tr>
<tr>
<td align="left">Χερέρο</td>
<td align="left">hz</td>
<td align="left">her</td>
</tr>
<tr>
<td align="left">Χίντι, Hindi</td>
<td align="left">hi</td>
<td align="left">hin</td>
</tr>
<tr>
<td align="left">Χίρι Μότου</td>
<td align="left">ho</td>
<td align="left">hmo</td>
</tr>
<tr>
<td align="left">Ουγγρικά</td>
<td align="left">hu</td>
<td align="left">hun</td>
</tr>
<tr>
<td align="left">Ιντερλίνγκουα</td>
<td align="left">ia</td>
<td align="left">ina</td>
</tr>
<tr>
<td align="left">Ινδονησιακά</td>
<td align="left">id</td>
<td align="left">ind</td>
</tr>
<tr>
<td align="left">Interlingue</td>
<td align="left">ie</td>
<td align="left">ile</td>
</tr>
<tr>
<td align="left">Ιρλανδικά</td>
<td align="left">ga</td>
<td align="left">gle</td>
</tr>
<tr>
<td align="left">Ίγκμπο</td>
<td align="left">ig</td>
<td align="left">ibo</td>
</tr>
<tr>
<td align="left">Ινούπιακ</td>
<td align="left">ik</td>
<td align="left">ipk</td>
</tr>
<tr>
<td align="left">Ίντο, Ido</td>
<td align="left">io</td>
<td align="left">ido</td>
</tr>
<tr>
<td align="left">Ισλανδικά</td>
<td align="left">is</td>
<td align="left">isl</td>
</tr>
<tr>
<td align="left">Ιταλικά</td>
<td align="left">it</td>
<td align="left">ita</td>
</tr>
<tr>
<td align="left">Ινουκτίτουτ</td>
<td align="left">iu</td>
<td align="left">iku</td>
</tr>
<tr>
<td align="left">Ιαπωνικά</td>
<td align="left">ja</td>
<td align="left">jpn</td>
</tr>
<tr>
<td align="left">Ιαβαϊκά</td>
<td align="left">jv</td>
<td align="left">jav</td>
</tr>
<tr>
<td align="left">Καλάλισουτ, Γροιλανδικά</td>
<td align="left">kl</td>
<td align="left">kal</td>
</tr>
<tr>
<td align="left">Κάναντα, Kannada</td>
<td align="left">kn</td>
<td align="left">kan</td>
</tr>
<tr>
<td align="left">Κανούρι, Kanuri</td>
<td align="left">kr</td>
<td align="left">kau</td>
</tr>
<tr>
<td align="left">Κασμιριανά</td>
<td align="left">ks</td>
<td align="left">kas</td>
</tr>
<tr>
<td align="left">Καζακικά</td>
<td align="left">kk</td>
<td align="left">kaz</td>
</tr>
<tr>
<td align="left">Χμέρ, Khmer</td>
<td align="left">km</td>
<td align="left">khm</td>
</tr>
<tr>
<td align="left">Κικούγιου, Γκικούγιου</td>
<td align="left">ki</td>
<td align="left">kik</td>
</tr>
<tr>
<td align="left">Κινυαργουάντα, Kinyarwanda</td>
<td align="left">rw</td>
<td align="left">kin</td>
</tr>
<tr>
<td align="left">Κιργιζιανά, Κιργιζία</td>
<td align="left">ky</td>
<td align="left">kir</td>
</tr>
<tr>
<td align="left">Κόμι</td>
<td align="left">kv</td>
<td align="left">kom</td>
</tr>
<tr>
<td align="left">Κόγκο</td>
<td align="left">kg</td>
<td align="left">kon</td>
</tr>
<tr>
<td align="left">Κορεατικά</td>
<td align="left">ko</td>
<td align="left">kor</td>
</tr>
<tr>
<td align="left">Κουρδικά</td>
<td align="left">ku</td>
<td align="left">kur</td>
</tr>
<tr>
<td align="left">Κουανιάμα, Κουανιάμα</td>
<td align="left">kj</td>
<td align="left">kua</td>
</tr>
<tr>
<td align="left">Λατινικά</td>
<td align="left">la</td>
<td align="left">lat</td>
</tr>
<tr>
<td align="left">Λουξεμβουργιανά, Letzeburgesch</td>
<td align="left">lb</td>
<td align="left">ltz</td>
</tr>
<tr>
<td align="left">Λουγκάντα</td>
<td align="left">lg</td>
<td align="left">lug</td>
</tr>
<tr>
<td align="left">Λιμβουργιανά, Limburgan, Limburger</td>
<td align="left">li</td>
<td align="left">lim</td>
</tr>
<tr>
<td align="left">Λινγκάλα</td>
<td align="left">ln</td>
<td align="left">lin</td>
</tr>
<tr>
<td align="left">Λάο</td>
<td align="left">lo</td>
<td align="left">lao</td>
</tr>
<tr>
<td align="left">Λιθουανικά</td>
<td align="left">lt</td>
<td align="left">lit</td>
</tr>
<tr>
<td align="left">Λούμπα-Κατάνγκα</td>
<td align="left">lu</td>
<td align="left">lub</td>
</tr>
<tr>
<td align="left">Λατβιανά</td>
<td align="left">lv</td>
<td align="left">lav</td>
</tr>
<tr>
<td align="left">Μάνξ</td>
<td align="left">gv</td>
<td align="left">glv</td>
</tr>
<tr>
<td align="left">Σλαβομακεδονικά</td>
<td align="left">mk</td>
<td align="left">mkd</td>
</tr>
<tr>
<td align="left">Μαλαγάσυ</td>
<td align="left">mg</td>
<td align="left">mlg</td>
</tr>
<tr>
<td align="left">Μαλαισιανά</td>
<td align="left">ms</td>
<td align="left">msa</td>
</tr>
<tr>
<td align="left">Μαλαγιάλαμ</td>
<td align="left">ml</td>
<td align="left">mal</td>
</tr>
<tr>
<td align="left">Μαλτεζικά</td>
<td align="left">mt</td>
<td align="left">mlt</td>
</tr>
<tr>
<td align="left">Μαορί</td>
<td align="left">mi</td>
<td align="left">mri</td>
</tr>
<tr>
<td align="left">Μαράθι (Marāṭhī)</td>
<td align="left">mr</td>
<td align="left">mar</td>
</tr>
<tr>
<td align="left">Μαρσαλιανά, Marshallese</td>
<td align="left">mh</td>
<td align="left">mah</td>
</tr>
<tr>
<td align="left">Μογγολικά</td>
<td align="left">mn</td>
<td align="left">mon</td>
</tr>
<tr>
<td align="left">Ναούρου</td>
<td align="left">na</td>
<td align="left">nau</td>
</tr>
<tr>
<td align="left">Ναβάχο, Navaho</td>
<td align="left">nv</td>
<td align="left">nav</td>
</tr>
<tr>
<td align="left">Νορβηγικά Bokmål</td>
<td align="left">nb</td>
<td align="left">nob</td>
</tr>
<tr>
<td align="left">Βορ. Ντέμπελε</td>
<td align="left">nd</td>
<td align="left">nde</td>
</tr>
<tr>
<td align="left">Νεπαλέζικα</td>
<td align="left">ne</td>
<td align="left">nep</td>
</tr>
<tr>
<td align="left">Ντόνγκα</td>
<td align="left">ng</td>
<td align="left">ndo</td>
</tr>
<tr>
<td align="left">Νορβηγικά Νυνόρσκ</td>
<td align="left">nn</td>
<td align="left">nno</td>
</tr>
<tr>
<td align="left">Νορβηγικά</td>
<td align="left">no</td>
<td align="left">nor</td>
</tr>
<tr>
<td align="left">Νουόζου, Nuosu</td>
<td align="left">ii</td>
<td align="left">iii</td>
</tr>
<tr>
<td align="left">Νοτ. Ντέμπελε</td>
<td align="left">nr</td>
<td align="left">nbl</td>
</tr>
<tr>
<td align="left">Οξιτάνικα</td>
<td align="left">oc</td>
<td align="left">oci</td>
</tr>
<tr>
<td align="left">Οτζίμπουε, Οτζίμπουε</td>
<td align="left">oj</td>
<td align="left">oji</td>
</tr>
<tr>
<td align="left">Σλαβονικά Παλαιοεκκλησιαστικά, Σλαβική Εκκλησία, Σλαβονική Εκκλησία, Παλιά Βουλγαρικά, Παλιά Σλαβονικά</td>
<td align="left">cu</td>
<td align="left">chu</td>
</tr>
<tr>
<td align="left">Ορόμο</td>
<td align="left">om</td>
<td align="left">orm</td>
</tr>
<tr>
<td align="left">Ορίγια</td>
<td align="left">or</td>
<td align="left">ori</td>
</tr>
<tr>
<td align="left">Οσσετιανά, Οσσετικά</td>
<td align="left">os</td>
<td align="left">oss</td>
</tr>
<tr>
<td align="left">Παντζάμπι, Punjabi</td>
<td align="left">pa</td>
<td align="left">pan</td>
</tr>
<tr>
<td align="left">Πάλι, Pāli</td>
<td align="left">pi</td>
<td align="left">pli</td>
</tr>
<tr>
<td align="left">Περσικά</td>
<td align="left">fa</td>
<td align="left">fas</td>
</tr>
<tr>
<td align="left">Πολωνικά</td>
<td align="left">pl</td>
<td align="left">pol</td>
</tr>
<tr>
<td align="left">Πάστο, Pushto</td>
<td align="left">ps</td>
<td align="left">pus</td>
</tr>
<tr>
<td align="left">Πορτογαλικά</td>
<td align="left">pt</td>
<td align="left">por</td>
</tr>
<tr>
<td align="left">Κέτσουα</td>
<td align="left">qu</td>
<td align="left">que</td>
</tr>
<tr>
<td align="left">Ρωμανικά, Romansh</td>
<td align="left">rm</td>
<td align="left">roh</td>
</tr>
<tr>
<td align="left">Κιρούντι</td>
<td align="left">rn</td>
<td align="left">run</td>
</tr>
<tr>
<td align="left">Ρουμανικά, Μολδαβικά, Moldovan</td>
<td align="left">ro</td>
<td align="left">ron</td>
</tr>
<tr>
<td align="left">Ρωσικά</td>
<td align="left">ru</td>
<td align="left">rus</td>
</tr>
<tr>
<td align="left">Σανσκριτικά (Saṁskṛta)</td>
<td align="left">sa</td>
<td align="left">san</td>
</tr>
<tr>
<td align="left">Σαρδηνιακά</td>
<td align="left">sc</td>
<td align="left">srd</td>
</tr>
<tr>
<td align="left">Σίντι, Sindhi</td>
<td align="left">sd</td>
<td align="left">snd</td>
</tr>
<tr>
<td align="left">Βορ. Σάμι</td>
<td align="left">se</td>
<td align="left">sme</td>
</tr>
<tr>
<td align="left">Σαμοανικά, Samoan</td>
<td align="left">sm</td>
<td align="left">smo</td>
</tr>
<tr>
<td align="left">Σάνγκο</td>
<td align="left">sg</td>
<td align="left">sag</td>
</tr>
<tr>
<td align="left">Σερβικά</td>
<td align="left">sr</td>
<td align="left">srp</td>
</tr>
<tr>
<td align="left">Σκωτικά Γαελικά, Gaelic</td>
<td align="left">gd</td>
<td align="left">gla</td>
</tr>
<tr>
<td align="left">Σόνα, Shona</td>
<td align="left">sn</td>
<td align="left">sna</td>
</tr>
<tr>
<td align="left">Σινχάλα, Sinhalese</td>
<td align="left">si</td>
<td align="left">sin</td>
</tr>
<tr>
<td align="left">Σλοβακικά</td>
<td align="left">sk</td>
<td align="left">slk</td>
</tr>
<tr>
<td align="left">Σλοβενικά</td>
<td align="left">sl</td>
<td align="left">slv</td>
</tr>
<tr>
<td align="left">Σομαλέζικα</td>
<td align="left">so</td>
<td align="left">som</td>
</tr>
<tr>
<td align="left">Νοτ. Σόθο, Southern Sotho</td>
<td align="left">st</td>
<td align="left">sot</td>
</tr>
<tr>
<td align="left">Ισπανικά, Καστίλης</td>
<td align="left">es</td>
<td align="left">spa</td>
</tr>
<tr>
<td align="left">Σουδανέζικα</td>
<td align="left">su</td>
<td align="left">sun</td>
</tr>
<tr>
<td align="left">Σουαχίλι</td>
<td align="left">sw</td>
<td align="left">swa</td>
</tr>
<tr>
<td align="left">Σουάτι, Swati</td>
<td align="left">ss</td>
<td align="left">ssw</td>
</tr>
<tr>
<td align="left">Σουηδικά</td>
<td align="left">sv</td>
<td align="left">swe</td>
</tr>
<tr>
<td align="left">Ταμίλ</td>
<td align="left">ta</td>
<td align="left">tam</td>
</tr>
<tr>
<td align="left">Τελούγκου</td>
<td align="left">te</td>
<td align="left">tel</td>
</tr>
<tr>
<td align="left">Τατζικικά, Tajik</td>
<td align="left">tg</td>
<td align="left">tgk</td>
</tr>
<tr>
<td align="left">Τάι, Thai</td>
<td align="left">th</td>
<td align="left">tha</td>
</tr>
<tr>
<td align="left">Τιγκρίνυα, Tigrinya</td>
<td align="left">ti</td>
<td align="left">tir</td>
</tr>
<tr>
<td align="left">Τυπικά Θιβετιανά, Θιβετιανά, Κεντρικά</td>
<td align="left">bo</td>
<td align="left">bod</td>
</tr>
<tr>
<td align="left">Τουρκμενικά</td>
<td align="left">tk</td>
<td align="left">tuk</td>
</tr>
<tr>
<td align="left">Τάγκαλογκ, Tagalog</td>
<td align="left">tl</td>
<td align="left">tgl</td>
</tr>
<tr>
<td align="left">Τσουάνα, Tswana</td>
<td align="left">tn</td>
<td align="left">tsn</td>
</tr>
<tr>
<td align="left">Τόγκα (Νήσοι Tonga)</td>
<td align="left">to</td>
<td align="left">ton</td>
</tr>
<tr>
<td align="left">Τουρκικά</td>
<td align="left">tr</td>
<td align="left">tur</td>
</tr>
<tr>
<td align="left">Τσόγκα, Tsonga</td>
<td align="left">ts</td>
<td align="left">tso</td>
</tr>
<tr>
<td align="left">Ταταρικά</td>
<td align="left">tt</td>
<td align="left">tat</td>
</tr>
<tr>
<td align="left">Τουί, Twi</td>
<td align="left">tw</td>
<td align="left">twi</td>
</tr>
<tr>
<td align="left">Ταϊτιανά</td>
<td align="left">ty</td>
<td align="left">tah</td>
</tr>
<tr>
<td align="left">Ουιγουρικά, Uyghur</td>
<td align="left">ug</td>
<td align="left">uig</td>
</tr>
<tr>
<td align="left">Ουκρανικά</td>
<td align="left">uk</td>
<td align="left">ukr</td>
</tr>
<tr>
<td align="left">Ούρντου, Urdu</td>
<td align="left">ur</td>
<td align="left">urd</td>
</tr>
<tr>
<td align="left">Ουζμπεκικά</td>
<td align="left">uz</td>
<td align="left">uzb</td>
</tr>
<tr>
<td align="left">Βέντα, Venda</td>
<td align="left">ve</td>
<td align="left">ven</td>
</tr>
<tr>
<td align="left">Βιετναμέζικα</td>
<td align="left">vi</td>
<td align="left">vie</td>
</tr>
<tr>
<td align="left">Βόλαπουκ, Volapük</td>
<td align="left">vo</td>
<td align="left">vol</td>
</tr>
<tr>
<td align="left">Βαλούν, Walloon</td>
<td align="left">wa</td>
<td align="left">wln</td>
</tr>
<tr>
<td align="left">Ουαλικά, Welsh</td>
<td align="left">cy</td>
<td align="left">cym</td>
</tr>
<tr>
<td align="left">Γουόλοφ, Wolof</td>
<td align="left">wo</td>
<td align="left">wol</td>
</tr>
<tr>
<td align="left">Δυτ. Φρυσικά, West. Frisian</td>
<td align="left">fy</td>
<td align="left">fry</td>
</tr>
<tr>
<td align="left">Ξόζα, Xhosa</td>
<td align="left">xh</td>
<td align="left">xho</td>
</tr>
<tr>
<td align="left">Γίντις, Yiddish</td>
<td align="left">yi</td>
<td align="left">yid</td>
</tr>
<tr>
<td align="left">Γιορούμπα, Yoruba</td>
<td align="left">yo</td>
<td align="left">yor</td>
</tr>
<tr>
<td align="left">Ζουάνγκ, Τσουάνγκ</td>
<td align="left">za</td>
<td align="left">zha</td>
</tr>
<tr>
<td align="left">Ζουλού</td>
<td align="left">zu</td>
<td align="left">zul</td>
</tr>
</tbody>
</table>
</div>
</div><br class="table-break"></div>
<div class="navfooter">
<hr>
<table width="100%" summary="Navigation footer">
<tr>
<td width="40%" align="left"><a accesskey="p" href="chapter.misc.html">Προηγ</a>
</td>
<td width="20%" align="center"> </td>
<td width="40%" align="right"> <a accesskey="n" href="appendix.keyboard.html">Επόμενο</a></td>
</tr>
<tr>
<td width="40%" align="left" valign="top">Κεφάλαιο 20. Διάφορα θέματα </td>
<td width="20%" align="center"><a accesskey="h" href="index.html">Αρχή</a></td>
<td width="40%" align="right" valign="top"> Παράρτημα B. Συντμεύσεις πληκτρολογίου στο πρόγραμμα επεξεργασίας (editor)</td>
</tr>
</table>
</div>
</body>
</html> | {
"pile_set_name": "Github"
} |
/*
 * arch/arm/mach-omap2/include/mach/clkdev.h
 */
/* Machine-level header that simply forwards to the shared OMAP
 * platform clkdev definitions. */
#include <plat/clkdev.h>
| {
"pile_set_name": "Github"
} |
/* eslint-disable */
// Polyfill EventSource so the hot-middleware stream works in older browsers.
require('eventsource-polyfill')
// Subscribe to the webpack dev-server's hot-update event stream.
const client = require('webpack-hot-middleware/client?noInfo=true&reload=true')
// When the server requests a full reload, refresh the page.
client.subscribe((event) => {
  if (event.action !== 'reload') return
  window.location.reload()
})
| {
"pile_set_name": "Github"
} |
#begin document (wb/sel/56/sel_5623); part 000
wb/sel/56/sel_5623 -1 0 [WORD] XX (TOP* - - - - * -
wb/sel/56/sel_5623 -1 1 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 2 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 3 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 4 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 5 [WORD] VERB * glide - 1 - * -
wb/sel/56/sel_5623 -1 6 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 7 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 8 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 9 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 10 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 11 [WORD] XX * - - - - * -
wb/sel/56/sel_5623 -1 12 [WORD] XX *) - - - - * -
#end document
| {
"pile_set_name": "Github"
} |
drop table if exists t1;
CREATE TABLE t1 (
id INT UNSIGNED AUTO_INCREMENT NOT NULL PRIMARY KEY,
a VARCHAR(200),
b TEXT
) ENGINE = InnoDB STATS_PERSISTENT=0;
INSERT INTO t1 (a,b) VALUES
('MySQL Tutorial','DBMS stands for DataBase ...') ,
('How To Use MySQL Well','After you went through a ...'),
('Optimizing MySQL','In this tutorial we will show ...');
ALTER TABLE t1 ADD FULLTEXT INDEX idx_1 (a);
Warnings:
Warning 124 InnoDB rebuilding table to add column FTS_DOC_ID
ALTER TABLE t1 ADD FULLTEXT INDEX idx_2 (b);
SHOW CREATE TABLE t1;
Table Create Table
t1 CREATE TABLE `t1` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`a` varchar(200) DEFAULT NULL,
`b` text,
PRIMARY KEY (`id`),
FULLTEXT KEY `idx_1` (`a`),
FULLTEXT KEY `idx_2` (`b`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci STATS_PERSISTENT=0
START TRANSACTION;
INSERT INTO t1 (a,b) VALUES
('1001 MySQL Tricks','1. Never run mysqld as root. 2. ...'),
('MySQL vs. YourSQL','In the following database comparison ...'),
('MySQL Security','When configured properly, MySQL ...');
ROLLBACK;
SELECT * FROM t1 WHERE MATCH (a)
AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(a) AGAINST("+mysql +Tutorial" IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(b) AGAINST("+Tutorial" IN BOOLEAN MODE);
id a b
3 Optimizing MySQL In this tutorial we will show ...
select * from t1 where MATCH(b) AGAINST("+stands +(DataBase)" IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(b) AGAINST("+DataBase -(comparison)" IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select *, MATCH(a) AGAINST("Optimizing MySQL" IN BOOLEAN MODE) as x from t1;
id a b x
1 MySQL Tutorial DBMS stands for DataBase ... 0.0906190574169159
2 How To Use MySQL Well After you went through a ... 0.0906190574169159
3 Optimizing MySQL In this tutorial we will show ... 0.6961383819580078
select *, MATCH(b) AGAINST("collections support" IN BOOLEAN MODE) as x from t1;
id a b x
1 MySQL Tutorial DBMS stands for DataBase ... 0
2 How To Use MySQL Well After you went through a ... 0
3 Optimizing MySQL In this tutorial we will show ... 0
select * from t1 where MATCH a AGAINST ("+Optimiz* +Optimiz*" IN BOOLEAN MODE);
id a b
3 Optimizing MySQL In this tutorial we will show ...
select * from t1 where MATCH b AGAINST ('"DBMS stands"' IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH b AGAINST ('"DBMS STANDS"' IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(b) AGAINST ("DataBase" WITH QUERY EXPANSION);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(a) AGAINST ("Security" WITH QUERY EXPANSION);
id a b
ALTER TABLE t1 DROP INDEX idx_1;
ALTER TABLE t1 DROP INDEX idx_2;
ALTER TABLE t1 ADD FULLTEXT INDEX idx_1 (a);
ALTER TABLE t1 ADD FULLTEXT INDEX idx_2 (b);
SELECT * FROM t1 WHERE MATCH (a)
AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(a) AGAINST("+mysql +Tutorial" IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(b) AGAINST("+Tutorial" IN BOOLEAN MODE);
id a b
3 Optimizing MySQL In this tutorial we will show ...
select * from t1 where MATCH(b) AGAINST("+stands +(DataBase)" IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(b) AGAINST("+DataBase -(comparison)" IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select *, MATCH(a) AGAINST("Optimizing MySQL" IN BOOLEAN MODE) as x from t1;
id a b x
1 MySQL Tutorial DBMS stands for DataBase ... 0.0906190574169159
2 How To Use MySQL Well After you went through a ... 0.0906190574169159
3 Optimizing MySQL In this tutorial we will show ... 0.6961383819580078
select *, MATCH(b) AGAINST("collections support" IN BOOLEAN MODE) as x from t1;
id a b x
1 MySQL Tutorial DBMS stands for DataBase ... 0
2 How To Use MySQL Well After you went through a ... 0
3 Optimizing MySQL In this tutorial we will show ... 0
select * from t1 where MATCH a AGAINST ("+Optimiz* +Optimiz*" IN BOOLEAN MODE);
id a b
3 Optimizing MySQL In this tutorial we will show ...
select * from t1 where MATCH b AGAINST ('"DBMS stands"' IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH b AGAINST ('"DBMS STANDS"' IN BOOLEAN MODE);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(b) AGAINST ("DataBase" WITH QUERY EXPANSION);
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
select * from t1 where MATCH(a) AGAINST ("Security" WITH QUERY EXPANSION);
id a b
INSERT INTO t1 (a,b) VALUES ('test query expansion','for database ...');
INSERT INTO t1 (a,b) VALUES
('test proximity search, test, proximity and phrase',
'search, with proximity innodb');
INSERT INTO t1 (a,b) VALUES
('test proximity fts search, test, proximity and phrase',
'search, with proximity innodb');
INSERT INTO t1 (a,b) VALUES
('test more of proximity for fts search, test, more proximity and phrase',
'search, with proximity innodb');
SELECT * FROM t1
WHERE MATCH (a)
AGAINST ('"proximity search"@3' IN BOOLEAN MODE);
id a b
8 test proximity search, test, proximity and phrase search, with proximity innodb
9 test proximity fts search, test, proximity and phrase search, with proximity innodb
SELECT * FROM t1
WHERE MATCH (a)
AGAINST ('"proximity search"@2' IN BOOLEAN MODE);
id a b
8 test proximity search, test, proximity and phrase search, with proximity innodb
SELECT * FROM t1
WHERE MATCH (b)
AGAINST ('"proximity innodb"@4' IN BOOLEAN MODE);
id a b
8 test proximity search, test, proximity and phrase search, with proximity innodb
9 test proximity fts search, test, proximity and phrase search, with proximity innodb
10 test more of proximity for fts search, test, more proximity and phrase search, with proximity innodb
SELECT * FROM t1
WHERE MATCH (a)
AGAINST ('"test proximity"@3' IN BOOLEAN MODE);
id a b
8 test proximity search, test, proximity and phrase search, with proximity innodb
9 test proximity fts search, test, proximity and phrase search, with proximity innodb
10 test more of proximity for fts search, test, more proximity and phrase search, with proximity innodb
SELECT * FROM t1
WHERE MATCH (a)
AGAINST ('"more test proximity"@3' IN BOOLEAN MODE);
id a b
10 test more of proximity for fts search, test, more proximity and phrase search, with proximity innodb
SELECT * FROM t1
WHERE MATCH (a)
AGAINST ('"more test proximity"@2' IN BOOLEAN MODE);
id a b
SELECT * FROM t1
WHERE MATCH (a)
AGAINST ('"more fts proximity"@02' IN BOOLEAN MODE);
id a b
SELECT * FROM t1 WHERE CONCAT(t1.a,t1.b) IN (
SELECT CONCAT(a,b) FROM t1 AS t2 WHERE
MATCH (t2.a) AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE)
) OR t1.id = 3 ;
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
3 Optimizing MySQL In this tutorial we will show ...
SELECT * FROM t1 WHERE CONCAT(t1.a,t1.b) IN (
SELECT CONCAT(a,b) FROM t1 AS t2
WHERE MATCH (t2.a) AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE)
AND t2.id != 3) ;
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
SELECT * FROM t1 WHERE id IN (SELECT MIN(id) FROM t1 WHERE
MATCH (b) AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE)) OR id = 3 ;
id a b
3 Optimizing MySQL In this tutorial we will show ...
SELECT * FROM t1 WHERE id NOT IN (SELECT MIN(id) FROM t1
WHERE MATCH (b) AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE)) ;
id a b
1 MySQL Tutorial DBMS stands for DataBase ...
2 How To Use MySQL Well After you went through a ...
7 test query expansion for database ...
8 test proximity search, test, proximity and phrase search, with proximity innodb
9 test proximity fts search, test, proximity and phrase search, with proximity innodb
10 test more of proximity for fts search, test, more proximity and phrase search, with proximity innodb
SELECT * FROM t1 WHERE EXISTS (SELECT t2.id FROM t1 AS t2 WHERE
MATCH (t2.b) AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE)
AND t1.id = t2.id) ;
id a b
3 Optimizing MySQL In this tutorial we will show ...
SELECT * FROM t1 WHERE NOT EXISTS (SELECT t2.id FROM t1 AS t2 WHERE
MATCH (t2.a) AGAINST ('Tutorial' IN NATURAL LANGUAGE MODE)
AND t1.id = t2.id) ;
id a b
2 How To Use MySQL Well After you went through a ...
3 Optimizing MySQL In this tutorial we will show ...
7 test query expansion for database ...
8 test proximity search, test, proximity and phrase search, with proximity innodb
9 test proximity fts search, test, proximity and phrase search, with proximity innodb
10 test more of proximity for fts search, test, more proximity and phrase search, with proximity innodb
SELECT * FROM t1 WHERE t1.id = (SELECT MAX(t2.id) FROM t1 AS t2 WHERE
MATCH(t2.a) AGAINST ('"proximity search"@3' IN BOOLEAN MODE));
id a b
9 test proximity fts search, test, proximity and phrase search, with proximity innodb
SELECT * FROM t1 WHERE t1.id > (SELECT MIN(t2.id) FROM t1 AS t2 WHERE
MATCH(t2.b) AGAINST ('"proximity innodb"@3' IN BOOLEAN MODE));
id a b
9 test proximity fts search, test, proximity and phrase search, with proximity innodb
10 test more of proximity for fts search, test, more proximity and phrase search, with proximity innodb
DROP TABLE t1;
| {
"pile_set_name": "Github"
} |
#ifndef __STREAM_WINDOWOUT_H
#define __STREAM_WINDOWOUT_H

#include "IInOutStreams.h"

/*
 * Output "window" used by the LZMA decoder: a flat byte buffer plus a
 * write cursor. The buffer aliases out_stream.data (see OutWindowInit),
 * so flushing only has to publish the final size.
 */
typedef struct WindowOut
{
  BYTE *Buffer;   /* destination byte buffer (aliases out_stream.data) */
  UINT32 Pos;     /* number of bytes written so far == next write index */
} WindowOut;

extern WindowOut out_window;

/* Point the window at the output stream's buffer and reset the cursor. */
#define OutWindowInit() \
{ \
  out_window.Buffer = (BYTE *) out_stream.data; \
  out_window.Pos = 0; \
}

/* Publish the number of bytes produced to the output stream. */
#define OutWindowFlush() \
{ \
  OutStreamSizeSet( out_window.Pos ); \
}

// BRCM modification
/*
 * Copy aLen bytes starting (aDistance + 1) bytes back in the already
 * decoded output to the current position (an LZMA match copy). The two
 * regions may overlap when the distance is smaller than the length; the
 * byte-by-byte forward copy is what makes such self-referential matches
 * work, so this must NOT be replaced with memcpy/memmove.
 */
INLINE void OutWindowCopyBackBlock(UINT32 aDistance, UINT32 aLen)
{
  BYTE *p = out_window.Buffer + out_window.Pos;
  UINT32 i;
  aDistance++;  /* stored distance is 0-based; actual back offset is +1 */
  for(i = 0; i < aLen; i++)
    p[i] = p[i - aDistance];
  out_window.Pos += aLen;
}

/* Append a single literal byte at the cursor. */
#define OutWindowPutOneByte(aByte) \
{ \
  out_window.Buffer[out_window.Pos++] = aByte; \
}

/* Read a byte relative to the cursor (anIndex is typically negative). */
#define OutWindowGetOneByte(anIndex) \
  (out_window.Buffer[out_window.Pos + anIndex])

#endif
| {
"pile_set_name": "Github"
} |
# zim framework
# Bootstrap the Zim zsh framework into ~/.local/zsh on first run.
ZDOTDIR="$HOME/.local/zsh"
ZIMDIR="$ZDOTDIR/.zim"
if [ ! -d "$ZIMDIR" ]; then
    # First run: create the expected directory layout, clone zim, and
    # prepend each bundled template to the matching user dotfile.
    [ ! -d "$HOME/.local" ] && mkdir -p "$HOME/.local" 2> /dev/null
    [ ! -d "$HOME/.local/bin" ] && mkdir -p "$HOME/.local/bin" 2> /dev/null
    [ ! -d "$HOME/.local/zsh" ] && mkdir -p "$HOME/.local/zsh" 2> /dev/null
    git clone --recursive https://github.com/zimfw/zimfw.git "$ZIMDIR"
    setopt EXTENDED_GLOB
    for template_file ( ${ZDOTDIR:-${HOME}}/.zim/templates/* ); do
        user_file="${ZDOTDIR:-${HOME}}/.${template_file:t}"
        touch ${user_file}
        # Template content first, then any pre-existing user content.
        ( print -rn "$(<${template_file})$(<${user_file})" >! ${user_file} ) 2>/dev/null
    done
fi
export PS1="%n@%m:%~%# "
# syntax color definition
# Styles for zsh-syntax-highlighting (main/brackets/pattern highlighters).
ZSH_HIGHLIGHT_HIGHLIGHTERS=(main brackets pattern)
typeset -A ZSH_HIGHLIGHT_STYLES
# ZSH_HIGHLIGHT_STYLES[command]=fg=white,bold
# ZSH_HIGHLIGHT_STYLES[alias]='fg=magenta,bold'
ZSH_HIGHLIGHT_STYLES[default]=none
ZSH_HIGHLIGHT_STYLES[unknown-token]=fg=009
ZSH_HIGHLIGHT_STYLES[reserved-word]=fg=009,standout
ZSH_HIGHLIGHT_STYLES[alias]=fg=cyan,bold
ZSH_HIGHLIGHT_STYLES[builtin]=fg=cyan,bold
ZSH_HIGHLIGHT_STYLES[function]=fg=cyan,bold
ZSH_HIGHLIGHT_STYLES[command]=fg=white,bold
ZSH_HIGHLIGHT_STYLES[precommand]=fg=white,underline
ZSH_HIGHLIGHT_STYLES[commandseparator]=none
ZSH_HIGHLIGHT_STYLES[hashed-command]=fg=009
ZSH_HIGHLIGHT_STYLES[path]=fg=214,underline
ZSH_HIGHLIGHT_STYLES[globbing]=fg=063
ZSH_HIGHLIGHT_STYLES[history-expansion]=fg=white,underline
ZSH_HIGHLIGHT_STYLES[single-hyphen-option]=none
ZSH_HIGHLIGHT_STYLES[double-hyphen-option]=none
ZSH_HIGHLIGHT_STYLES[back-quoted-argument]=none
ZSH_HIGHLIGHT_STYLES[single-quoted-argument]=fg=063
ZSH_HIGHLIGHT_STYLES[double-quoted-argument]=fg=063
ZSH_HIGHLIGHT_STYLES[dollar-double-quoted-argument]=fg=009
ZSH_HIGHLIGHT_STYLES[back-double-quoted-argument]=fg=009
ZSH_HIGHLIGHT_STYLES[assign]=none
# Local machine-specific initialization, if present.
[ -f "$HOME/.local/etc/init.sh" ] && source "$HOME/.local/etc/init.sh"
# Load zim's interactive setup and, for login shells, its login setup.
source "$HOME/.local/zsh/.zshrc"
if [[ -o login ]]; then
    source "$HOME/.local/zsh/.zlogin"
fi
# export PS1="%n@%m:%~%# "
| {
"pile_set_name": "Github"
} |
# View-helper namespace for microposts. Intentionally empty for now;
# Rails expects this module to exist alongside the Microposts views.
module MicropostsHelper
end
| {
"pile_set_name": "Github"
} |
// Micro-benchmark for Buffer#readUInt32LE from native-buffer-browserify.
var benchmark = require('benchmark')
var suite = new benchmark.Suite()

// Expose the Buffer implementation under test as a global.
global.NewBuffer = require('../../').Buffer // native-buffer-browserify

// Pre-fill a buffer with LENGTH little-endian uint32 values so the timed
// loop below measures reads only, not setup.
var LENGTH = 20
var newTarget = NewBuffer(LENGTH * 4)
for (var i = 0; i < LENGTH; i++) {
  newTarget.writeUInt32LE(7000 + i, i * 4)
}

suite.add('NewBuffer#readUInt32LE', function () {
  for (var i = 0; i < LENGTH; i++) {
    var x = newTarget.readUInt32LE(i * 4)
  }
})
.on('error', function (event) {
  console.error(event.target.error.stack)
})
.on('cycle', function (event) {
  // Print ops/sec for each completed benchmark cycle.
  console.log(String(event.target))
})
.run({ 'async': true })
| {
"pile_set_name": "Github"
} |
open Core_kernel
open Cmt_format
open Meja_lib
(** Load a compiled OCaml interface ([.cmi]/[.cmti]) file and convert it into
    a typed Meja module, registering its declarations via the typechecker.
    The [name] argument is unused (kept for the loader callback signature). *)
let load ~loc ~name:_ resolve_env filename =
  (*Format.(fprintf err_formatter "Loading %s from %s...@." name filename) ;*)
  let cmi_info = read_cmi filename in
  let signature = Of_ocaml.to_signature cmi_info.cmi_sign in
  let env = {Initial_env.env with resolve_env} in
  let env = Envi.open_module env in
  let env, _ = Typechecker.check_signature env signature in
  (*Format.(fprintf err_formatter "Loaded@.") ;*)
  let m, _ = Envi.pop_module ~loc env in
  m

(* Install [load] as the environment's deferred module loader. *)
let () = Envi.Scope.load_module := load

(** Module name for a file path, e.g. ["foo/bar.cmi"] becomes ["Bar"]. *)
let modname_of_filename file =
  String.capitalize (Filename.chop_extension (Filename.basename file))

(** Register every [.cmi]/[.cmti] file in [dirname] as a lazily-loaded
    external module. Unreadable directories are treated as empty. *)
let load_directory env dirname =
  let files = try Sys.readdir dirname with Sys_error _ -> [||] in
  Array.iter files ~f:(fun file ->
      match Filename.split_extension file with
      | _, Some ("cmi" | "cmti") ->
          let filename = Filename.concat dirname file in
          let module_name = Ident.create_global (modname_of_filename file) in
          Envi.register_external_module module_name (Envi.Deferred filename)
            env
      | _ ->
          () )
| {
"pile_set_name": "Github"
} |
/*!
* Bootstrap-select v1.12.2 (http://silviomoreto.github.io/bootstrap-select)
*
* Copyright 2013-2017 bootstrap-select
* Licensed under MIT (https://github.com/silviomoreto/bootstrap-select/blob/master/LICENSE)
*/
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module unless amdModuleId is set
define(["jquery"], function (a0) {
return (factory(a0));
});
} else if (typeof module === 'object' && module.exports) {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory(require("jquery"));
} else {
factory(root["jQuery"]);
}
}(this, function (jQuery) {
(function ($) {
$.fn.selectpicker.defaults = {
noneSelectedText: 'Nothing selected',
noneResultsText: 'No results match {0}',
countSelectedText: function (numSelected, numTotal) {
return (numSelected == 1) ? "{0} item selected" : "{0} items selected";
},
maxOptionsText: function (numAll, numGroup) {
return [
(numAll == 1) ? 'Limit reached ({n} item max)' : 'Limit reached ({n} items max)',
(numGroup == 1) ? 'Group limit reached ({n} item max)' : 'Group limit reached ({n} items max)'
];
},
selectAllText: 'Select All',
deselectAllText: 'Deselect All',
multipleSeparator: ', '
};
})(jQuery);
}));
| {
"pile_set_name": "Github"
} |
<transfer-table tr-model="tableData">
<allocated>
<table st-table="tableData.displayedAllocated" st-safe-src="tableData.allocated" hz-table>
<thead>
<tr>
<th>Animal</th>
</tr>
</thead>
<tbody>
<tr ng-repeat="alRow in tableData.displayedAllocated">
<td>{$ alRow.animal $}</td>
<td>
<action-list>
<action callback="trCtrl.deallocate" item="alRow"></action>
</action-list>
</td>
</tr>
</tbody>
</table>
</allocated>
<available>
<table st-table="tableData.available" hz-table>
<thead>
<tr>
<th>Animal</th>
</tr>
</thead>
<tbody>
<tr ng-repeat="row in tableData.available" ng-if="!trCtrl.allocatedIds[row.id]">
<td>{$ row.animal $}</td>
<td>
<action-list>
<action callback="trCtrl.allocate" item="row"></action>
</action-list>
</td>
</tr>
</tbody>
</table>
</available>
</transfer-table>
| {
"pile_set_name": "Github"
} |
// ----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------------------
using System.Collections.Generic;
namespace Microsoft.WindowsAzure.MobileServices
{
    /// <summary>
    /// An interface for platform-specific assemblies to provide utility functions
    /// regarding Push capabilities.
    /// </summary>
    public interface IPushTestUtility
    {
        /// <summary>Returns a platform-specific push handle used to register for notifications.</summary>
        string GetPushHandle();

        /// <summary>Returns a second, different push handle, used to exercise handle updates.</summary>
        string GetUpdatedPushHandle();

        /// <summary>Builds a template registration targeting toast notifications.</summary>
        Registration GetTemplateRegistrationForToast();

        /// <summary>Asserts that the given template registration has the expected contents.</summary>
        void ValidateTemplateRegistration(Registration registration);

        /// <summary>Validates a template registration before it is registered with the service.</summary>
        void ValidateTemplateRegistrationBeforeRegister(Registration registration);

        /// <summary>Validates a template registration after registration, given the installation id.</summary>
        void ValidateTemplateRegistrationAfterRegister(Registration registration, string zumoInstallationId);

        /// <summary>Creates a new native registration for <paramref name="deviceId"/> with the given tags.</summary>
        Registration GetNewNativeRegistration(string deviceId, IEnumerable<string> tags);

        /// <summary>Creates a new template registration with the given body template and template name.</summary>
        Registration GetNewTemplateRegistration(string deviceId, string bodyTemplate, string templateName);

        /// <summary>Returns the canned response for listing native registrations.</summary>
        string GetListNativeRegistrationResponse();

        /// <summary>Returns the canned response for listing template registrations.</summary>
        string GetListTemplateRegistrationResponse();

        /// <summary>Returns the canned response for a mixed native/template registration list.</summary>
        string GetListMixedRegistrationResponse();
    }
}
"pile_set_name": "Github"
} |
/*
*******************************************************************************
*
* Copyright (C) 1999-2011, International Business Machines
* Corporation and others. All Rights Reserved.
*
*******************************************************************************
* file name: unistr_props.cpp
* encoding: US-ASCII
* tab size: 8 (not used)
* indentation:2
*
* created on: 2004aug25
* created by: Markus W. Scherer
*
* Character property dependent functions moved here from unistr.cpp
*/
#include "unicode/utypes.h"
#include "unicode/uchar.h"
#include "unicode/unistr.h"
#include "unicode/utf16.h"
U_NAMESPACE_BEGIN
// Removes leading and trailing whitespace in place and returns *this.
// "Whitespace" is U+0020 or anything u_isWhitespace() reports true for;
// iteration uses the UTF-16 macros so surrogate pairs are stepped over
// as whole code points.
UnicodeString&
UnicodeString::trim()
{
  // A bogus string has no valid contents; leave it untouched.
  if(isBogus()) {
    return *this;
  }

  UChar *array = getArrayStart();
  UChar32 c;
  int32_t oldLength = this->length();
  int32_t i = oldLength, length;

  // first cut off trailing white space
  // Walk backwards; `length` tracks the index just after the last
  // non-whitespace code point seen so far.
  for(;;) {
    length = i;
    if(i <= 0) {
      break;
    }
    U16_PREV(array, 0, i, c);
    // c == 0x20 is a fast path for the common ASCII space before the
    // general property lookup.
    if(!(c == 0x20 || u_isWhitespace(c))) {
      break;
    }
  }
  if(length < oldLength) {
    setLength(length);
  }

  // find leading white space
  int32_t start;
  i = 0;
  for(;;) {
    start = i;
    if(i >= length) {
      break;
    }
    U16_NEXT(array, i, length, c);
    if(!(c == 0x20 || u_isWhitespace(c))) {
      break;
    }
  }

  // move string forward over leading white space
  // doReplace(0, start, 0, 0, 0) deletes the first `start` code units.
  if(start > 0) {
    doReplace(0, start, 0, 0, 0);
  }

  return *this;
}
U_NAMESPACE_END
| {
"pile_set_name": "Github"
} |
#-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
# Registry of per-project report generators. Report classes register
# themselves under their +report_type+ key and are instantiated on
# demand via #report_for.
class Reports::ReportsService
  class_attribute :report_types

  # Registers +report+ (a Reports::Report subclass) under its report_type.
  def self.add_report(report)
    self.report_types ||= {}
    self.report_types[report.report_type] = report
  end

  # Whether a report class has been registered for +report_type+.
  def self.has_report_for?(report_type)
    report_types.key? report_type
  end

  # automate this? by cycling through each instance of Reports::Report? or is this to automagically?
  # and there is no reason, why plugins shouldn't be able to use this to add their own customized reports...
  add_report Reports::SubprojectReport
  add_report Reports::AuthorReport
  add_report Reports::AssigneeReport
  add_report Reports::ResponsibleReport
  add_report Reports::TypeReport
  add_report Reports::PriorityReport
  add_report Reports::CategoryReport
  add_report Reports::VersionReport

  # @param project [Project] the project the reports will be built for.
  # @raise [RuntimeError] when +project+ is nil or not a Project.
  def initialize(project)
    # `project.is_a?(Project)` is already nil-safe (nil.is_a? returns
    # false), so the former safe-navigation operator here was redundant.
    raise 'You must provide a project to report upon' unless project.is_a?(Project)

    @project = project
  end

  # Returns a new report instance for +report_type+, or nil when no
  # report class is registered under that key.
  def report_for(report_type)
    report_klass = self.class.report_types[report_type]
    report_klass&.new(@project)
  end
end
| {
"pile_set_name": "Github"
} |
/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3 as published by the Free Software
** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-3.0.html.
**
****************************************************************************/
#pragma once
#include "constraints.h"
#include <functional>
namespace Sqlite {
// Description of a single column of a SQLite table: owning table, column
// name, column type and any column constraints.
class Column
{
public:
    Column() = default;

    Column(Utils::SmallStringView tableName,
           Utils::SmallStringView name,
           ColumnType type,
           Constraints &&constraints = {})
        : constraints(std::move(constraints))
        , name(name)
        , tableName(tableName)
        , type(type)
    {}

    // Resets name, type and constraints to their defaults. Note that
    // tableName is left untouched -- NOTE(review): confirm this is intended.
    void clear()
    {
        name.clear();
        type = ColumnType::Numeric;
        constraints = {};
    }

    // SQL type keyword for this column; empty string for ColumnType::None.
    Utils::SmallString typeString() const
    {
        switch (type) {
        case ColumnType::None:
            return {};
        case ColumnType::Numeric:
            return "NUMERIC";
        case ColumnType::Integer:
            return "INTEGER";
        case ColumnType::Real:
            return "REAL";
        case ColumnType::Text:
            return "TEXT";
        case ColumnType::Blob:
            return "BLOB";
        }

        // All enumerators are handled above; reaching here is a bug.
        Q_UNREACHABLE();
    }

    friend bool operator==(const Column &first, const Column &second)
    {
        return first.name == second.name && first.type == second.type
               && first.constraints == second.constraints && first.tableName == second.tableName;
    }

public:
    Constraints constraints;
    Utils::SmallString name;
    Utils::SmallString tableName;
    ColumnType type = ColumnType::Numeric;
}; // (previous comment wrongly said "namespace Sqlite"; this closes class Column)
using SqliteColumns = std::vector<Column>;
using SqliteColumnConstReference = std::reference_wrapper<const Column>;
using SqliteColumnConstReferences = std::vector<SqliteColumnConstReference>;
} // namespace Sqlite
| {
"pile_set_name": "Github"
} |
import time
# A cache of channel and repository info to allow users to install multiple
# packages without having to wait for the metadata to be downloaded more
# than once. The keys are managed locally by the utilizing code.
_channel_repository_cache = {}
def clear_cache():
    """Removes every entry from the in-memory channel/repository cache."""
    _channel_repository_cache.clear()
def get_cache(key, default=None):
    """
    Gets an in-memory cache value

    :param key:
        The string key

    :param default:
        The value to return if the key has not been set, or the ttl expired

    :return:
        The cached value, or default
    """

    struct = _channel_repository_cache.get(key, {})
    expires = struct.get('expires')
    if expires and expires > time.time():
        return struct.get('data')

    if struct:
        # The entry exists but its ttl has expired - evict it so the cache
        # does not grow without bound as keys expire.
        _channel_repository_cache.pop(key, None)
    return default
def merge_cache_over_settings(destination, setting, key_prefix):
    """
    Overlays the cached value for `key_prefix + '.' + setting` onto the
    current value of destination.settings[setting].

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key
    """

    cached = get_cache('%s.%s' % (key_prefix, setting))
    if not cached:
        return
    merged = destination.settings.get(setting, {})
    merged.update(cached)
    destination.settings[setting] = merged
def merge_cache_under_settings(destination, setting, key_prefix, list_=False):
    """
    Take the cached value of `key` and put it into the key `setting` of
    the destination.settings dict. Merge the values by overlaying the
    existing setting value over the cached info.

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key

    :param list_:
        If a list should be used instead of a dict
    """

    value = get_cache(key_prefix + '.' + setting)
    if not value:
        return

    # Copy the cached container before merging - the previous implementation
    # mutated the object stored in the cache, corrupting it for later readers.
    value = list(value) if list_ else dict(value)

    existing = destination.settings.get(setting)
    if existing:
        if list_:
            # Prevent duplicate values; a set gives O(1) membership tests
            seen = set(value)
            for val in existing:
                if val not in seen:
                    value.append(val)
        else:
            value.update(existing)

    destination.settings[setting] = value
def set_cache(key, data, ttl=300):
    """
    Sets an in-memory cache value

    :param key:
        The string key

    :param data:
        The data to cache

    :param ttl:
        The integer number of second to cache the data for
    """

    expires_at = time.time() + ttl
    _channel_repository_cache[key] = {'expires': expires_at, 'data': data}
def set_cache_over_settings(destination, setting, key_prefix, value, ttl):
    """
    Take the value passed, and merge it over the current `setting`. Once
    complete, take the merged value and set the cache `key` and
    destination.settings `setting` to that value, using the `ttl` for
    set_cache().

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key

    :param value:
        The value to set

    :param ttl:
        The cache ttl to use
    """

    merged = destination.settings.get(setting, {})
    merged.update(value)
    # Previously the merged dict was computed and then discarded: the plain
    # `value` was cached and stored instead, silently dropping the settings
    # that were already present. Cache and store the merged result.
    set_cache(key_prefix + '.' + setting, merged, ttl)
    destination.settings[setting] = merged
def set_cache_under_settings(destination, setting, key_prefix, value, ttl, list_=False):
    """
    Take the value passed, and merge the current `setting` over it. Once
    complete, take the value and set the cache `key` and destination.settings
    `setting` to that value, using the `ttl` for set_cache().

    Note: `value` is mutated in place, and the very same object ends up
    shared by the cache entry and destination.settings[setting] - mutating
    one later aliases the other. NOTE(review): confirm this sharing is
    intended.

    :param destination:
        An object that has a `.settings` attribute that is a dict

    :param setting:
        The dict key to use when pushing the value into the settings dict

    :param key_prefix:
        The string to prefix to `setting` to make the cache key

    :param value:
        The value to set

    :param ttl:
        The cache ttl to use

    :param list_:
        If a list should be used instead of a dict
    """

    if value:
        if list_:
            existing = destination.settings.get(setting, [])
            # Existing values are appended after the cached/new values
            value.extend(existing)
        else:
            existing = destination.settings.get(setting, {})
            # Existing keys win over the keys already in `value`
            value.update(existing)
        set_cache(key_prefix + '.' + setting, value, ttl)
        destination.settings[setting] = value
| {
"pile_set_name": "Github"
} |
'''
Copyright (C) 2014 Dr. John Lindsay <[email protected]>
This program is intended for instructional purposes only. The
following is an example of how to use Whitebox's scripting
capabilities to automate a geoprocessing workflow. The scripting
language is Python; more specifically it is Jython, the Python
implementation targeting the Java Virtual Machine (JVM).
In this script, we will take a digital elevation model (DEM),
remove all the topographic depressions from it (i.e. hydrologically
correct the DEM), calculate a flow direction pointer grid, use
the pointer file to perform a flow accumulation (i.e. upslope area)
calculation, then threshold the upslope area to derive valley lines
or streams. This is a fairly common workflow in spatial hydrology.
When you run a script from within Whitebox, a reference to the
Whitebox user interface (UI) will be automatically bound to your
script. It's variable name is 'pluginHost'. This is the primary
reason why the script must be run from within Whitebox's Scripter.
First we need the directory containing the data, and to set
the working directory to this. We will use the Vermont DEM contained
within the samples directory.
'''
import os
try:
separator = os.sep # The system-specific directory separator
wd = pluginHost.getApplicationDirectory() + separator + "resources" + separator + "samples" + separator + "Vermont DEM" + separator
pluginHost.setWorkingDirectory(wd)
demFile = wd + "Vermont DEM.dep"
# Notice that spaces are allowed in file names. There is also no
# restriction on the length of the file name...in fact longer,
# descriptive names are preferred. Whitebox is friendly!
# A raster or vector file can be displayed by specifying the file
# name as an argument of the returnData method of the pluginHost
pluginHost.returnData(demFile)
'''
Remove the depressions in the DEM using the 'FillDepressions' tool.
The help file for each tool in Whitebox contains a section detailing
the required input parameters needed to run the tool from a script.
These parameters are always fed to the tool in a String array, in
the case below, called 'args'. The tool is then run using the 'runPlugin'
method of the pluginHost. runPlugin takes the name of the tool (see
the tool's help for the proper name), the arguments string array,
followed by two Boolean arguments. The first of these Boolean
arguments determines whether the plugin will be run on its own
separate thread. In most scripting applications, this should be set
to 'False' because the results of this tool are needed as inputs to
subsequent tools. The second Boolean argument specifies whether the
data that are returned to the pluginHost after the tool is completed
should be suppressed. Many tools will automatically display images
or shapefiles or some text report when they've completed. It is often
the case in a workflow that you only want the final result to be
displayed, in which case all of the runPlugins should have this final
Boolean parameter set to 'True' except for the last operation, for
which it should be set to 'False' (i.e. don't suppress the output).
The data will still be written to disc if the output are supressed,
they simply won't be automatically displayed when the tool has
completed. If you don't specify this last Boolean parameter, the
output will be treated as normal.
'''
filledDEMFile = wd + "filled DEM.dep"
flatIncrement = "0.001" # Notice that although this is a numeric parameter, it is provided to the tool as a string.
args = [demFile, filledDEMFile, flatIncrement]
pluginHost.runPlugin("FillDepressions", args, False, True)
# Calculate the D8 pointer (flow direction) file.
pointerFile = wd + "pointer.dep"
args = [filledDEMFile, pointerFile]
pluginHost.runPlugin("FlowPointerD8", args, False, True)
# Perform the flow accumulation operation.
flowAccumFile = wd + "flow accumulation.dep"
outputType = "number of upslope grid cells"
logTransformOutput = "False"
args = [pointerFile, flowAccumFile, outputType, logTransformOutput]
pluginHost.runPlugin("FlowAccumD8", args, False, True)
# Extract the streams
streamsFile = wd + "streams.dep"
channelThreshold = "1000.0"
backValue = "NoData"
args = [flowAccumFile, streamsFile, channelThreshold, backValue]
pluginHost.runPlugin("ExtractStreams", args, False, False) # This final result will be displayed
'''
Note that in each of the examples above, I have created new variables
to hold each of the input parameters for the plugin tools. I've done
this more for clarity than anything else. The script could be
substantially shortened if the shorter variables were directly entered
into the args array. For instance, I could have easily used:
args = [flowAccumFile, streamsFile, "1000.0", "NoData"]
for the last runPlugin and saved myself declaring the two variables.
Because the file names are generally used in subsequent operations,
it is a good idea to dedicate variables to those parameters.
'''
except Exception, e:
print e
pluginHost.showFeedback("Error during script execution.")
''' alternatively, you many want to send the exception to
the pluginHost.logException() method '''
finally:
print "I'm done!"
# pluginHost.updateProgress(0)
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 7d2ce97e1dec77b4fbcbedf5334f5375
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
import Foundation
import SourceKittenFramework
// Process-wide cache of compiled regexes, guarded by `regexCacheLock`.
private var regexCache = [RegexCacheKey: NSRegularExpression]()
private let regexCacheLock = NSLock()

/// Cache key combining a regex pattern with its compilation options.
private struct RegexCacheKey: Hashable {
    let pattern: String
    let options: NSRegularExpression.Options

    func hash(into hasher: inout Hasher) {
        hasher.combine(pattern)
        // Hash the raw value so equal option sets hash identically.
        hasher.combine(options.rawValue)
    }
}

extension NSRegularExpression {
    /// Returns a compiled regex for `pattern`, reusing a previously compiled
    /// instance when one exists for the same pattern/options pair. When no
    /// options are given, `[.anchorsMatchLines, .dotMatchesLineSeparators]`
    /// is used. Thread-safe via `regexCacheLock`.
    internal static func cached(pattern: String, options: Options? = nil) throws -> NSRegularExpression {
        let options = options ?? [.anchorsMatchLines, .dotMatchesLineSeparators]
        let key = RegexCacheKey(pattern: pattern, options: options)
        regexCacheLock.lock()
        defer { regexCacheLock.unlock() }
        if let result = regexCache[key] {
            return result
        }
        let result = try NSRegularExpression(pattern: pattern, options: options)
        regexCache[key] = result
        return result
    }

    /// Matches over the entire contents of a `StringView`.
    internal func matches(in stringView: StringView,
                          options: NSRegularExpression.MatchingOptions = []) -> [NSTextCheckingResult] {
        return matches(in: stringView.string, options: options, range: stringView.range)
    }

    /// Matches over a sub-range of a `StringView`.
    internal func matches(in stringView: StringView,
                          options: NSRegularExpression.MatchingOptions = [],
                          range: NSRange) -> [NSTextCheckingResult] {
        return matches(in: stringView.string, options: options, range: range)
    }

    /// Matches over the full contents of a `SwiftLintFile`.
    internal func matches(in file: SwiftLintFile,
                          options: NSRegularExpression.MatchingOptions = []) -> [NSTextCheckingResult] {
        return matches(in: file.stringView.string, options: options, range: file.stringView.range)
    }
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sentry.tests.e2e.solr;
import static org.apache.sentry.tests.e2e.solr.TestSentryServer.ADMIN_USER;
import static org.apache.sentry.core.model.solr.AdminOperation.COLLECTIONS;
import java.io.IOException;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.apache.sentry.core.model.solr.AdminOperation;
import org.apache.sentry.core.model.solr.SolrConstants;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.CollectionAdminRequest.ClusterStatus;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.junit.Test;
import org.restlet.data.MediaType;
import org.restlet.resource.ClientResource;
import org.restlet.resource.ResourceException;
/**
 * End-to-end tests verifying that Sentry authorization is enforced for Solr
 * admin operations: collection admin commands (CLUSTERSTATUS, CREATE, DELETE)
 * and the /admin/metrics endpoint.
 *
 * Convention: privileges are granted/revoked as ADMIN_USER against roles
 * ("role0" maps to "user0"), and each helper restores the previously
 * authenticated user in a finally block so tests remain independent.
 */
@SuppressSSL
public class TestSolrAdminOperations extends SolrSentryServiceTestBase {

  /** Query-level collection-admin access granted/revoked for role0. */
  @Test
  public void testQueryAdminOperation() throws Exception {
    // Success.
    adminQueryActionSuccess(ADMIN_USER);
    // Failure
    adminQueryActionFailure("user0");
    // Now grant admin privileges to user0 (i.e. role0) and verify the admin operations again.
    grantAdminPrivileges(ADMIN_USER, "role0", COLLECTIONS.getName(), SolrConstants.ALL);
    adminQueryActionSuccess("user0");
    // Now revoke admin update privileges from user0 (i.e. role0) and verify the admin operations again.
    revokeAdminPrivileges(ADMIN_USER, "role0", COLLECTIONS.getName(), SolrConstants.UPDATE);
    adminQueryActionSuccess("user0");
    // Now revoke admin query privileges from user0 (i.e. role0) and verify the admin operations again.
    revokeAdminPrivileges(ADMIN_USER, "role0", COLLECTIONS.getName(), SolrConstants.QUERY);
    adminQueryActionFailure("user0");
  }

  /** Update-level (create/delete collection) admin access for role0. */
  @Test
  public void testUpdateAdminOperation() throws Exception {
    String collectionName = "testUpdateAdminOperation";
    // Success.
    grantCollectionPrivileges(ADMIN_USER, ADMIN_ROLE, collectionName, SolrConstants.UPDATE);
    adminUpdateActionSuccess(ADMIN_USER, collectionName);
    // Failure
    adminUpdateActionFailure("user0", collectionName);
    // Now grant admin privileges role0 and verify the admin operations again.
    grantAdminPrivileges(ADMIN_USER, "role0", COLLECTIONS.getName(), SolrConstants.ALL);
    grantCollectionPrivileges(ADMIN_USER, "role0", collectionName, SolrConstants.UPDATE);
    adminUpdateActionSuccess("user0", collectionName);
    // Now revoke admin query privileges from role0 and verify the admin operations again.
    revokeAdminPrivileges(ADMIN_USER, "role0", COLLECTIONS.getName(), SolrConstants.QUERY);
    // NOTE(review): this asserts as ADMIN_USER, not "user0"; confirm that was intended.
    adminUpdateActionSuccess(ADMIN_USER, collectionName);
    // Now revoke admin update privileges from role0 and verify the admin operations again.
    revokeAdminPrivileges(ADMIN_USER, "role0", COLLECTIONS.getName(), SolrConstants.UPDATE);
    adminUpdateActionFailure("user0", collectionName);
  }

  /** Metrics endpoint accessible once role0 holds METRICS/QUERY. */
  @SuppressWarnings("rawtypes")
  @Test
  public void testMetricsQuerySuccess() throws Exception {
    grantAdminPrivileges(ADMIN_USER, "role0", AdminOperation.METRICS.getName(), SolrConstants.QUERY);
    String tmp = getAuthenticatedUser();
    try {
      setAuthenticationUser("user0");
      String url = String.format("%s/admin/metrics?wt=json&group=jvm",
          cluster.getJettySolrRunner(0).getBaseUrl().toString());
      ClientResource resource = new ClientResource(url);
      Map result = readNestedElement(deserialize(resource.get(MediaType.APPLICATION_JSON)), "metrics");
      assertTrue(result.containsKey("solr.jvm"));
    } finally {
      // Undo the grant so later tests start from a clean privilege state.
      revokeAdminPrivileges(ADMIN_USER, "role0", AdminOperation.METRICS.getName(), SolrConstants.QUERY);
      setAuthenticationUser(tmp);
    }
  }

  /** Metrics endpoint returns 403 for a user without the METRICS privilege. */
  @Test
  public void testMetricsQueryFailure() throws Exception {
    String tmp = getAuthenticatedUser();
    try {
      setAuthenticationUser("user1");
      String url = String.format("%s/admin/metrics?wt=json",
          cluster.getJettySolrRunner(0).getBaseUrl().toString());
      ClientResource resource = new ClientResource(url);
      resource.get(MediaType.APPLICATION_JSON);
      fail("This admin request should have failed with authorization error.");
    } catch (ResourceException ex) {
      assertEquals(HttpServletResponse.SC_FORBIDDEN , ex.getStatus().getCode());
    } finally {
      setAuthenticationUser(tmp);
    }
  }

  /** Asserts that CLUSTERSTATUS succeeds when issued as {@code userName}. */
  protected void adminQueryActionSuccess(String userName) throws SolrServerException, IOException {
    String tmp = getAuthenticatedUser();
    try {
      setAuthenticationUser(userName);
      ClusterStatus clusterStatus = new ClusterStatus();
      assertEquals(0, clusterStatus.process(cluster.getSolrClient()).getStatus());
    } finally {
      setAuthenticationUser(tmp);
    }
  }

  /** Asserts that CLUSTERSTATUS is rejected with 403 for {@code userName}. */
  protected void adminQueryActionFailure(String userName) throws SolrServerException, IOException {
    String tmp = getAuthenticatedUser();
    try {
      setAuthenticationUser(userName);
      ClusterStatus clusterStatus = new ClusterStatus();
      clusterStatus.process(cluster.getSolrClient());
      fail("This admin request should have failed with authorization error.");
    } catch (RemoteSolrException ex) {
      assertEquals(HttpServletResponse.SC_FORBIDDEN , ex.code());
    } finally {
      setAuthenticationUser(tmp);
    }
  }

  /** Asserts that {@code userName} can both create and delete a collection. */
  protected void adminUpdateActionSuccess(String userName, String collectionName)
      throws SolrServerException, IOException {
    // Success.
    String tmp = getAuthenticatedUser();
    try {
      // Create collection.
      setAuthenticationUser(userName);
      CollectionAdminRequest.Create createCmd =
          CollectionAdminRequest.createCollection(collectionName, "cloud-minimal", 1, NUM_SERVERS);
      assertEquals(0, createCmd.process(cluster.getSolrClient()).getStatus());
      // Delete collection.
      CollectionAdminRequest.Delete delCmd = CollectionAdminRequest.deleteCollection(collectionName);
      assertEquals(0, delCmd.process(cluster.getSolrClient()).getStatus());
    } finally {
      setAuthenticationUser(tmp);
    }
  }
}
| {
"pile_set_name": "Github"
} |
'use strict';
const _ = require('lodash');
module.exports = {
async setupEvents() {
this.apis = _.filter(
this.templates.update.Resources,
(item) => this.provider.isApiType(item.Type))
.map((item) => item.Properties);
this.triggers = _.filter(
this.templates.update.Resources,
(item) => this.provider.isTriggerType(item.Type))
.map((item) => item.Properties);
await this.setupInvokeRole();
await this.createApisIfNeeded();
await this.createTriggersIfNeeded();
},
async setupInvokeRole() {
const invokeRoleResource = this.templates.update.Resources[this.provider.getInvokeRoleLogicalId()];
if(invokeRoleResource){
const role = invokeRoleResource.Properties;
// TODO: update if needed
this.invokeRole = await this.setupRole(role);
}
},
async createApisIfNeeded() {
if (!this.apis.length) {
return;
}
await this.createApiGroupIfNotExists();
await this.checkExistingApis();
await this.createOrUpdateApis();
await this.deployApis();
},
async createTriggersIfNeeded() {
if (!this.triggers.length) {
return;
}
await this.checkExistingTriggers();
await this.createOrUpdateTriggers();
},
async createApiGroupIfNotExists() {
const groupResource = this.templates.update.Resources[this.provider.getApiGroupLogicalId()];
if (!groupResource) {
return; // No API needed
}
const group = groupResource.Properties;
const groupName = group.GroupName;
const foundGroup = await this.provider.getApiGroup(groupName);
if (foundGroup) {
this.apiGroup = foundGroup;
this.serverless.cli.log(`API group ${group.GroupName} exists.`);
return foundGroup;
}
await this.createApiGroup(group);
},
async createApiGroup(group) {
this.serverless.cli.log(`Creating API group ${group.GroupName}...`);
const createdGroup = await this.provider.createApiGroup(group);
this.apiGroup = createdGroup;
this.serverless.cli.log(`Created API group ${group.GroupName}`);
return createdGroup;
},
async checkExistingApis() {
if (!this.apis.length) {
return;
}
const apis = await this.provider.getApis({
GroupId: this.apiGroup.GroupId
});
this.apiMap = new Map(apis.map((api) => [api.ApiName, api]));
this.apis.forEach((api) => {
if (!this.apiMap.get(api.ApiName)) {
this.apiMap.set(api.ApiName, false);
}
});
},
async createOrUpdateApis() {
if (!this.apis.length) {
return;
}
for (var i = 0; i < this.apis.length; i++) {
const api = this.apis[i];
await this.createOrUpdateApi(api);
}
},
async createOrUpdateApi(api) {
const group = this.apiGroup;
const role = this.invokeRole;
const apiInMap = this.apiMap.get(api.ApiName);
if (apiInMap) {
const apiProps = Object.assign({ApiId: apiInMap.ApiId}, api);
this.serverless.cli.log(`Updating API ${api.ApiName}...`);
try {
await this.provider.updateApi(group, role, apiProps);
this.serverless.cli.log(`Updated API ${api.ApiName}`);
} catch (err) {
this.serverless.cli.log(`Failed to update API ${api.ApiName}!`);
throw err;
}
return;
}
this.serverless.cli.log(`Creating API ${api.ApiName}...`);
let newApi;
try {
newApi = await this.provider.createApi(group, role, api);
} catch (err) {
this.serverless.cli.log(`Failed to create API ${api.ApiName}!`);
throw err;
}
this.serverless.cli.log(`Created API ${api.ApiName}`);
this.apiMap.set(api.ApiName, newApi);
},
async deployApis() {
const group = this.apiGroup;
for (var i = 0; i < this.apis.length; i++) {
const api = this.apis[i];
const apiProps = this.apiMap.get(api.ApiName);
this.serverless.cli.log(`Deploying API ${api.ApiName}...`);
try {
await this.provider.deployApi(group, apiProps);
this.serverless.cli.log(`Deployed API ${api.ApiName}`);
const config = api.RequestConfig;
const func = api.ServiceConfig.FunctionComputeConfig;
this.serverless.cli.log(`${config.RequestHttpMethod} ` +
`http://${this.apiGroup.SubDomain}${config.RequestPath} -> ` +
`${func.ServiceName}.${func.FunctionName}`);
} catch (err) {
this.serverless.cli.log(`Failed to deploy API ${api.ApiName}!`);
throw err;
}
}
},
async checkExistingTriggers() {
this.triggerMap = new Map();
for (var i = 0; i < this.triggers.length; i++) {
const trigger = this.triggers[i];
const foundTrigger = await this.provider.getTrigger(
trigger.serviceName, trigger.functionName, trigger.triggerName
);
if (foundTrigger) {
this.triggerMap.set(trigger.triggerName, foundTrigger);
}
}
},
async createOrUpdateTriggers() {
if (!this.triggers.length) {
return;
}
for (var i = 0; i < this.triggers.length; i++) {
const trigger = this.triggers[i];
await this.createOrUpdateTrigger(trigger);
}
},
async createOrUpdateTrigger(trigger) {
const role = this.invokeRole;
const triggerName = trigger.triggerName;
const serviceName = trigger.serviceName;
const functionName = trigger.functionName;
const triggerInMap = this.triggerMap.get(triggerName);
if (triggerInMap) {
this.serverless.cli.log(`Updating trigger ${triggerName}...`);
try {
await this.provider.updateTrigger(serviceName, functionName, triggerName, trigger, role);
this.serverless.cli.log(`Updated trigger ${triggerName}`);
} catch (err) {
this.serverless.cli.log(`Failed to update trigger ${triggerName}!`);
throw err;
}
return;
}
this.serverless.cli.log(`Creating trigger ${triggerName}...`);
try {
const newtrigger = await this.provider.createTrigger(serviceName, functionName, trigger, role);
this.serverless.cli.log(`Created trigger ${triggerName}`);
this.triggerMap.set(triggerName, newtrigger);
} catch (err) {
this.serverless.cli.log(`Failed to create trigger ${triggerName}!`);
throw err;
}
}
};
| {
"pile_set_name": "Github"
} |
/*
** $Id: linit.c,v 1.14.1.1 2007/12/27 13:02:25 roberto Exp $
** Initialization of libraries for lua.c
** See Copyright Notice in lua.h
*/
#define linit_c
#define LUA_LIB
#include "lua.h"
#include "lualib.h"
#include "lauxlib.h"
/*
** Standard libraries opened by luaL_openlibs, in registration order.
** The base library is listed under the empty name so that its functions
** go into the global environment. The array ends with a {NULL, NULL}
** sentinel used as the loop terminator.
*/
static const luaL_Reg lualibs[] = {
  {"", luaopen_base},
  {LUA_LOADLIBNAME, luaopen_package},
  {LUA_TABLIBNAME, luaopen_table},
  {LUA_IOLIBNAME, luaopen_io},
  {LUA_OSLIBNAME, luaopen_os},
  {LUA_STRLIBNAME, luaopen_string},
  {LUA_MATHLIBNAME, luaopen_math},
  {LUA_DBLIBNAME, luaopen_debug},
  {NULL, NULL}
};
/*
** Opens every standard library listed in `lualibs' into the state L.
** Each open function is called with the library name pushed as its
** single argument.
*/
LUALIB_API void luaL_openlibs (lua_State *L) {
  const luaL_Reg *entry = lualibs;
  while (entry->func != NULL) {
    lua_pushcfunction(L, entry->func);
    lua_pushstring(L, entry->name);
    lua_call(L, 1, 0);
    entry++;
  }
}
| {
"pile_set_name": "Github"
} |
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
// Unique base URL per test run (timestamp avoids cached thumbnails), plus
// three distinct page URLs derived from it via fragments.
const URL = "http://mochi.test:8888/?t=" + Date.now();
const URL1 = URL + "#1";
const URL2 = URL + "#2";
const URL3 = URL + "#3";

// Load PageThumbs.jsm into a scratch object so we can pick out its exports.
var tmp = {};
Cc["@mozilla.org/moz/jssubscript-loader;1"]
  .getService(Ci.mozIJSSubScriptLoader)
  .loadSubScript("resource://gre/modules/PageThumbs.jsm", tmp);

// Expiration works on chunks of files; the assertions below rely on at most
// this many thumbnails being removed per expiration pass.
const EXPIRATION_MIN_CHUNK_SIZE = 50;
const {PageThumbsExpiration} = tmp;
// Main test driver: verifies that PageThumbsExpiration removes exactly the
// thumbnails that are not in the keep-list, and that removal is limited to
// EXPIRATION_MIN_CHUNK_SIZE files per expiration pass.
function* runTests() {
  // Create dummy URLs — deliberately 10 more than one expiration chunk.
  let dummyURLs = [];
  for (let i = 0; i < EXPIRATION_MIN_CHUNK_SIZE + 10; i++) {
    dummyURLs.push(URL + "#dummy" + i);
  }
  // Make sure our thumbnails aren't expired too early.
  dontExpireThumbnailURLs([URL1, URL2, URL3].concat(dummyURLs));
  // Create three thumbnails.
  yield createDummyThumbnail(URL1);
  ok(thumbnailExists(URL1), "first thumbnail created");
  yield createDummyThumbnail(URL2);
  ok(thumbnailExists(URL2), "second thumbnail created");
  yield createDummyThumbnail(URL3);
  ok(thumbnailExists(URL3), "third thumbnail created");
  // Remove the third thumbnail (only URL1/URL2 are kept).
  yield expireThumbnails([URL1, URL2]);
  ok(thumbnailExists(URL1), "first thumbnail still exists");
  ok(thumbnailExists(URL2), "second thumbnail still exists");
  ok(!thumbnailExists(URL3), "third thumbnail has been removed");
  // Remove the second thumbnail.
  yield expireThumbnails([URL1]);
  ok(thumbnailExists(URL1), "first thumbnail still exists");
  ok(!thumbnailExists(URL2), "second thumbnail has been removed");
  // Remove all thumbnails.
  yield expireThumbnails([]);
  ok(!thumbnailExists(URL1), "all thumbnails have been removed");
  // Create some more files than the min chunk size.
  for (let url of dummyURLs) {
    yield createDummyThumbnail(url);
  }
  ok(dummyURLs.every(thumbnailExists), "all dummy thumbnails created");
  // Expire thumbnails and expect 10 remaining: one pass removes at most
  // EXPIRATION_MIN_CHUNK_SIZE of the (chunk size + 10) files.
  yield expireThumbnails([]);
  let remainingURLs = dummyURLs.filter(thumbnailExists);
  is(remainingURLs.length, 10, "10 dummy thumbnails remaining");
  // Expire thumbnails again. All should be gone by now.
  yield expireThumbnails([]);
  remainingURLs = remainingURLs.filter(thumbnailExists);
  is(remainingURLs.length, 0, "no dummy thumbnails remaining");
}
// Writes a small fixed byte pattern as the thumbnail for aURL, then resumes
// the test harness (via next) once the asynchronous write has finished.
function createDummyThumbnail(aURL) {
  info("Creating dummy thumbnail for " + aURL);
  const dummy = Uint8Array.from({length: 10}, (unused, index) => index);
  PageThumbsStorage.writeData(aURL, dummy).then(
    () => {
      info("createDummyThumbnail succeeded");
      executeSoon(next);
    },
    error => {
      ok(false, "createDummyThumbnail failed " + error);
    }
  );
}
// Runs one expiration pass that keeps only the thumbnails listed in aKeep,
// then resumes the test harness (via next) when the pass completes.
function expireThumbnails(aKeep) {
  PageThumbsExpiration.expireThumbnails(aKeep).then(
    () => {
      info("expireThumbnails succeeded");
      executeSoon(next);
    },
    error => {
      ok(false, "expireThumbnails failed " + error);
    }
  );
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2018-2019. data2viz sàrl.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.data2viz.viz
import org.w3c.dom.*
import kotlin.math.*
/**
 * Renders this circle onto the given 2D canvas context: traces a full arc
 * at (x, y) with the node's radius, then fills and/or strokes it depending
 * on which of the node's paint properties are set.
 */
fun CircleNode.render(context: CanvasRenderingContext2D) {
    context.beginPath()
    context.arc(x, y, radius, 0.0, PI * 2, false)
    if (fill != null) context.fill()
    if (stroke != null) context.stroke()
}
| {
"pile_set_name": "Github"
} |
// SSDT for Zenbook UX510 (Kabylake)
DefinitionBlock ("", "SSDT", 2, "hack", "ux510kl", 0)
{
    // Each included .dsl already carries a DefinitionBlock wrapper;
    // this flag tells them to skip it since we provide one here.
    #define NO_DEFINITIONBLOCK

    // audio: ALC256 codec patch plus HDEF device for layout-id 13
    #include "include/SSDT-ALC256.dsl"
    #include "include/layout13_HDEF.asl"
    // battery
    #include "include/SSDT-BATT.dsl"
    // keyboard backlight/fn keys/als
    #include "include/SSDT-ATK-KABY.dsl"
    #include "include/SSDT-RALS.dsl"
    #include "include/SSDT-ALSC.dsl"
    // backlight
    #include "include/SSDT-PNLF.dsl"
    // disable DGPU
    #include "include/SSDT-RP01_PEGP.dsl"
    // usb
    #include "include/SSDT-XHC.dsl"
    #include "include/SSDT-USBX.dsl"
    // others: misc fixes, sleep/wake hooks, LPC and integrated graphics
    #include "include/SSDT-HACK.dsl"
    #include "include/SSDT-PTSWAK.dsl"
    #include "include/SSDT-LPC.dsl"
    #include "include/SSDT-IGPU.dsl"
} | {
"pile_set_name": "Github"
} |
<!DOCTYPE module SYSTEM "module.dtd">
<module name="nav_line_border" dir="nav">
<doc>
<description>
navigate along a border line (line 1-2) with turns in the same direction
      you can use this function to navigate along a border if it is essential not to cross it
      navigation is along line p1, p2 with turns in the same direction to make sure you don't cross the line
      take care that your navigation radius is not too small in strong wind conditions!
In the flight plan:
<!--
<call fun="nav_line_border_setup()"/>
<call fun="nav_line_border_run(WP_waypoint1_name, WP_waypoint1_name, nav_radius)"/>
-->
</description>
</doc>
<header>
<file name="nav_line_border.h"/>
</header>
<makefile target="ap|sim|nps">
<file name="nav_line_border.c"/>
</makefile>
</module>
| {
"pile_set_name": "Github"
} |
<!-- 128 LRI / RLI -->
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
<!-- 64 PDI -->
⁩⁩⁩⁩⁩⁩⁩⁩
⁩⁩⁩⁩⁩⁩⁩⁩
⁩⁩⁩⁩⁩⁩⁩⁩
⁩⁩⁩⁩⁩⁩⁩⁩
⁩⁩⁩⁩⁩⁩⁩⁩
⁩⁩⁩⁩⁩⁩⁩⁩
⁩⁩⁩⁩⁩⁩⁩⁩
⁩⁩⁩⁩⁩⁩⁩⁩
<!-- 64 LRI / RLI -->
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
⁦⁧⁦⁧⁦⁧⁦⁧
| {
"pile_set_name": "Github"
} |
<?xml version='1.0'?>
<!--
Simple highlighter for HTML output. Follows the Eclipse color scheme.
-->
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:xslthl="http://xslthl.sf.net"
exclude-result-prefixes="xslthl"
version='1.0'>
<!-- language keywords -->
<xsl:template match='xslthl:keyword'>
	<span class="hl-keyword"><xsl:value-of select='.'/></span>
</xsl:template>
<!-- comments: generic, single-line and multi-line variants -->
<xsl:template match='xslthl:comment'>
	<span class="hl-comment"><xsl:value-of select='.'/></span>
</xsl:template>
<xsl:template match='xslthl:oneline-comment'>
	<span class="hl-comment"><xsl:value-of select='.'/></span>
</xsl:template>
<xsl:template match='xslthl:multiline-comment'>
	<span class="hl-multiline-comment"><xsl:value-of select='.'/></span>
</xsl:template>
<!-- markup: tag names and attributes -->
<xsl:template match='xslthl:tag'>
	<span class="hl-tag"><xsl:value-of select='.'/></span>
</xsl:template>
<xsl:template match='xslthl:attribute'>
	<span class="hl-attribute"><xsl:value-of select='.'/></span>
</xsl:template>
<!-- literals -->
<xsl:template match='xslthl:value'>
	<span class="hl-value"><xsl:value-of select='.'/></span>
</xsl:template>
<xsl:template match='xslthl:string'>
	<span class="hl-string"><xsl:value-of select='.'/></span>
</xsl:template>
</xsl:stylesheet> | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<resources></resources>
| {
"pile_set_name": "Github"
} |
{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "JndnmDMp66FL"
},
"source": [
"##### Copyright 2020 Google LLC.\n",
"\n",
"Licensed under the Apache License, Version 2.0 (the \"License\");"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "both",
"colab": {},
"colab_type": "code",
"id": "hMqWDc_m6rUC"
},
"outputs": [],
"source": [
"#@title Default title text\n",
"# Licensed under the Apache License, Version 2.0 (the \"License\");\n",
"# you may not use this file except in compliance with the License.\n",
"# You may obtain a copy of the License at\n",
"#\n",
"# https://www.apache.org/licenses/LICENSE-2.0\n",
"#\n",
"# Unless required by applicable law or agreed to in writing, software\n",
"# distributed under the License is distributed on an \"AS IS\" BASIS,\n",
"# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
"# See the License for the specific language governing permissions and\n",
"# limitations under the License."
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "pW8P5zUZFjPQ"
},
"source": [
"# Train your own Keyword Spotting Model.\n",
"[Open in Google Colab](https://colab.research.google.com/github/google-research/google-research/blob/master/speech_embedding/record_train.ipynb)\n",
"\n",
"Before running any cells please enable GPUs for this notebook to speed it up. \n",
"\n",
"* *Edit* → *Notebook Settings*\n",
"* select *GPU* from the *Hardware Accelerator* drop-down\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "X4vndgdGztBS"
},
"outputs": [],
"source": [
"#@title Imports\n",
"%tensorflow_version 1.x\n",
"from __future__ import division\n",
"\n",
"import collections\n",
"import IPython\n",
"import functools\n",
"import math\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import io\n",
"import os\n",
"import tensorflow as tf\n",
"import tensorflow_hub as hub\n",
"import random\n",
"import scipy.io.wavfile\n",
"import tarfile\n",
"import time\n",
"import sys\n",
"\n",
"from google.colab import output\n",
"from google.colab import widgets\n",
"from base64 import b64decode\n",
"\n",
"!pip install ffmpeg-python\n",
"import ffmpeg"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "3C3TL3WbztBd"
},
"outputs": [],
"source": [
"#@title Helper functions and classes\n",
"def normalized_read(filename):\n",
" \"\"\"Reads and normalizes a wavfile.\"\"\"\n",
" _, data = scipy.io.wavfile.read(open(filename, mode='rb'))\n",
" samples_99_percentile = np.percentile(np.abs(data), 99.9)\n",
" normalized_samples = data / samples_99_percentile\n",
" normalized_samples = np.clip(normalized_samples, -1, 1)\n",
" return normalized_samples\n",
"\n",
"class EmbeddingDataFileList(object):\n",
" \"\"\"Container that loads audio, stores it as embeddings and can\n",
" rebalance it.\"\"\"\n",
"\n",
" def __init__(self, filelist,\n",
" data_dest_dir,\n",
" targets=None,\n",
" label_max=10000,\n",
" negative_label=\"negative\",\n",
" negative_multiplier=25,\n",
" target_samples=32000,\n",
" progress_bar=None,\n",
" embedding_model=None):\n",
" \"\"\"Creates an instance of `EmbeddingDataFileList`.\"\"\"\n",
" self._negative_label = negative_label\n",
" self._data_per_label = collections.defaultdict(list)\n",
" self._labelcounts = {}\n",
" self._label_list = targets\n",
" total_examples = sum([min(len(x), label_max) for x in filelist.values()])\n",
" total_examples -= min(len(filelist[negative_label]), label_max)\n",
" total_examples += min(len(filelist[negative_label]), negative_multiplier * label_max)\n",
" print(\"loading %d examples\" % total_examples)\n",
" example_count = 0\n",
" for label in filelist:\n",
" if label not in self._label_list:\n",
" raise ValueError(\"Unknown label:\", label)\n",
" label_files = filelist[label]\n",
" random.shuffle(label_files)\n",
" if label == negative_label:\n",
" multplier = negative_multiplier\n",
" else:\n",
" multplier = 1\n",
" for wav_file in label_files[:label_max * multplier]:\n",
" data = normalized_read(os.path.join(data_dest_dir, wav_file))\n",
" required_padding = target_samples - data.shape[0]\n",
" if required_padding \u003e 0:\n",
" data = np.pad(data, (required_padding, required_padding), 'constant')\n",
" self._labelcounts[label] = self._labelcounts.get(label, 0) + 1\n",
" if embedding_model:\n",
" data = embedding_model.create_embedding(data)[0][0,:,:,:]\n",
" self._data_per_label[label].append(data)\n",
" if progress_bar is not None:\n",
" example_count += 1\n",
" progress_bar.update(progress(100 * example_count/total_examples))\n",
"\n",
" @property\n",
" def labels(self):\n",
" return self._label_list\n",
"\n",
" def get_label(self, idx):\n",
" return self.labels.index(idx)\n",
"\n",
" def _get_filtered_data(self, label, filter_fn):\n",
" idx = self.labels.index(label)\n",
" return [(filter_fn(x), idx) for x in self._data_per_label[label]]\n",
"\n",
" def _multply_data(self, data, factor):\n",
" samples = int((factor - math.floor(factor)) * len(data))\n",
" return int(factor) * data + random.sample(data, samples)\n",
"\n",
" def full_rebalance(self, negatives, labeled):\n",
" \"\"\"Rebalances for a given ratio of labeled to negatives.\"\"\"\n",
" negative_count = self._labelcounts[self._negative_label]\n",
" labeled_count = sum(self._labelcounts[key]\n",
" for key in self._labelcounts.keys()\n",
" if key != self._negative_label)\n",
" labeled_multiply = labeled * negative_count / (negatives * labeled_count)\n",
" for label in self._data_per_label:\n",
" if label == self._negative_label:\n",
" continue\n",
" self._data_per_label[label] = self._multply_data(\n",
" self._data_per_label[label], labeled_multiply)\n",
" self._labelcounts[label] = len(self._data_per_label[label])\n",
"\n",
" def get_all_data_shuffled(self, filter_fn):\n",
" \"\"\"Returns a shuffled list containing all the data.\"\"\"\n",
" return self.get_all_data(filter_fn, shuffled=True)\n",
"\n",
" def get_all_data(self, filter_fn, shuffled=False):\n",
" \"\"\"Returns a list containing all the data.\"\"\"\n",
" data = []\n",
" for label in self._data_per_label:\n",
" data += self._get_filtered_data(label, filter_fn)\n",
" if shuffled:\n",
" random.shuffle(data)\n",
" return data\n",
"\n",
"def cut_middle_frame(embedding, num_frames, flatten):\n",
    "  \"\"\"Extracts the middle frames for an embedding.\"\"\"\n",
" left_context = (embedding.shape[0] - num_frames) // 2\n",
" if flatten:\n",
" return embedding[left_context:left_context+num_frames].flatten()\n",
" else:\n",
" return embedding[left_context:left_context+num_frames]\n",
"\n",
"\n",
"def progress(value, maximum=100):\n",
" return IPython.display.HTML(\"\"\"\n",
" \u003cprogress value='{value}' max='{max}' style='width: 80%'\u003e{value}\u003c/progress\u003e\n",
" \"\"\".format(value=value, max=maximum))"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "rEuL8h_GztBj"
},
"outputs": [],
"source": [
"#@title HeadTrainerClass and head model functions\n",
"\n",
"def _fully_connected_model_fn(embeddings, num_labels):\n",
" \"\"\"Builds the head model and adds a fully connected output layer.\"\"\"\n",
" net = tf.layers.flatten(embeddings)\n",
" logits = tf.compat.v1.layers.dense(net, num_labels, activation=None)\n",
" return logits\n",
"\n",
"framework = tf.contrib.framework\n",
"layers = tf.contrib.layers\n",
"\n",
"def _conv_head_model_fn(embeddings, num_labels, context):\n",
" \"\"\"Builds the head model and adds a fully connected output layer.\"\"\"\n",
" activation_fn = tf.nn.elu\n",
" normalizer_fn = functools.partial(\n",
" layers.batch_norm, scale=True, is_training=True)\n",
" with framework.arg_scope([layers.conv2d], biases_initializer=None,\n",
" activation_fn=None, stride=1, padding=\"SAME\"):\n",
" net = embeddings\n",
" net = layers.conv2d(net, 96, [3, 1])\n",
" net = normalizer_fn(net)\n",
" net = activation_fn(net)\n",
" net = layers.max_pool2d(net, [2, 1], stride=[2, 1], padding=\"VALID\")\n",
" context //= 2\n",
" net = layers.conv2d(net, 96, [3, 1])\n",
" net = normalizer_fn(net)\n",
" net = activation_fn(net)\n",
" net = layers.max_pool2d(net, [context, net.shape[2]], padding=\"VALID\")\n",
" net = tf.layers.flatten(net)\n",
" logits = layers.fully_connected(\n",
" net, num_labels, activation_fn=None)\n",
" return logits\n",
"\n",
"class HeadTrainer(object):\n",
" \"\"\"A tensorflow classifier to quickly train and test on embeddings.\n",
"\n",
" Only use this if you are training a very small model on a very limited amount\n",
" of data. If you expect the training to take any more than 15 - 20 min then use\n",
" something else.\n",
" \"\"\"\n",
"\n",
" def __init__(self, model_fn, input_shape, num_targets,\n",
" head_learning_rate=0.001, batch_size=64):\n",
" \"\"\"Creates a `HeadTrainer`.\n",
"\n",
" Args:\n",
" model_fn: function that builds the tensorflow model, defines its loss\n",
" and returns the tuple (predictions, loss, accuracy).\n",
" input_shape: describes the shape of the models input feature.\n",
" Does not include a the batch dimension.\n",
" num_targets: Target number of keywords.\n",
" \"\"\"\n",
" self._input_shape = input_shape\n",
" self._output_dim = num_targets\n",
" self._batch_size = batch_size\n",
" self._graph = tf.Graph()\n",
" with self._graph.as_default():\n",
" self._feature = tf.placeholder(tf.float32, shape=([None] + input_shape))\n",
" self._labels = tf.placeholder(tf.int64, shape=(None))\n",
" module_spec = hub.create_module_spec(\n",
" module_fn=self._get_headmodule_fn(model_fn, num_targets))\n",
" self._module = hub.Module(module_spec, trainable=True)\n",
" logits = self._module(self._feature)\n",
" self._predictions = tf.nn.softmax(logits)\n",
" self._loss, self._accuracy = self._get_loss(\n",
" logits, self._labels, self._predictions)\n",
" self._update_weights = tf.train.AdamOptimizer(\n",
" learning_rate=head_learning_rate).minimize(self._loss)\n",
" self._sess = tf.Session(graph=self._graph)\n",
" with self._sess.as_default():\n",
" with self._graph.as_default():\n",
" self._sess.run(tf.local_variables_initializer())\n",
" self._sess.run(tf.global_variables_initializer())\n",
"\n",
" def _get_headmodule_fn(self, model_fn, num_targets):\n",
" \"\"\"Wraps the model_fn in a tf hub module.\"\"\"\n",
" def module_fn():\n",
" embeddings = tf.placeholder(\n",
" tf.float32, shape=([None] + self._input_shape))\n",
" logit = model_fn(embeddings, num_targets)\n",
" hub.add_signature(name='default', inputs=embeddings, outputs=logit)\n",
" return module_fn\n",
"\n",
"\n",
" def _get_loss(self, logits, labels, predictions):\n",
" \"\"\"Defines the model's loss and accuracy.\"\"\"\n",
" xentropy_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n",
" logits=logits, labels=labels)\n",
" loss = tf.reduce_mean(xentropy_loss)\n",
" accuracy = tf.contrib.metrics.accuracy(tf.argmax(predictions, 1), labels)\n",
" return loss, accuracy\n",
"\n",
" def save_head_model(self, save_directory):\n",
" \"\"\"Saves the model.\"\"\"\n",
" with self._graph.as_default():\n",
" self._module.export(save_directory, self._sess)\n",
"\n",
"\n",
" def _feature_transform(self, batch_features, batch_labels):\n",
" \"\"\"Transforms lists of features and labels into into model inputs.\"\"\"\n",
" return np.stack(batch_features), np.stack(batch_labels)\n",
"\n",
" def _batch_data(self, data, batch_size=None):\n",
" \"\"\"Splits the input data into batches.\"\"\"\n",
" batch_features = []\n",
" batch_labels = []\n",
" batch_size = batch_size or len(data)\n",
" for feature, label in data:\n",
" if feature.shape != tuple(self._input_shape):\n",
" raise ValueError(\n",
" \"Feature shape ({}) doesn't match model shape ({})\".format(\n",
" feature.shape, self._input_shape))\n",
" if not 0 \u003c= label \u003c self._output_dim:\n",
"\n",
" raise ValueError('Label value ({}) outside of target range'.format(\n",
" label))\n",
" batch_features.append(feature)\n",
" batch_labels.append(label)\n",
" if len(batch_features) == batch_size:\n",
" yield self._feature_transform(batch_features, batch_labels)\n",
" del batch_features[:]\n",
" del batch_labels[:]\n",
" if batch_features:\n",
" yield self._feature_transform(batch_features, batch_labels)\n",
"\n",
" def epoch_train(self, data, epochs=1, batch_size=None):\n",
" \"\"\"Trains the model on the provided data.\n",
"\n",
" Args:\n",
" data: List of tuples (feature, label) where feature is a np array of\n",
" shape `self._input_shape` and label an int less than self._output_dim.\n",
" epochs: Number of times this data should be trained on.\n",
" batch_size: Number of feature, label pairs per batch. Overwrites\n",
" `self._batch_size` when set.\n",
"\n",
" Returns:\n",
" tuple of accuracy, loss;\n",
" accuracy: Average training accuracy.\n",
" loss: Loss of the final batch.\n",
" \"\"\"\n",
" batch_size = batch_size or self._batch_size\n",
" accuracy_list = []\n",
" for _ in range(epochs):\n",
" for features, labels in self._batch_data(data, batch_size):\n",
" loss, accuracy, _ = self._sess.run(\n",
" [self._loss, self._accuracy, self._update_weights],\n",
" feed_dict={self._feature: features, self._labels: labels})\n",
" accuracy_list.append(accuracy)\n",
" return (sum(accuracy_list))/len(accuracy_list), loss\n",
"\n",
" def test(self, data, batch_size=None):\n",
" \"\"\"Evaluates the model on the provided data.\n",
"\n",
" Args:\n",
" data: List of tuples (feature, label) where feature is a np array of\n",
" shape `self._input_shape` and label an int less than self._output_dim.\n",
" batch_size: Number of feature, label pairs per batch. Overwrites\n",
" `self._batch_size` when set.\n",
"\n",
" Returns:\n",
" tuple of accuracy, loss;\n",
" accuracy: Average training accuracy.\n",
" loss: Loss of the final batch.\n",
" \"\"\"\n",
" batch_size = batch_size or self._batch_size\n",
" accuracy_list = []\n",
" for features, labels in self._batch_data(data, batch_size):\n",
" loss, accuracy = self._sess.run(\n",
" [self._loss, self._accuracy],\n",
" feed_dict={self._feature: features, self._labels: labels})\n",
" accuracy_list.append(accuracy)\n",
" return sum(accuracy_list)/len(accuracy_list), loss\n",
"\n",
" def infer(self, example_feature):\n",
" \"\"\"Runs inference on example_feature.\"\"\"\n",
" if example_feature.shape != tuple(self._input_shape):\n",
" raise ValueError(\n",
" \"Feature shape ({}) doesn't match model shape ({})\".format(\n",
" example_feature.shape, self._input_shape))\n",
" return self._sess.run(\n",
" self._predictions,\n",
" feed_dict={self._feature: np.expand_dims(example_feature, axis=0)})"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "FVLlx0xSztBn"
},
"outputs": [],
"source": [
"#@title TfHubWrapper Class\n",
"\n",
"class TfHubWrapper(object):\n",
    "  \"\"\"Loads a TF Hub embedding model.\"\"\"\n",
" def __init__(self, embedding_model_dir):\n",
    "    \"\"\"Creates a `TfHubWrapper`.\"\"\"\n",
" self._graph = tf.Graph()\n",
" self._sess = tf.Session(graph=self._graph)\n",
" with self._graph.as_default():\n",
" with self._sess.as_default():\n",
" module_spec = hub.load_module_spec(embedding_model_dir)\n",
" embedding_module = hub.Module(module_spec)\n",
" self._samples = tf.placeholder(\n",
" tf.float32, shape=[1, None], name='audio_samples')\n",
" self._embedding = embedding_module(self._samples)\n",
" self._sess.run(tf.global_variables_initializer())\n",
" print(\"Embedding model loaded, embedding shape:\", self._embedding.shape)\n",
"\n",
" def create_embedding(self, samples):\n",
" samples = samples.reshape((1, -1))\n",
" output = self._sess.run(\n",
" [self._embedding],\n",
" feed_dict={self._samples: samples})\n",
" return output"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "eMlokdbTZ4_n"
},
"outputs": [],
"source": [
"#@title Define AudioClipRecorder Class\n",
"AUDIOCLIP_HTML ='''\n",
"\u003cspan style=\"font-size:30px\"\u003eRecorded audio clips of {keyphrase}:\u003c/span\u003e \n",
"\u003cdiv id='target{keyphrase}'\u003e\u003c/div\u003e\n",
"\u003cspan id = \"status_label{keyphrase}\" style=\"font-size:30px\"\u003e\n",
" Ready to record.\u003c/span\u003e\n",
"\u003cbutton id='Add{keyphrase}Audio'\u003eRecord\u003c/button\u003e\n",
"\u003cscript\u003e\n",
"var recorder;\n",
"var base64data = 0;\n",
"\n",
"function sleep(ms) {{\n",
" return new Promise(resolve =\u003e setTimeout(resolve, ms));\n",
"}}\n",
"\n",
"var handleSuccess = function(stream) {{\n",
" recorder = new MediaRecorder(stream);\n",
" recorder.ondataavailable = function(e) {{ \n",
" reader = new FileReader();\n",
" reader.readAsDataURL(e.data); \n",
" reader.onloadend = function() {{\n",
" base64data = reader.result;\n",
" }}\n",
" }};\n",
" recorder.start();\n",
"}};\n",
"\n",
"document.querySelector('#Add{keyphrase}Audio').onclick = () =\u003e {{\n",
" var label = document.getElementById(\"status_label{keyphrase}\"); \n",
" navigator.mediaDevices.getUserMedia({{audio: true}}).then(handleSuccess);\n",
" label.innerHTML = \"Recording ... please say {keyphrase}!\".fontcolor(\"red\");; \n",
" sleep({clip_length_ms}).then(() =\u003e {{\n",
" recorder.stop();\n",
" label.innerHTML = \"Recording finished ... processing audio.\"; \n",
" sleep(1000).then(() =\u003e {{\n",
" google.colab.kernel.invokeFunction('notebook.AddAudioItem{keyphrase}',\n",
" [base64data.toString()], {{}});\n",
" label.innerHTML = \"Ready to record.\";\n",
" }});\n",
"}});\n",
"}};\n",
"\u003c/script\u003e'''\n",
"\n",
"class AudioClipRecorder:\n",
" \"\"\"Python class that creates a JS microphone clip recorder.\"\"\"\n",
"\n",
" def __init__(self, keyphrase=\"test\", clip_length_ms=2100):\n",
" \"\"\"Creates an AudioClipRecorder instance.\n",
"\n",
" When created this class prints an empty \u003cdiv\u003e tag into which the\n",
" recorded clips will be printed and a record audio button that uses\n",
" javascript to access the microphone and record an audio clip.\n",
" \n",
" Args:\n",
" keyphrase: The name of the keyphrase that should be recorded.\n",
" This will be displayed in the recording prompt and used as a\n",
" directory name when the recordings are exported.\n",
" clip_length_ms: The length (in ms) of each recorded audio clip.\n",
" Due to the async nature of javascript this actual amount of recorded\n",
" audio may vary by a ~20-80ms.\n",
" \"\"\"\n",
" self._counter = 0\n",
" self._keyphrase = keyphrase\n",
" self._audio_clips = {}\n",
" IPython.display.display(IPython.display.HTML(AUDIOCLIP_HTML.format(\n",
" keyphrase=keyphrase, clip_length_ms=clip_length_ms)))\n",
" output.register_callback('notebook.AddAudioItem' + keyphrase,\n",
" self.add_list_item)\n",
" output.register_callback('notebook.RemoveAudioItem' + keyphrase,\n",
" self.rm_audio)\n",
"\n",
" def add_list_item(self, data):\n",
" \"\"\"Adds the recorded audio to the list of clips.\n",
"\n",
" This function is called from javascript after clip_length_ms audio has\n",
" been recorded. It prints the recorded audio clip to the \u003cdiv\u003e together with\n",
" a button that allows for it to be deleted.\n",
"\n",
" Args:\n",
" data: The recorded audio in webm format.\n",
" \"\"\"\n",
" raw_string_data = data.split(',')[1]\n",
" samples, rate = self.decode_webm(raw_string_data)\n",
" length_samples = len(samples)\n",
" with output.redirect_to_element('#target{keyphrase}'.format(\n",
" keyphrase=self._keyphrase)):\n",
" with output.use_tags('{keyphrase}_audio_{counter}'.format(\n",
" counter=self._counter, keyphrase=self._keyphrase)):\n",
" IPython.display.display(IPython.display.HTML('''Audio clip {counter} - \n",
" {length} samples - \n",
" \u003cbutton id=\\'delbutton{keyphrase}{counter}\\'\u003edel\u003c/button\u003e\n",
" \u003cscript\u003e\n",
" document.querySelector('#delbutton{keyphrase}{counter}').onclick = () =\u003e {{\n",
" google.colab.kernel.invokeFunction('notebook.RemoveAudioItem{keyphrase}', [{counter}], {{}});\n",
" }};\n",
" \u003c/script\u003e'''.format(counter=self._counter, length=length_samples,\n",
" keyphrase=self._keyphrase)))\n",
" IPython.display.display(IPython.display.Audio(data=samples, rate=rate))\n",
" IPython.display.display(IPython.display.HTML('\u003cbr\u003e\u003cbr\u003e'))\n",
" self._audio_clips[self._counter]=samples\n",
" self._counter+=1\n",
"\n",
" def rm_audio(self, count):\n",
" \"\"\"Removes the audioclip 'count' from the list of clips.\"\"\"\n",
" output.clear(output_tags=\"{0}_audio_{1}\".format(self._keyphrase, count))\n",
" self._audio_clips.pop(count)\n",
"\n",
" def decode_webm(self, data):\n",
" \"\"\"Decodes a webm audio clip in a np.array of samples.\"\"\"\n",
" sample_rate=16000\n",
" process = (ffmpeg\n",
" .input('pipe:0')\n",
" .output('pipe:1', format='s16le', ar=sample_rate)\n",
" .run_async(pipe_stdin=True, pipe_stdout=True, pipe_stderr=True,\n",
" quiet=True, overwrite_output=True)\n",
" )\n",
" output, err = process.communicate(input=b64decode(data))\n",
" audio = np.frombuffer(output, dtype=np.int16).astype(np.float32)\n",
" return audio, sample_rate\n",
"\n",
" def save_as_wav_files(self, base_output_dir,\n",
" file_prefix='recording_', file_suffix=''):\n",
" \"\"\"Exports all audio clips as wav files.\n",
"\n",
" The files wav files will be written to 'base_output_dir/self._keyphrase'.\n",
" And will be named: file_prefix + str(clip_id) + file_suffix + '.wav' \n",
" \"\"\"\n",
" if not os.path.exists(base_output_dir):\n",
" os.mkdir(base_output_dir)\n",
" keyphrase_output_dir = os.path.join(base_output_dir, self._keyphrase)\n",
" if not os.path.exists(keyphrase_output_dir):\n",
" os.mkdir(keyphrase_output_dir)\n",
" for clip_id in self._audio_clips:\n",
" filename = file_prefix + str(clip_id) + file_suffix + '.wav'\n",
" output_file = os.path.join(keyphrase_output_dir, filename)\n",
" print(\"Creating:\", output_file)\n",
" scipy.io.wavfile.write(output_file, 16000, self._audio_clips[clip_id])\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "GdyxtK-mrO8l"
},
"outputs": [],
"source": [
"#@title Define AudioClipEval Class\n",
"class AudioClipEval(AudioClipRecorder):\n",
" def __init__(self, embedding_model, head_model, filter_fn, labels,\n",
" name=\"eval1\", clip_length_ms=2100):\n",
" \"\"\"Creates an AudioClipEval instance.\n",
"\n",
" When created this class prints an empty \u003cdiv\u003e tag into which the\n",
" recorded clips will be printed and a record audio button that uses\n",
" javascript to access the microphone and record an audio clip.\n",
" \n",
" Args:\n",
" embedding_model: The embedding model.\n",
" head_model: The default head model.\n",
" filter_fn: function that prepared the input to the head model.\n",
" labels: List of head model target labels.\n",
" keyphrase: The name of the keyphrase that should be recorded.\n",
" This will be displayed in the recording prompt and used as a\n",
" directory name when the recordings are exported.\n",
" clip_length_ms: The length (in ms) of each recorded audio clip.\n",
" Due to the async nature of javascript this actual amount of recorded\n",
" audio may vary by a ~20-80ms.\n",
" \"\"\"\n",
" self._counter = 0\n",
" self._keyphrase = name\n",
" keyphrase = name\n",
" self._audio_clips = {}\n",
" self._embedding_model = embedding_model\n",
" self._head_model = head_model\n",
" self._filter_fn = filter_fn\n",
" self._labels = labels\n",
" IPython.display.display(IPython.display.HTML(\n",
" AUDIOCLIP_HTML.format(keyphrase=keyphrase, clip_length_ms=clip_length_ms)))\n",
" output.register_callback('notebook.AddAudioItem' + keyphrase,\n",
" self.add_list_item)\n",
" output.register_callback('notebook.RemoveAudioItem' + keyphrase,\n",
" self.rm_audio)\n",
"\n",
" def add_list_item(self, data):\n",
" \"\"\"Adds the recorded audio to the list of clips and classifies it.\n",
"\n",
" This function is called from javascript after clip_length_ms audio has\n",
" been recorded. It prints the recorded audio clip to the \u003cdiv\u003e together with\n",
" a button that allows for it to be deleted.\n",
"\n",
" Args:\n",
" data: The recorded audio in webm format.\n",
" \"\"\"\n",
" raw_string_data = data.split(',')[1]\n",
" samples, rate = self.decode_webm(raw_string_data)\n",
" length_samples = len(samples)\n",
" detection, confidence = self.eval_audio(samples)\n",
" with output.redirect_to_element('#target{keyphrase}'.format(\n",
" keyphrase=self._keyphrase)):\n",
" with output.use_tags('{keyphrase}_audio_{counter}'.format(\n",
" counter=self._counter, keyphrase=self._keyphrase)):\n",
" IPython.display.display(IPython.display.HTML('''Audio clip {counter} - \n",
" {length} samples - \n",
" \u003cbutton id=\\'delbutton{counter}\\'\u003edel\u003c/button\u003e\n",
" \u003cscript\u003e\n",
" document.querySelector('#delbutton{counter}').onclick = () =\u003e {{\n",
" google.colab.kernel.invokeFunction('notebook.RemoveAudioItem{keyphrase}', [{counter}], {{}});\n",
" }};\n",
" \u003c/script\u003e'''.format(counter=self._counter, length=length_samples,\n",
" keyphrase=self._keyphrase)))\n",
" IPython.display.display(IPython.display.Audio(data=samples, rate=rate))\n",
" IPython.display.display(IPython.display.HTML(\n",
" '''\u003cspan id = \"result{counter}\" style=\"font-size:24px\"\u003e\n",
" detected: {detection} ({confidence})\u003cspan\u003e'''.format(\n",
" counter=self._counter, detection=detection,\n",
" confidence=confidence)))\n",
" IPython.display.display(IPython.display.HTML('\u003cbr\u003e\u003cbr\u003e'))\n",
" self._audio_clips[self._counter]=samples\n",
" self._counter+=1\n",
"\n",
" def eval_audio(self, samples, head_model=None):\n",
" \"\"\"Classifies the audio using the current or a provided model.\"\"\"\n",
" embeddings = self._embedding_model.create_embedding(samples)[0][0,:,:,:]\n",
" if head_model:\n",
" probs = head_model.infer(self._filter_fn(embeddings))\n",
" else:\n",
" probs = self._head_model.infer(self._filter_fn(embeddings))\n",
" return self._labels[np.argmax(probs)], np.amax(probs)\n",
"\n",
" def eval_on_new_model(self, head_model):\n",
" \"\"\"Reclassifies the clips using a new head model.\"\"\"\n",
" for clip_id in self._audio_clips:\n",
" samples = self._audio_clips[clip_id]\n",
" length_samples = len(samples)\n",
" detection, confidence = self.eval_audio(samples, head_model=head_model)\n",
" IPython.display.display(IPython.display.HTML(\n",
" '''Audio clip {counter} - {length} samples - \n",
" \u003cspan id = \"result{counter}\" style=\"font-size:24px\"\u003e\n",
" detected: {detection} ({confidence})\u003cspan\u003e'''.format(\n",
" counter=clip_id, length=length_samples,\n",
" detection=detection, confidence=confidence))) \n",
" IPython.display.display(IPython.display.Audio(data=samples, rate=16000))"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "gR6n9PMGIHSv"
},
"source": [
"## Load the embedding model\n",
"\n",
"The following info messages can be ignored\n",
"\n",
"\u003e *INFO:tensorflow:Saver not created because there are no variables in the graph to restore*\n",
"\n",
"Don't worry tf hub is restoring all the variables.\n",
"\n",
"You can test the model by having it produce an embedding on zeros:\n",
"\n",
"\n",
"```\n",
"speech_embedding_model.create_embedding(np.zeros((1,66000)))\n",
"```\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"colab": {},
"colab_type": "code",
"id": "O4LRwMdsIVpo"
},
"outputs": [],
"source": [
"embedding_model_url = \"https://tfhub.dev/google/speech_embedding/1\"\n",
"speech_embedding_model = TfHubWrapper(embedding_model_url)"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "omDLyJkac0RH"
},
"source": [
"## Record training data or copy from google drive\n",
"\n",
"The following cells allow you to define a set of target keyphrases and record some examples for training.\n",
"\n",
"### Optional Google Drive access.\n",
"\n",
"The recorded wav files can be uploaded (and later downloaded) from your Google drive using [PyDrive](https://gsuitedevs.github.io/PyDrive/docs/build/html/index.html). When you run the *Set up Google drive access* cell it will prompt you to log in and grant this colab permission to access your Google drive. Only if you do this will you be able to run the other Google drive cells.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "sdTXzHMSO5U5"
},
"outputs": [],
"source": [
"#@title Optional: Set up Google drive access\n",
"!pip install PyDrive\n",
"from pydrive.auth import GoogleAuth\n",
"from pydrive.drive import GoogleDrive\n",
"from google.colab import auth\n",
"from oauth2client.client import GoogleCredentials\n",
"auth.authenticate_user()\n",
"gauth = GoogleAuth()\n",
"gauth.credentials = GoogleCredentials.get_application_default()\n",
"drive = GoogleDrive(gauth)"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "G7cs8_fiN4-W"
},
"outputs": [],
"source": [
"#@title Optional: Download and untar an archive from drive\n",
"\n",
"filename = ''#@param {type:\"string\"}\n",
"#@markdown You can find the file_id by looking at its share-link.\n",
"#@markdown e.g. *1b9Lkfie2NHX-O06vPGrqzyGcGWUPul36*\n",
"file_id = ''#@param {type:\"string\"}\n",
"\n",
"downloaded = drive.CreateFile({'id':file_id})\n",
"downloaded.GetContentFile(filename)\n",
"with tarfile.open(filename, 'r:gz') as data_tar_file:\n",
" for member_info in data_tar_file.getmembers():\n",
" print(member_info.name)\n",
" data_tar_file.extract(member_info)"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "zKdnYUSpcY0G"
},
"outputs": [],
"source": [
"#@title Setup recording session and define model targets\n",
"\n",
"#@markdown Only use letters and _ for the **RECORDING_NAME** and **TARGET_WORDS**. \n",
"RECORDING_NAME = 'transportation' #@param {type:\"string\"}\n",
"target_word1 = 'hogwarts_express' #@param {type:\"string\"}\n",
"target_word2 = 'nautilus' #@param {type:\"string\"}\n",
"target_word3 = 'millennium_falcon' #@param {type:\"string\"}\n",
"target_word4 = 'enterprise' #@param {type:\"string\"}\n",
"target_word5 = '' #@param {type:\"string\"}\n",
"target_word6 = '' #@param {type:\"string\"}\n",
"clip_lengh_ms = 2100 #@param {type:\"integer\"}\n",
"\n",
"#@markdown ### Microphone access\n",
"#@markdown Please connect the microphone that you want to use\n",
"#@markdown before running this cell. You may also be asked to\n",
"#@markdown to grant colab permission to use it.\n",
"#@markdown If you have any problems check your browser settings\n",
"#@markdown and rerun the cell.\n",
"\n",
"target_words = [target_word1, target_word2, target_word3,\n",
" target_word4, target_word5, target_word6]\n",
"\n",
"OWN_TARGET_WORDS = ','.join([w for w in target_words if w is not ''])\n",
"OWN_MODEL_LABELS = ['negative', 'silence'] + OWN_TARGET_WORDS.split(',')\n",
"\n",
"word_list = OWN_TARGET_WORDS.split(',')\n",
"\n",
"t = widgets.TabBar(word_list)\n",
"\n",
"clip_recorders = {}\n",
"for label in word_list:\n",
" with t.output_to(word_list.index(label)):\n",
" clip_recorders[label] = AudioClipRecorder(keyphrase=label,\n",
" clip_length_ms=2100)\n",
"\n",
"with t.output_to(0):\n",
" print()"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "xLhKuO9eUljA"
},
"outputs": [],
"source": [
"#@title Create wav files from recording session.\n",
"\n",
"session = 'recording1_'#@param {type:\"string\"}\n",
"speaker = '_spk1'#@param {type:\"string\"}\n",
"\n",
"for label in clip_recorders:\n",
" clip_recorders[label].save_as_wav_files(base_output_dir=RECORDING_NAME,\n",
" file_prefix=session,\n",
" file_suffix=speaker)"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "3RJnwChkgl_6"
},
"outputs": [],
"source": [
"#@title Load files for training.\n",
" \n",
"all_train_example_files = collections.defaultdict(list)\n",
"\n",
"for label in OWN_TARGET_WORDS.split(','):\n",
" label_dir = os.path.join(RECORDING_NAME, label)\n",
" all_label_files = [\n",
" os.path.join(label, f)\n",
" for f in os.listdir(label_dir)\n",
" if os.path.isfile(os.path.join(label_dir, f))\n",
" ]\n",
" all_train_example_files[label].extend(all_label_files)\n",
"\n",
"progress_bar = IPython.display.display(progress(0, 100), display_id=True)\n",
"print(\"loading train data\")\n",
"train_data = EmbeddingDataFileList(\n",
" all_train_example_files, RECORDING_NAME,\n",
" targets=OWN_MODEL_LABELS, embedding_model=speech_embedding_model,\n",
" progress_bar=progress_bar)"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "FtWvEIPDGKhq"
},
"outputs": [],
"source": [
"#@title Optional: save recorded data to drive.\n",
"\n",
"archive_name = RECORDING_NAME + \"_\" + str(int(time.time())) +\".tar.gz\"\n",
"\n",
"def make_tarfile(output_filename, source_dir):\n",
" with tarfile.open(output_filename, \"w:gz\") as tar:\n",
" tar.add(source_dir, arcname=os.path.basename(source_dir))\n",
"\n",
"make_tarfile(archive_name, RECORDING_NAME)\n",
"\n",
"file1 = drive.CreateFile({'title': archive_name})\n",
"file1.SetContentFile(archive_name)\n",
"file1.Upload()\n",
"print('Saving to drive: %s, id: %s' % (file1['title'], file1['id']))"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "A1bAhd8U4h70"
},
"source": [
"# Train a model on your recorded data"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "79le-IpzVgsr"
},
"outputs": [],
"source": [
"#@title Run training\n",
"\n",
"#@markdown We assume that the keyphrase is spoken roughly in the middle\n",
"#@markdown of the loaded audio clips. With **context_size** we can choose the \n",
"#@markdown number of embeddings around the middle to use as a model input.\n",
"context_size = 16 #@param {type:\"slider\", min:1, max:28, step:1}\n",
"\n",
"filter_fn = functools.partial(cut_middle_frame, num_frames=context_size, flatten=False)\n",
"all_train_data = train_data.get_all_data_shuffled(filter_fn=filter_fn)\n",
"all_eval_data = None\n",
"\n",
"head_model = \"Convolutional\" #@param [\"Convolutional\", \"Fully_Connected\"] {type:\"string\"}\n",
"\n",
"#@markdown Suggested **learning_rate** range: 0.00001 - 0.01.\n",
"learning_rate = 0.001 #@param {type:\"number\"}\n",
"batch_size = 32\n",
"#@markdown **epochs_per_eval** and **train_eval_loops** control how long the\n",
"#@markdown the model is trained. An epoch is defined as the model having seen\n",
"#@markdown each example at least once, with some examples twice to ensure the\n",
"#@markdown correct labeled / negatives balance.\n",
"\n",
"epochs_per_eval = 1 #@param {type:\"slider\", min:1, max:15, step:1}\n",
"train_eval_loops = 30 #@param {type:\"slider\", min:5, max:80, step:5}\n",
"\n",
"if head_model == \"Convolutional\":\n",
" model_fn = functools.partial(_conv_head_model_fn, context=context_size)\n",
"else:\n",
" model_fn = _fully_connected_model_fn\n",
"\n",
"trainer = HeadTrainer(model_fn=model_fn,\n",
" input_shape=[context_size,1,96],\n",
" num_targets=len(OWN_MODEL_LABELS),\n",
" head_learning_rate=learning_rate,\n",
" batch_size=batch_size)\n",
"\n",
"data_trained_on = 0\n",
"data = [] \n",
"train_results = []\n",
"eval_results = []\n",
"max_data = len(all_train_data) * epochs_per_eval * train_eval_loops + 10\n",
"\n",
"def plot_step(plot, max_data, data, train_results, eval_results):\n",
" plot.clf()\n",
" plot.xlim(0, max_data)\n",
" plot.ylim(0.85, 1.05)\n",
" plot.plot(data, train_results, \"bo\")\n",
" plot.plot(data, train_results, \"b\", label=\"train_results\")\n",
" if eval_results:\n",
" plot.plot(data, eval_results, \"ro\")\n",
" plot.plot(data, eval_results, \"r\", label=\"eval_results\")\n",
" plot.legend(loc='lower right', fontsize=24)\n",
" plot.xlabel('number of examples trained on', fontsize=22)\n",
" plot.ylabel('Accuracy', fontsize=22)\n",
" plot.xticks(fontsize=20)\n",
" plot.yticks(fontsize=20) \n",
"\n",
"plt.figure(figsize=(25, 7))\n",
"for loop in range(train_eval_loops):\n",
" train_accuracy, loss = trainer.epoch_train(all_train_data,\n",
" epochs=epochs_per_eval)\n",
" train_results.append(train_accuracy)\n",
" if all_eval_data:\n",
" eval_accuracy, loss = trainer.test(all_eval_data)\n",
" eval_results.append(eval_accuracy)\n",
" else:\n",
" eval_results = None\n",
"\n",
" data_trained_on += len(all_train_data) * epochs_per_eval\n",
" data.append(data_trained_on)\n",
" plot_step(plt, max_data, data, train_results, eval_results)\n",
"\n",
" IPython.display.display(plt.gcf())\n",
" if all_eval_data:\n",
" print(\"Highest eval accuracy: %.2f percent.\" % (100 * max(eval_results)))\n",
" IPython.display.clear_output(wait=True)\n",
"\n",
"if all_eval_data:\n",
" print(\"Highest eval accuracy: %.2f percent.\" % (100 * max(eval_results)))\n"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "3WAc7vr2sVAy"
},
"outputs": [],
"source": [
"#@title Test the model\n",
"clip_eval = AudioClipEval(speech_embedding_model, trainer, filter_fn, OWN_MODEL_LABELS)"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "TD4MhDAc0TJv"
},
"outputs": [],
"source": [
"#@title Rerun the test using a new head model (train a new head model first)\n",
"\n",
"clip_eval.eval_on_new_model(trainer)"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "n_7IPrnuqoKR"
},
"source": [
"## FAQ\n",
"\n",
"Q: **My model isn't very good?**\n",
"\n",
"A: The head model is very small and depends a lot on the initialisation weights:\n",
" * This default setup doesn't have a negative class so it will always detect *something*. \n",
" * Try retraining it a couple of times.\n",
" * Reduce the learning rate a little bit.\n",
" * Add more training examples:\n",
" * At 1 - 5 examples per keyphrase the model probably won't be very good.\n",
" * With around 10-20 examples per keyphrase it may work reasonably well; however, it may still fail to learn a keyphrase.\n",
" * If you only have examples from a single speaker, then it may only learn how that speaker pronounces the keyphrase.\n",
" * Make sure your keyphrase are distinctive enough:\n",
" * e.g. heads up vs ketchup\n",
"\n",
"\n",
"\n",
"\n",
"Q: **Can I export the model and use it somewhere?**\n",
"\n",
"A: Yes, there's some example code in the following cells that demonstrate how that could be done. However, this simple example model is only training a between-word classifier.\n",
"If you want to use it in any realistic setting, you will probably also want to add:\n",
" * A negative or non-target-word speech class: You could do this by recording 2-10 min of continuous speech that doesn't contain your target keyphrases.\n",
" * A non-speech / silence / background-noise class: The speech commands dataset contains some examples of non-speech background audio that could be used for this, and/or you could just leave your microphone on and record some ambient audio from the future deployment location."
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "PQp-pXtr2oCG"
},
"source": [
"# Export and reuse the head model\n",
"The following cells show how the head model you just trained can be exported and reused in a graph."
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "s2GxL706F-BD"
},
"outputs": [],
"source": [
"#@title Save the head model\n",
"\n",
"head_model_module_dir = \"head_model_module_dir\"\n",
"trainer.save_head_model(head_model_module_dir)"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "r6PfTOh7HIt1"
},
"outputs": [],
"source": [
"#@title FullModelWrapper - Example Class\n",
"\n",
"class FullModelWrapper(object):\n",
" \"\"\"Loads a saved model classifier.\"\"\"\n",
" def __init__(self, embedding_model_dir, head_model_dir):\n",
" self._graph = tf.Graph()\n",
" self._sess = tf.Session(graph=self._graph)\n",
" with self._graph.as_default():\n",
" embedding_module_spec = hub.load_module_spec(embedding_model_dir)\n",
" embedding_module = hub.Module(embedding_module_spec)\n",
" head_module_spec = hub.load_module_spec(head_model_dir)\n",
" head_module = hub.Module(head_module_spec)\n",
" self._samples = tf.placeholder(\n",
" tf.float32, shape=[1, None], name='audio_samples')\n",
" embedding = embedding_module(self._samples)\n",
" logits = head_module(embedding)\n",
" self._predictions = tf.nn.softmax(logits)\n",
" with self._sess.as_default():\n",
" self._sess.run(tf.global_variables_initializer())\n",
" \n",
" def infer(self, samples):\n",
" samples = samples.reshape((1, -1))\n",
" output = self._sess.run(\n",
" [self._predictions],\n",
" feed_dict={self._samples: samples})\n",
" return output"
]
},
{
"cell_type": "code",
"execution_count": 0,
"metadata": {
"cellView": "form",
"colab": {},
"colab_type": "code",
"id": "gS9gCV8SKIfe"
},
"outputs": [],
"source": [
"#@title Test the full model on zeros\n",
"full_model = FullModelWrapper(embedding_model_url, head_model_module_dir)\n",
"full_model.infer(np.zeros((1,32000)))"
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"collapsed_sections": [
"JndnmDMp66FL"
],
"name": "Speech Embeddings 2: Record and train on your own data.ipynb",
"private_outputs": true,
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
| {
"pile_set_name": "Github"
} |
package net.sf.jabref.plugin;
import net.sf.jabref.JabRefFrame;
import net.sf.jabref.MnemonicAwareAction;
import net.sf.jabref.Globals;
import net.sf.jabref.GUIGlobals;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.io.File;
import java.net.URL;
import java.net.MalformedURLException;
/**
* Created by IntelliJ IDEA.
* User: alver
* Date: Mar 27, 2009
* Time: 11:33:56 PM
* To change this template use File | Settings | File Templates.
*/
public class PluginInstallerAction extends MnemonicAwareAction {
private JabRefFrame frame;
public PluginInstallerAction(JabRefFrame frame) {
super(GUIGlobals.getImage("plugin"));
//System.out.println();
this.frame = frame;
putValue(NAME, Globals.menuTitle("Manage plugins"));
}
public void actionPerformed(ActionEvent actionEvent) {
ManagePluginsDialog mpd = new ManagePluginsDialog(frame);
mpd.setVisible(true);
}
}
| {
"pile_set_name": "Github"
} |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utils for building and training NN models.
"""
from __future__ import division
import math
import numpy
import tensorflow as tf
class LayerParameters(object):
  """Parameters describing one fully connected (non-conv) layer.

  Instances are consumed by BuildNetwork, which reads these attributes to
  construct the layer's variables and to fill the training_parameters dict.
  """

  def __init__(self):
    self.name = ""        # Layer name; used to name the weight/bias variables.
    self.num_units = 0    # Number of output units.
    # BUG FIX: this field was declared as `_with_bias`, but BuildNetwork reads
    # `layer_parameters.with_bias`, which would raise AttributeError.  The
    # public name matches the consumer.
    self.with_bias = False
    self.relu = False     # Whether to apply a ReLU after the matmul (+ bias).
    # Copied into training_parameters["<name>_weight"] when non-zero.
    self.gradient_l2norm_bound = 0.0
    # Copied into training_parameters["<name>_bias"] when non-zero.
    self.bias_gradient_l2norm_bound = 0.0
    self.trainable = True  # Whether the layer's weights are trainable.
    # Copied into training_parameters["<name>_weight"] when non-zero.
    self.weight_decay = 0.0
class ConvParameters(object):
  """Configuration for a single convolutional layer (see BuildNetwork)."""

  def __init__(self):
    # Identity / bookkeeping.
    self.name = ""
    self.num_outputs = 0
    self.trainable = False
    # Input geometry: a square image of side in_size with in_channels planes.
    self.in_size = 28
    self.in_channels = 1
    # Convolution kernel.
    self.patch_size = 5
    self.stride = 1
    self.out_channels = 0
    self.with_bias = True
    self.bias_stddev = 0.1
    self.relu = True
    # Max-pooling applied after the ReLU.
    self.max_pool = True
    self.max_pool_size = 2
    self.max_pool_stride = 2
# Parameters for a layered neural network.
class NetworkParameters(object):
  """Defines the overall model structure consumed by BuildNetwork."""

  def __init__(self):
    self.input_size = 0
    # Input projection mode: one of 'NONE', 'RANDOM', 'PCA'.
    self.projection_type = 'NONE'
    self.projection_dimensions = 0
    self.default_gradient_l2norm_bound = 0.0
    self.conv_parameters = []   # List of ConvParameters, applied first.
    self.layer_parameters = []  # List of LayerParameters, applied after convs.
def GetTensorOpName(x):
  """Get the name of the op that created a tensor.

  Useful for naming related tensors, as ':' in name field of op is not
  permitted.

  Args:
    x: the input tensor (anything with a `name` attribute of the form
      "<op_name>:<output_index>" or a plain op name).
  Returns:
    the name of the op.
  """
  # Tensor names look like "<op_name>:<output_index>"; rsplit from the right
  # drops the index.  When there is no ':', rsplit returns the whole string
  # as the single element, so no special case is needed.
  return x.name.rsplit(":", 1)[0]
def BuildNetwork(inputs, network_parameters):
  """Build a network using the given parameters.

  The network is assembled as: optional conv stack -> flatten -> optional
  projection -> fully connected layers, per `network_parameters`.

  Args:
    inputs: a Tensor of floats containing the input data.
    network_parameters: NetworkParameters object
      that describes the parameters for the network.
  Returns:
    outputs, projection, training_parameters: where outputs (a tensor) is
      the output of the network, projection is the projection variable (or
      None when no projection is used), and training_parameters (a
      dictionary that maps the name of each variable to a dictionary of
      parameters) is the parameters used during training.
  """
  training_parameters = {}
  num_inputs = network_parameters.input_size
  outputs = inputs
  projection = None

  # First apply convolutions, if needed
  for conv_param in network_parameters.conv_parameters:
    # Reshape the flat input back into NHWC image form for conv2d.
    outputs = tf.reshape(
        outputs,
        [-1, conv_param.in_size, conv_param.in_size,
         conv_param.in_channels])
    conv_weights_name = "%s_conv_weight" % (conv_param.name)
    conv_bias_name = "%s_conv_bias" % (conv_param.name)
    conv_std_dev = 1.0 / (conv_param.patch_size
                          * math.sqrt(conv_param.in_channels))
    conv_weights = tf.Variable(
        tf.truncated_normal([conv_param.patch_size,
                             conv_param.patch_size,
                             conv_param.in_channels,
                             conv_param.out_channels],
                            stddev=conv_std_dev),
        trainable=conv_param.trainable,
        name=conv_weights_name)
    conv_bias = tf.Variable(
        tf.truncated_normal([conv_param.out_channels],
                            stddev=conv_param.bias_stddev),
        trainable=conv_param.trainable,
        name=conv_bias_name)
    training_parameters[conv_weights_name] = {}
    training_parameters[conv_bias_name] = {}
    conv = tf.nn.conv2d(outputs, conv_weights,
                        strides=[1, conv_param.stride,
                                 conv_param.stride, 1],
                        padding="SAME")
    relud = tf.nn.relu(conv + conv_bias)
    mpd = tf.nn.max_pool(relud, ksize=[1,
                                       conv_param.max_pool_size,
                                       conv_param.max_pool_size, 1],
                         strides=[1, conv_param.max_pool_stride,
                                  conv_param.max_pool_stride, 1],
                         padding="SAME")
    outputs = mpd
    num_inputs = conv_param.num_outputs
    # this should equal
    # in_size * in_size * out_channels / (stride * max_pool_stride)

  # once all the convs are done, reshape to make it flat
  outputs = tf.reshape(outputs, [-1, num_inputs])

  # Now project, if needed.
  # BUG FIX: this comparison used `is not "NONE"`, i.e. object identity with
  # a string literal, which is implementation-dependent; string values must
  # be compared with `!=`.
  if network_parameters.projection_type != "NONE":
    projection = tf.Variable(tf.truncated_normal(
        [num_inputs, network_parameters.projection_dimensions],
        stddev=1.0 / math.sqrt(num_inputs)), trainable=False, name="projection")
    num_inputs = network_parameters.projection_dimensions
    outputs = tf.matmul(outputs, projection)

  # Now apply any other layers
  for layer_parameters in network_parameters.layer_parameters:
    num_units = layer_parameters.num_units
    hidden_weights_name = "%s_weight" % (layer_parameters.name)
    hidden_weights = tf.Variable(
        tf.truncated_normal([num_inputs, num_units],
                            stddev=1.0 / math.sqrt(num_inputs)),
        name=hidden_weights_name, trainable=layer_parameters.trainable)
    training_parameters[hidden_weights_name] = {}
    if layer_parameters.gradient_l2norm_bound:
      training_parameters[hidden_weights_name]["gradient_l2norm_bound"] = (
          layer_parameters.gradient_l2norm_bound)
    if layer_parameters.weight_decay:
      training_parameters[hidden_weights_name]["weight_decay"] = (
          layer_parameters.weight_decay)
    outputs = tf.matmul(outputs, hidden_weights)
    if layer_parameters.with_bias:
      hidden_biases_name = "%s_bias" % (layer_parameters.name)
      hidden_biases = tf.Variable(tf.zeros([num_units]),
                                  name=hidden_biases_name)
      training_parameters[hidden_biases_name] = {}
      if layer_parameters.bias_gradient_l2norm_bound:
        training_parameters[hidden_biases_name][
            "bias_gradient_l2norm_bound"] = (
                layer_parameters.bias_gradient_l2norm_bound)
      outputs += hidden_biases
    if layer_parameters.relu:
      outputs = tf.nn.relu(outputs)
    # num_inputs for the next layer is num_units in the current layer.
    num_inputs = num_units

  return outputs, projection, training_parameters
def VaryRate(start, end, saturate_epochs, epoch):
  """Compute a linearly varying number.

  Decrease linearly from start to end until epoch saturate_epochs.

  Args:
    start: the initial number.
    end: the end number.
    saturate_epochs: after this we do not reduce the number; if less than
      or equal to zero, just return start.
    epoch: the current learning epoch.
  Returns:
    the calculated number.
  """
  if saturate_epochs <= 0:
    return start
  # Once saturated, hold the final value.  Checking this before computing
  # the step also avoids a ZeroDivisionError when saturate_epochs == 1
  # (the original divided by saturate_epochs - 1 unconditionally).
  if epoch >= saturate_epochs:
    return end
  if saturate_epochs == 1:
    # Only epoch 0 falls here; the value has not saturated yet.
    return start
  step = (start - end) / (saturate_epochs - 1)
  return start - step * epoch
def BatchClipByL2norm(t, upper_bound, name=None):
  """Clip an array of tensors by L2 norm.

  Shrink each dimension-0 slice of tensor (for matrix it is each row) such
  that the l2 norm is at most upper_bound. Here we clip each row as it
  corresponds to each example in the batch.

  Args:
    t: the input tensor.
    upper_bound: the upperbound of the L2 norm.
    name: optional name.
  Returns:
    the clipped tensor.
  """
  assert upper_bound > 0
  with tf.name_scope(values=[t, upper_bound], name=name,
                     default_name="batch_clip_by_l2norm") as name:
    saved_shape = tf.shape(t)
    batch_size = tf.slice(saved_shape, [0], [1])
    # Flatten every example (dimension-0 slice) into one row: [batch, -1].
    t2 = tf.reshape(t, tf.concat(axis=0, values=[batch_size, [-1]]))
    # A vector of length saved_shape[0] filled with 1/upper_bound.
    upper_bound_inv = tf.fill(tf.slice(saved_shape, [0], [1]),
                              tf.constant(1.0/upper_bound))
    # Add a small number to avoid divide by 0
    l2norm_inv = tf.rsqrt(tf.reduce_sum(t2 * t2, [1]) + 0.000001)
    # min(1/||row||, 1/upper_bound) * upper_bound is 1 for rows within the
    # bound and < 1 for rows that exceed it, so only those rows shrink.
    scale = tf.minimum(l2norm_inv, upper_bound_inv) * upper_bound
    # diag(scale) @ t2 multiplies row i of t2 by scale[i].
    clipped_t = tf.matmul(tf.diag(scale), t2)
    # Restore the caller's original shape; the op carries the scope name.
    clipped_t = tf.reshape(clipped_t, saved_shape, name=name)
  return clipped_t
def SoftThreshold(t, threshold_ratio, name=None):
  """Soft-threshold a tensor by the mean value.

  Softthreshold each dimension-0 vector (for matrix it is each column) by
  the mean of absolute value multiplied by the threshold_ratio factor. Here
  we soft threshold each column as it corresponds to each unit in a layer.

  Args:
    t: the input tensor.
    threshold_ratio: the threshold ratio.
    name: the optional name for the returned tensor.
  Returns:
    the thresholded tensor, where each entry is soft-thresholded by
    threshold_ratio times the mean of the absolute value of each column.
  """
  assert threshold_ratio >= 0
  with tf.name_scope(values=[t, threshold_ratio], name=name,
                     default_name="soft_thresholding") as name:
    saved_shape = tf.shape(t)
    # Flatten to 2-D while keeping dimension 0.
    # NOTE(review): the sibling BatchClipByL2norm wraps the trailing size as
    # [-1] before tf.concat, but here a bare scalar -1 is concatenated with a
    # rank-1 tensor — presumably this should also be [-1]; verify before
    # relying on this function.
    t2 = tf.reshape(t, tf.concat(axis=0, values=[tf.slice(saved_shape, [0], [1]), -1]))
    t_abs = tf.abs(t2)
    # Classic soft-thresholding: shrink each magnitude toward zero by the
    # per-column threshold (relu clamps at zero) and restore the sign.
    t_x = tf.sign(t2) * tf.nn.relu(t_abs -
                                   (tf.reduce_mean(t_abs, [0],
                                                   keep_dims=True) *
                                    threshold_ratio))
    return tf.reshape(t_x, saved_shape, name=name)
def AddGaussianNoise(t, sigma, name=None):
  """Perturb a tensor with i.i.d. Gaussian noise drawn from N(0, sigma^2).

  Args:
    t: the input tensor.
    sigma: the stddev of the Gaussian noise.
    name: optional name for the resulting op.

  Returns:
    A tensor equal to ``t`` plus independent Gaussian noise per entry.
  """
  with tf.name_scope(values=[t, sigma], name=name,
                     default_name="add_gaussian_noise") as name:
    perturbed = tf.add(t, tf.random_normal(tf.shape(t), stddev=sigma))
    return perturbed
def GenerateBinomialTable(m):
  """Build a table of binomial coefficients via Pascal's rule.

  Args:
    m: the size of the table.

  Returns:
    A two dimensional array T where T[i][j] = (i choose j),
    for 0 <= i, j <= m.
  """
  pascal = numpy.zeros((m + 1, m + 1), dtype=numpy.float64)
  # (i choose 0) == 1 for every row.
  pascal[:, 0] = 1
  for row in range(1, m + 1):
    for col in range(1, m + 1):
      # Pascal's rule: C(n, k) = C(n-1, k) + C(n-1, k-1).
      value = pascal[row - 1, col] + pascal[row - 1, col - 1]
      # Guard against float64 overflow for large m.
      assert not math.isnan(value) and not math.isinf(value)
      pascal[row, col] = value
  return tf.convert_to_tensor(pascal)
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2013. wyouflf ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lidroid.xutils.http;
import com.lidroid.xutils.HttpUtils;
import com.lidroid.xutils.exception.HttpException;
import com.lidroid.xutils.http.callback.DefaultHttpRedirectHandler;
import com.lidroid.xutils.http.callback.HttpRedirectHandler;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.impl.client.AbstractHttpClient;
import org.apache.http.protocol.HttpContext;
import java.io.IOException;
import java.net.UnknownHostException;
/**
 * Synchronous HTTP request executor. Consults the shared response cache
 * before hitting the network, retries failed attempts through the client's
 * {@link HttpRequestRetryHandler}, and transparently follows 301/302
 * redirects via a pluggable {@link HttpRedirectHandler}.
 */
public class SyncHttpHandler {

    private final AbstractHttpClient client;
    private final HttpContext context;

    private HttpRedirectHandler httpRedirectHandler;

    public void setHttpRedirectHandler(HttpRedirectHandler httpRedirectHandler) {
        this.httpRedirectHandler = httpRedirectHandler;
    }

    private String requestUrl;
    private String requestMethod;
    private String charset; // The default charset of response header info.

    private int retriedTimes = 0;

    public SyncHttpHandler(AbstractHttpClient client, HttpContext context, String charset) {
        this.client = client;
        this.context = context;
        this.charset = charset;
    }

    private long expiry = HttpCache.getDefaultExpiryTime();

    public void setExpiry(long expiry) {
        this.expiry = expiry;
    }

    /**
     * Executes the request, blocking until a response stream is available.
     * Loops until the response is handled, the retry handler gives up, or a
     * non-retryable {@link HttpException} is raised.
     *
     * @param request the request to execute
     * @return the response stream (possibly served from cache)
     * @throws HttpException when the request ultimately fails
     */
    public ResponseStream sendRequest(HttpRequestBase request) throws HttpException {
        HttpRequestRetryHandler retryHandler = client.getHttpRequestRetryHandler();
        while (true) {
            boolean shouldRetry = true;
            IOException ioCause = null;
            try {
                requestUrl = request.getURI().toString();
                requestMethod = request.getMethod();
                if (HttpUtils.sHttpCache.isEnabled(requestMethod)) {
                    String cached = HttpUtils.sHttpCache.get(requestUrl);
                    if (cached != null) {
                        // Cache hit: answer without touching the network.
                        return new ResponseStream(cached);
                    }
                }
                return handleResponse(client.execute(request, context));
            } catch (IOException e) {
                // Also covers UnknownHostException, a subclass of IOException.
                ioCause = e;
                shouldRetry = retryHandler.retryRequest(ioCause, ++retriedTimes, context);
            } catch (NullPointerException e) {
                ioCause = new IOException(e.getMessage());
                ioCause.initCause(e);
                shouldRetry = retryHandler.retryRequest(ioCause, ++retriedTimes, context);
            } catch (HttpException e) {
                // Already the caller-facing exception type; don't retry.
                throw e;
            } catch (Throwable e) {
                ioCause = new IOException(e.getMessage());
                ioCause.initCause(e);
                shouldRetry = retryHandler.retryRequest(ioCause, ++retriedTimes, context);
            }
            if (!shouldRetry) {
                throw new HttpException(ioCause);
            }
        }
    }

    /**
     * Turns a raw HTTP response into a {@link ResponseStream}, following
     * 301/302 redirects. Returns null when a redirect response carries no
     * follow-up request.
     */
    private ResponseStream handleResponse(HttpResponse response) throws HttpException, IOException {
        if (response == null) {
            throw new HttpException("response is null");
        }
        StatusLine statusLine = response.getStatusLine();
        int code = statusLine.getStatusCode();
        if (code < 300) {
            ResponseStream stream = new ResponseStream(response, charset, requestUrl, expiry);
            stream.setRequestMethod(requestMethod);
            return stream;
        }
        if (code == 301 || code == 302) {
            if (httpRedirectHandler == null) {
                httpRedirectHandler = new DefaultHttpRedirectHandler();
            }
            HttpRequestBase redirect = httpRedirectHandler.getDirectRequest(response);
            if (redirect != null) {
                return this.sendRequest(redirect);
            }
            return null;
        }
        if (code == 416) {
            throw new HttpException(code, "maybe the file has downloaded completely");
        }
        throw new HttpException(code, statusLine.getReasonPhrase());
    }
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<image schemaversion="6.1" name="ovfconfig-test">
<description type="system">
<author>Robert Schweikert</author>
<contact>[email protected]</contact>
<specification>OVF configuration test</specification>
</description>
<preferences>
<type image="vmx" boot="vmxboot/suse-13.1" filesystem="ext4" boottimeout="1" format="ovf">
<machine arch="x86_64" guestOS="sles-64" HWversion="10" ncpus="2" min_memory="4096" ovftype="vmware">
<vmdisk disktype="scsi" controller="lsilogic"/>
<vmnic driver="vmxnet3" interface="eth0" mode="none"/>
</machine>
<size>10240</size>
</type>
<version>1.0.0</version>
<packagemanager>zypper</packagemanager>
<rpm-check-signatures>false</rpm-check-signatures>
<rpm-force>true</rpm-force>
<locale>en_US</locale>
<keytable>us.map.gz</keytable>
</preferences>
<users group="root">
<user password="linux" pwdformat="plain" home="/root" name="root"/>
</users>
<repository type="yast2">
<source path="/tmp"/>
</repository>
<packages type="image">
<package name="kernel-default"/>
<package name="lxc"/>
<package name="vim"/>
<namedCollection name="base"/>
</packages>
<packages type="bootstrap">
<package name="filesystem"/>
<package name="glibc-locale"/>
</packages>
</image>
| {
"pile_set_name": "Github"
} |
{
"action": {
"hacking": {
"variety": [
"Unknown"
],
"vector": [
"Web application"
]
},
"malware": {
"variety": [
"Capture app data"
],
"vector": [
"Direct install"
]
}
},
"actor": {
"external": {
"country": [
"Unknown"
],
"motive": [
"Financial"
],
"region": [
"000000"
],
"variety": [
"Unknown"
]
}
},
"asset": {
"assets": [
{
"variety": "S - Web application"
}
],
"cloud": [
"Unknown"
],
"notes": "Following enumerations present before veris 1.3.3 removed: asset.governance.Unknown."
},
"attribute": {
"confidentiality": {
"data": [
{
"variety": "Payment"
}
],
"data_disclosure": "Potentially",
"data_victim": [
"Customer"
],
"state": [
"Unknown"
]
},
"integrity": {
"variety": [
"Software installation"
]
}
},
"discovery_method": {
"external": {
"variety": [
"Unknown"
]
}
},
"discovery_notes": "Ext - Unrelated third party. Discovered by security researcher who made the notifications.",
"impact": {
"overall_rating": "Unknown"
},
"incident_id": "A0392A73-AE74-4D5D-BA38-79C6DE541A78",
"plus": {
"analysis_status": "Finalized",
"analyst": "swidup",
"attribute": {
"confidentiality": {
"credit_monitoring": "Unknown",
"data_misuse": "Y"
}
},
"created": "2016-10-25T01:05:00Z",
"dbir_year": 2017,
"github": "8631",
"master_id": "A0392A73-AE74-4D5D-BA38-79C6DE541A78",
"modified": "2016-10-25T01:10:00Z",
"sub_source": "gwillem",
"timeline": {
"notification": {
"year": 2016
}
}
},
"reference": "http://www.pcworld.com/article/3131040/security/thousands-of-online-shops-compromised-for-credit-card-theft.html; https://gwillem.github.io/2016/10/11/5900-online-stores-found-skimming/; https://gitlab.com/gwillem/public-snippets/snippets/28813",
"schema_version": "1.3.4",
"security_incident": "Confirmed",
"source_id": "vcdb",
"summary": "Online skimmers are installed on vulnerable storefronts to compromise credit card data. This was found by researcher G. Willem and published in a blog post as well as numerous news articles. This one GitHub issue is expanded to one incident per storefront listed in his dataset.",
"timeline": {
"incident": {
"year": 2016
}
},
"victim": {
"country": [
"Unknown"
],
"employee_count": "Unknown",
"industry": "44",
"region": [
"000000"
],
"victim_id": "psicologia365.com"
}
} | {
"pile_set_name": "Github"
} |
// N64 'Bare Metal' Fast Quantization Multi Block GFX 16-Bit Demo by krom (Peter Lemon):
//
// Pipeline: inverse-quantize pre-computed 16-bit DCT coefficient blocks,
// run a fixed-point 8x8 inverse DCT over every block, then expand each
// 8-bit sample to a greyscale 32-bit RGBA pixel in the 320x240 framebuffer.
arch n64.cpu
endian msb
output "FastQuantizationMultiBlockGFX16BIT.N64", create
fill 1052672 // Set ROM Size
origin $00000000
base $80000000 // Entry Point Of Code
include "LIB/N64.INC" // Include N64 Definitions
include "LIB/N64_HEADER.ASM" // Include 64 Byte Header & Vector Table
insert "LIB/N64_BOOTCODE.BIN" // Include 4032 Byte Boot Code
Start:
include "LIB/N64_GFX.INC" // Include Graphics Macros
N64_INIT() // Run N64 Initialisation Routine
ScreenNTSC(320, 240, BPP32, $A0100000) // Screen NTSC: 320x240, 32BPP, DRAM Origin $A0100000
// Set up pointers for inverse quantization: quant table (A0), quantized
// coefficients (A1), output coefficient buffer (A2), block counter (T0).
la a0,Q // A0 = Q
la a1,DCTQBLOCKS // A1 = DCTQ Blocks
la a2,DCT // A2 = DCT/IDCT
ori t0,r0,(320/8)*(240/8) // T0 = Block Count
// Inverse quantization: for every 8x8 block, multiply each of the 64
// quantized coefficients by the matching entry of the Q table. The Q table
// pointer is rewound 64 bytes after each block so it is reused for all blocks.
QBlockLoop:
ori t1,r0,63 // T1 = 63
// DCT Block Decode (Inverse Quantization)
QLoop:
lbu t2,0(a0) // T2 = Q
addiu a0,1 // Q++
lh t3,0(a1) // T3 = DCTQ
addiu a1,2 // DCTQ += 2
mult t2,t3 // T2 = DCTQ * Q
mflo t2
sh t2,0(a2) // DCT = T2
addiu a2,2 // DCT += 2
bnez t1,QLoop // IF (T1 != 0) Q Loop
subiu t1,1 // T1-- (Delay Slot)
subiu a0,64 // Q -= 64
bnez t0,QBlockLoop // IF (T0 != 0) Q Block Loop
subiu t0,1 // T0-- (Delay Slot)
la a0,DCT // A0 = DCT/IDCT
// Fixed-point AAN-style 8x8 inverse DCT, performed in place on each block.
// The rotation constants (4433, -15137, ...) are the usual scaled integers
// (cos values * 2^13); pass 1 keeps 13.11 precision (>> 11), pass 2 removes
// the remaining scale (>> 18). The {CTR} macro loops are unrolled at
// assembly time, so no run-time loop overhead exists inside a block.
LoopIDCT:
// Fast IDCT Block Decode
// Pass 1: Process Columns From Input, Store Into Work Array.
define CTR(0)
while {CTR} < 8 { // Static Loop Columns
// Even part: Reverse The Even Part Of The Forward DCT. The Rotator Is SQRT(2)*C(-6).
lh t0,2*{CTR}+8*2*2(a0) // T0 = Z2 = DCT[CTR + 8*2]
lh t1,2*{CTR}+8*6*2(a0) // T1 = Z3 = DCT[CTR + 8*6]
add t2,t0,t1 // Z1 = (Z2 + Z3) * 0.541196100
addi t3,r0,4433 // T3 = 0.541196100
mult t2,t3
mflo t2 // T2 = Z1
addi t3,r0,-15137 // TMP2 = Z1 + (Z3 * -1.847759065)
mult t1,t3
mflo t1
add t1,t2 // T1 = TMP2
addi t3,r0,6270 // TMP3 = Z1 + (Z2 * 0.765366865)
mult t0,t3
mflo t0
add t0,t2 // T0 = TMP3
lh t4,2*{CTR}+8*0*2(a0) // T4 = Z2 = DCT[CTR + 8*0]
lh t5,2*{CTR}+8*4*2(a0) // T5 = Z3 = DCT[CTR + 8*4]
add t2,t4,t5 // TMP0 = (Z2 + Z3) << 13
sll t2,13 // T2 = TMP0
sub t3,t4,t5 // TMP1 = (Z2 - Z3) << 13
sll t3,13 // T3 = TMP1
add t4,t2,t0 // T4 = TMP10 = TMP0 + TMP3
add t5,t3,t1 // T5 = TMP11 = TMP1 + TMP2
sub t6,t3,t1 // T6 = TMP12 = TMP1 - TMP2
sub t7,t2,t0 // T7 = TMP13 = TMP0 - TMP3
// Odd Part Per Figure 8; The Matrix Is Unitary And Hence Its Transpose Is Its Inverse.
lh t0,2*{CTR}+8*7*2(a0) // T0 = TMP0 = DCT[CTR + 8*7]
lh t1,2*{CTR}+8*5*2(a0) // T1 = TMP1 = DCT[CTR + 8*5]
lh t2,2*{CTR}+8*3*2(a0) // T2 = TMP2 = DCT[CTR + 8*3]
lh t3,2*{CTR}+8*1*2(a0) // T3 = TMP3 = DCT[CTR + 8*1]
add s2,t0,t2 // S2 = Z3 = TMP0 + TMP2
add s3,t1,t3 // S3 = Z4 = TMP1 + TMP3
add s4,s2,s3 // Z5 = (Z3 + Z4) * 1.175875602 # SQRT(2) * C3
addi s0,r0,9633 // S0 = 1.175875602
mult s4,s0
mflo s4 // S4 = Z5
addi s0,r0,-16069 // Z3 *= -1.961570560 # SQRT(2) * (-C3-C5)
mult s2,s0
mflo s2 // S2 = Z3
addi s0,r0,-3196 // Z4 *= -0.390180644 # SQRT(2) * ( C5-C3)
mult s3,s0
mflo s3 // S3 = Z4
add s2,s4 // S2 = Z3 += Z5
add s3,s4 // S3 = Z4 += Z5
add s0,t0,t3 // S0 = Z1 = TMP0 + TMP3
add s1,t1,t2 // S1 = Z2 = TMP1 + TMP2
addi s4,r0,-7373 // Z1 *= -0.899976223 # SQRT(2) * ( C7-C3)
mult s0,s4
mflo s0 // S0 = Z1
addi s4,r0,-20995 // Z2 *= -2.562915447 # SQRT(2) * (-C1-C3)
mult s1,s4
mflo s1 // S1 = Z2
addi s4,r0,2446 // TMP0 *= 0.298631336 # SQRT(2) * (-C1+C3+C5-C7)
mult t0,s4
mflo t0 // T0 = TMP0
addi s4,r0,16819 // TMP1 *= 2.053119869 # SQRT(2) * ( C1+C3-C5+C7)
mult t1,s4
mflo t1 // T1 = TMP1
addi s4,r0,25172 // TMP2 *= 3.072711026 # SQRT(2) * ( C1+C3+C5-C7)
mult t2,s4
mflo t2 // T2 = TMP2
addi s4,r0,12299 // TMP3 *= 1.501321110 # SQRT(2) * ( C1+C3-C5-C7)
mult t3,s4
mflo t3 // T3 = TMP3
add t0,s0 // TMP0 += Z1 + Z3
add t0,s2 // T0 = TMP0
add t1,s1 // TMP1 += Z2 + Z4
add t1,s3 // T1 = TMP1
add t2,s1 // TMP2 += Z2 + Z3
add t2,s2 // T2 = TMP2
add t3,s0 // TMP3 += Z1 + Z4
add t3,s3 // R3 = TMP3
// Final Output Stage: Inputs Are TMP10..TMP13, TMP0..TMP3
add s0,t4,t3 // DCT[CTR + 8*0] = (TMP10 + TMP3) >> 11
sra s0,11
sh s0,2*{CTR}+8*0*2(a0)
sub s0,t4,t3 // DCT[CTR + 8*7] = (TMP10 - TMP3) >> 11
sra s0,11
sh s0,2*{CTR}+8*7*2(a0)
add s0,t5,t2 // DCT[CTR + 8*1] = (TMP11 + TMP2) >> 11
sra s0,11
sh s0,2*{CTR}+8*1*2(a0)
sub s0,t5,t2 // DCT[CTR + 8*6] = (TMP11 - TMP2) >> 11
sra s0,11
sh s0,2*{CTR}+8*6*2(a0)
add s0,t6,t1 // DCT[CTR + 8*2] = (TMP12 + TMP1) >> 11
sra s0,11
sh s0,2*{CTR}+8*2*2(a0)
sub s0,t6,t1 // DCT[CTR + 8*5] = (TMP12 - TMP1) >> 11
sra s0,11
sh s0,2*{CTR}+8*5*2(a0)
add s0,t7,t0 // DCT[CTR + 8*3] = (TMP13 + TMP0) >> 11
sra s0,11
sh s0,2*{CTR}+8*3*2(a0)
sub s0,t7,t0 // DCT[CTR + 8*4] = (TMP13 - TMP0) >> 11
sra s0,11
sh s0,2*{CTR}+8*4*2(a0)
evaluate CTR({CTR} + 1)
} // End Of Static Loop Columns
// Pass 2: Process Rows From Work Array, Store Into Output Array.
define CTR(0)
while {CTR} < 8 { // Static Loop Rows
// Even part: Reverse The Even Part Of The Forward DCT. The Rotator Is SQRT(2)*C(-6).
lh t0,2*{CTR}*8+2*2(a0) // T0 = Z2 = DCT[CTR*8 + 2]
lh t1,2*{CTR}*8+6*2(a0) // T1 = Z3 = DCT[CTR*8 + 6]
add t2,t0,t1 // Z1 = (Z2 + Z3) * 0.541196100
addi t3,r0,4433 // T3 = 0.541196100
mult t2,t3
mflo t2 // T2 = Z1
addi t3,r0,-15137 // TMP2 = Z1 + (Z3 * -1.847759065)
mult t1,t3
mflo t1
add t1,t2 // T1 = TMP2
addi t3,r0,6270 // TMP3 = Z1 + (Z2 * 0.765366865)
mult t0,t3
mflo t0
add t0,t2 // T0 = TMP3
lh t4,2*{CTR}*8+0*2(a0) // T4 = Z2 = DCT[CTR*8 + 0]
lh t5,2*{CTR}*8+4*2(a0) // T5 = Z3 = DCT[CTR*8 + 4]
add t2,t4,t5 // TMP0 = (Z2 + Z3) << 13
sll t2,13 // T2 = TMP0
sub t3,t4,t5 // TMP1 = (Z2 - Z3) << 13
sll t3,13 // T3 = TMP1
add t4,t2,t0 // T4 = TMP10 = TMP0 + TMP3
add t5,t3,t1 // T5 = TMP11 = TMP1 + TMP2
sub t6,t3,t1 // T6 = TMP12 = TMP1 - TMP2
sub t7,t2,t0 // T7 = TMP13 = TMP0 - TMP3
// Odd Part Per Figure 8; The Matrix Is Unitary And Hence Its Transpose Is Its Inverse.
lh t0,2*{CTR}*8+7*2(a0) // T0 = TMP0 = DCT[CTR*8 + 7]
lh t1,2*{CTR}*8+5*2(a0) // T1 = TMP1 = DCT[CTR*8 + 5]
lh t2,2*{CTR}*8+3*2(a0) // T2 = TMP2 = DCT[CTR*8 + 3]
lh t3,2*{CTR}*8+1*2(a0) // T3 = TMP3 = DCT[CTR*8 + 1]
add s2,t0,t2 // S2 = Z3 = TMP0 + TMP2
add s3,t1,t3 // S3 = Z4 = TMP1 + TMP3
add s4,s2,s3 // Z5 = (Z3 + Z4) * 1.175875602 # SQRT(2) * C3
addi s0,r0,9633 // S0 = 1.175875602
mult s4,s0
mflo s4 // S4 = Z5
addi s0,r0,-16069 // Z3 *= -1.961570560 # SQRT(2) * (-C3-C5)
mult s2,s0
mflo s2 // S2 = Z3
addi s0,r0,-3196 // Z4 *= -0.390180644 # SQRT(2) * ( C5-C3)
mult s3,s0
mflo s3 // S3 = Z4
add s2,s4 // S2 = Z3 += Z5
add s3,s4 // S3 = Z4 += Z5
add s0,t0,t3 // S0 = Z1 = TMP0 + TMP3
add s1,t1,t2 // S1 = Z2 = TMP1 + TMP2
addi s4,r0,-7373 // Z1 *= -0.899976223 # SQRT(2) * ( C7-C3)
mult s0,s4
mflo s0 // S0 = Z1
addi s4,r0,-20995 // Z2 *= -2.562915447 # SQRT(2) * (-C1-C3)
mult s1,s4
mflo s1 // S1 = Z2
addi s4,r0,2446 // TMP0 *= 0.298631336 # SQRT(2) * (-C1+C3+C5-C7)
mult t0,s4
mflo t0 // T0 = TMP0
addi s4,r0,16819 // TMP1 *= 2.053119869 # SQRT(2) * ( C1+C3-C5+C7)
mult t1,s4
mflo t1 // T1 = TMP1
addi s4,r0,25172 // TMP2 *= 3.072711026 # SQRT(2) * ( C1+C3+C5-C7)
mult t2,s4
mflo t2 // T2 = TMP2
addi s4,r0,12299 // TMP3 *= 1.501321110 # SQRT(2) * ( C1+C3-C5-C7)
mult t3,s4
mflo t3 // T3 = TMP3
add t0,s0 // TMP0 += Z1 + Z3
add t0,s2 // T0 = TMP0
add t1,s1 // TMP1 += Z2 + Z4
add t1,s3 // T1 = TMP1
add t2,s1 // TMP2 += Z2 + Z3
add t2,s2 // T2 = TMP2
add t3,s0 // TMP3 += Z1 + Z4
add t3,s3 // R3 = TMP3
// Final Output Stage: Inputs Are TMP10..TMP13, TMP0..TMP3
add s0,t4,t3 // DCT[CTR*8 + 0] = (TMP10 + TMP3) >> 18
sra s0,18
sh s0,2*{CTR}*8+0*2(a0)
sub s0,t4,t3 // DCT[CTR*8 + 7] = (TMP10 - TMP3) >> 18
sra s0,18
sh s0,2*{CTR}*8+7*2(a0)
add s0,t5,t2 // DCT[CTR*8 + 1] = (TMP11 + TMP2) >> 18
sra s0,18
sh s0,2*{CTR}*8+1*2(a0)
sub s0,t5,t2 // DCT[CTR*8 + 6] = (TMP11 - TMP2) >> 18
sra s0,18
sh s0,2*{CTR}*8+6*2(a0)
add s0,t6,t1 // DCT[CTR*8 + 2] = (TMP12 + TMP1) >> 18
sra s0,18
sh s0,2*{CTR}*8+2*2(a0)
sub s0,t6,t1 // DCT[CTR*8 + 5] = (TMP12 - TMP1) >> 18
sra s0,18
sh s0,2*{CTR}*8+5*2(a0)
add s0,t7,t0 // DCT[CTR*8 + 3] = (TMP13 + TMP0) >> 18
sra s0,18
sh s0,2*{CTR}*8+3*2(a0)
sub s0,t7,t0 // DCT[CTR*8 + 4] = (TMP13 - TMP0) >> 18
sra s0,18
sh s0,2*{CTR}*8+4*2(a0)
evaluate CTR({CTR} + 1)
} // End Of Static Loop Rows
// Advance A0 by one 128-byte block (64 coefficients * 2 bytes, in the
// branch delay slot) and repeat until every block has been transformed.
la t0,DCT+((320*240)*2)-128 // T0 = DCT/IDCT End Offset
bne a0,t0,LoopIDCT // IF (DCT/IDCT != WRAM End Offset) Loop IDCT
addiu a0,128 // DCT/IDCT += 128 (Delay Slot)
// Blit stage: walk the 40x30 grid of 8x8 blocks, clamp each 16-bit IDCT
// sample into 0..255, replicate it into the R, G and B channels (alpha =
// 255) and store the resulting 32-bit pixel into the framebuffer.
ori s2,r0,40 // S2 = Block Row Count
ori s3,r0,29 // S3 = Block Column Count - 1
la a0,DCT // A0 = IDCT
lui a1,$A010 // A1 = VRAM
LoopBlocks:
// Copy IDCT Block To VRAM
ori t0,r0,7 // T0 = Y
ori t4,r0,255 // T4 = 255
LoopY: // While Y
ori t1,r0,7 // T1 = X
LoopX: // While X
lh t2,0(a0) // T2 = IDCT Block Pixel
addiu a0,2 // IDCT += 2
bgtz t2,Floor // Compare Pixel To 0
nop // (Delay Slot)
and t2,r0 // IF (Pixel < 0) Pixel = 0
Floor:
blt t2,t4,Ceiling // Compare Pixel To 255
nop // (Delay Slot)
or t2,t4,r0 // IF (Pixel > 255) Pixel = 255
Ceiling:
// Replicate the clamped greyscale byte into R, G, B; low byte = alpha 255.
sll t3,t2,8
or t2,t3
sll t3,8
or t2,t3
sll t2,8
or t2,t4 // T2 = 32-BIT RGB Pixel
sw t2,0(a1) // Store Pixel To VRAM
addiu a1,4 // VRAM += 4
bnez t1,LoopX // IF (X != 0) Loop X
subiu t1,1 // X-- (Delay Slot)
addiu a1,1248 // Jump 1 Scanline Down, 8 Pixels Back
bnez t0,LoopY // IF (Y != 0) Loop Y
subiu t0,1 // Y-- (Delay Slot)
subiu s2,1 // Block Row Count--
bnez s2,LoopBlocks // IF (Block Row Count != 0) LoopBlocks
subiu a1,(320*8*4)-8*4 // Jump 8 Scanlines Up, 8 Pixels Forwards (Delay Slot)
addiu a1,(320*7*4) // Jump 7 Scanlines Down
ori s2,r0,40 // Block Row Count = 40
bnez s3,LoopBlocks // IF (Block Column Count != 0) LoopBlocks
subiu s3,1 // Block Column Count-- (Delay Slot)
// Frame finished: spin forever.
Loop:
j Loop
nop // Delay Slot
// Data section. Only one Q table and one coefficient file may be active at
// a time; the commented-out variants correspond to the alternate qualities.
//Q: // JPEG Standard Quantization 8x8 Result Matrix (Quality = 10)
// db 80,55,50,80,120,200,255,255
// db 60,60,70,95,130,255,255,255
// db 70,65,80,120,200,255,255,255
// db 70,85,110,145,255,255,255,255
// db 90,110,185,255,255,255,255,255
// db 120,175,255,255,255,255,255,255
// db 245,255,255,255,255,255,255,255
// db 255,255,255,255,255,255,255,255
//Q: // JPEG Standard Quantization 8x8 Result Matrix (Quality = 50)
// db 16,11,10,16,24,40,51,61
// db 12,12,14,19,26,58,60,55
// db 14,13,16,24,40,57,69,56
// db 14,17,22,29,51,87,80,62
// db 18,22,37,56,68,109,103,77
// db 24,35,55,64,81,104,113,92
// db 49,64,78,87,103,121,120,101
// db 72,92,95,98,112,100,103,99
Q: // JPEG Standard Quantization 8x8 Result Matrix (Quality = 90)
db 3,2,2,3,5,8,10,12
db 2,2,3,4,5,12,12,11
db 3,3,3,5,8,11,14,11
db 3,3,4,6,10,17,16,12
db 4,4,7,11,14,22,21,15
db 5,7,11,13,16,21,23,18
db 10,13,16,17,21,24,24,20
db 14,18,19,20,22,20,21,20
DCTQBLOCKS: // DCT Quantization 8x8 Matrix Blocks (Signed 16-Bit)
//insert "frame10.dct" // Frame Quality = 10
//insert "frame50.dct" // Frame Quality = 50
insert "frame90.dct" // Frame Quality = 90
// Work buffer: the quantization stage writes here and the IDCT transforms
// it in place; sized for one full 320x240 frame of 16-bit coefficients.
DCT: // Discrete Cosine Transform (DCT) 8x8 Result Matrix
fill (320*240)*2
"pile_set_name": "Github"
} |
/*
* VM-Run.cpp - Run Loop
*
* Copyright (c) 2008 Higepon(Taro Minowa) <[email protected]>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* $Id: VM-Run.cpp 183 2008-07-04 06:19:28Z higepon $
*/
#include "VM-Run.h" // Included only here.
using namespace scheme;
Object VM::runLoop(Object* code, jmp_buf returnPoint, bool returnTable /* = false */)
{
#ifdef USE_DIRECT_THREADED_CODE
#include "labels.cpp"
if (returnTable) {
#ifdef ENABLE_PROFILER
labelReturn_ = reinterpret_cast<intptr_t>(&&LABEL_RETURN); // used for profiler
#endif
return Object::makeRaw(dispatch_table);
}
#endif
returnCode_[0] = Object::makeRaw(INSTRUCTION(RETURN));
returnCode_[1] = Object::makeFixnum(0);
callCode_->set(0, Object::makeRaw(INSTRUCTION(CALL)));
callCode_->set(1, Object::makeFixnum(0));
callCode_->set(2, Object::makeRaw(INSTRUCTION(HALT)));
Object operand = Object::Undef;
    // shortcut pointers
EqHashTable* const nameSpace = nameSpace_.toEqHashTable();
pc_ = code;
for (;;) {
const Object insn = *pc_++;
SWITCH((int)insn.val) {
CASE(HALT)
{
return ac_;
}
CASE(CALL)
{
operand = fetchOperand();
call_entry:
#include "call.inc.cpp"
NEXT;
}
CASE(TAIL_CALL)
{
const Object depth = fetchOperand();
VM_ASSERT(depth.isFixnum());
const Object diff = fetchOperand();
VM_ASSERT(diff.isFixnum());
sp_ = shiftArgsToBottom(sp_, depth.toFixnum(), diff.toFixnum());
operand = depth;
#include "call.inc.cpp"
NEXT;
}
CASE(APPLY)
{
const Object args = pop();
if (args.isNil()) {
callCode_->set(1, Object::makeFixnum(0));
pc_ = callCode_->code();
} else {
if (! args.isPair()) {
callAssertionViolationAfter(this, "apply", "bug?", L1(ac_));
NEXT;
}
const int length = Pair::length(args);
const int shiftLen = length > 1 ? length - 1 : 0;
Object* const sp = sp_ + shiftLen + 1; //unShiftArgs(sp_, 0, shiftLen);////
pairArgsToStack(sp, 0, args);
callCode_->set(1, Object::makeFixnum(length));
pc_ = callCode_->code();
sp_ = sp;
}
NEXT;
}
CASE(PUSH)
{
push(ac_);
NEXT;
}
CASE(CONSTANT_PUSH)
{
const Object c = fetchOperand();
ac_ = c;
push(ac_);
NEXT1;
}
CASE(ASSIGN_FREE)
{
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
referFree(n).toBox()->set(ac_);
NEXT;
}
CASE(ASSIGN_GLOBAL)
{
const Object id = fetchOperand();
if (id.isGloc()) {
id.toGloc()->setValue(ac_);
} else {
const Object val = nameSpace->ref(id, notFound_);
if (val == notFound_) {
// psyntax requires this
const Object gloc = Object::makeGloc(ac_);
nameSpace->set(id, gloc);
*(pc_ - 1) = gloc;
} else {
VM_ASSERT(val.isGloc());
val.toGloc()->setValue(ac_);
*(pc_ - 1) = val;
}
}
ac_ = Object::Undef;
NEXT1;
}
CASE(ASSIGN_LOCAL)
{
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
referLocal(n.toFixnum()).toBox()->set(ac_);
NEXT;
}
CASE(BOX)
{
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
indexSet(sp_, n.toFixnum(), Object::makeBox(index(sp_, n.toFixnum())));
NEXT;
}
CASE(CAAR)
{
if (ac_.isPair()) {
ac_ = ac_.car();
if (ac_.isPair()) {
ac_ = ac_.car();
} else {
callAssertionViolationAfter(this, "caar", "pair required", Pair::list1(ac_));
}
} else {
callAssertionViolationAfter(this, "caar", "pair required", Pair::list1(ac_));
}
NEXT1;
}
CASE(CADR)
{
if (ac_.isPair()) {
ac_ = ac_.cdr();
if (ac_.isPair()) {
ac_ = ac_.car();
} else {
callAssertionViolationAfter(this, "cadr", "pair required", Pair::list1(ac_));
}
} else {
callAssertionViolationAfter(this, "cadr", "pair required", Pair::list1(ac_));
}
NEXT1;
}
// CASE(REFER_LOCAL_CAR)
// {
// const Object n = fetchOperand();
// VM_ASSERT(n.isFixnum());
// ac_ = referLocal(n.toFixnum());
// if (ac_.isPair()) {
// ac_ = ac_.car();
// } else {
// callAssertionViolationAfter("car", "pair required", Pair::list1(ac_));
// }
// NEXT1;
// }
// CASE(REFER_LOCAL_CDR)
// {
// const Object n = fetchOperand();
// VM_ASSERT(n.isFixnum());
// ac_ = referLocal(n.toFixnum());
// if (ac_.isPair()) {
// ac_ = ac_.cdr();
// } else {
// callAssertionViolationAfter("cdr", "pair required", Pair::list1(ac_));
// }
// NEXT1;
// }
// CASE(REFER_LOCAL_CONS)
// {
// const Object n = fetchOperand();
// VM_ASSERT(n.isFixnum());
// ac_ = Object::cons(pop(), referLocal(n.toFixnum()));
// NEXT1;
// }
CASE(CAR)
{
if (ac_.isPair()) {
ac_ = ac_.car();
} else {
callAssertionViolationAfter(this, "car", "pair required", Pair::list1(ac_));
}
NEXT1;
}
CASE(CAR_PUSH)
{
if (ac_.isPair()) {
push(ac_.car());
} else {
                // TODO: including this info in the error output would be handy.
// LOG1("cl=~a\n", dc_.toClosure()->sourceInfoString());
callAssertionViolationAfter(this, "car", "pair required", Pair::list1(ac_));
}
NEXT1;
}
CASE(CDAR)
{
if (ac_.isPair()) {
ac_ = ac_.car();
if (ac_.isPair()) {
ac_ = ac_.cdr();
} else {
callAssertionViolationAfter(this, "cdar", "pair required", Pair::list1(ac_));
}
} else {
callAssertionViolationAfter(this, "cdar", "pair required", Pair::list1(ac_));
}
NEXT1;
}
CASE(CDDR)
{
if (ac_.isPair()) {
ac_ = ac_.cdr();
if (ac_.isPair()) {
ac_ = ac_.cdr();
} else {
callAssertionViolationAfter(this, "cddr", "pair required", Pair::list1(ac_));
}
} else {
callAssertionViolationAfter(this, "cddr", "pair required", Pair::list1(ac_));
}
NEXT1;
}
CASE(CDR)
{
if (ac_.isPair()) {
ac_ = ac_.cdr();
} else {
callAssertionViolationAfter(this, "cdr", "pair required", Pair::list1(ac_));
}
NEXT1;
}
// CASE(REFER_LOCAL_CDR_PUSH)
// {
// Object n = fetchOperand();
// VM_ASSERT(n.isFixnum());
// ac_ = referLocal(n.toFixnum());
// // Fall Through
// }
CASE(CDR_PUSH)
{
if (ac_.isPair()) {
push(ac_.cdr());
} else {
callAssertionViolationAfter(this, "cdr", "pair required", Pair::list1(ac_));
}
NEXT1;
}
CASE(CLOSURE)
{
const Object skipSizeObject = fetchOperand();
const Object argLengthObject = fetchOperand();
const Object isOptionalArgObjecg = fetchOperand();
const Object freeVariablesNumObject = fetchOperand();
const Object maxStackObject = fetchOperand();
const Object sourceInfo = fetchOperand();
VM_ASSERT(skipSizeObject.isFixnum());
const int skipSize = skipSizeObject.toFixnum();
VM_ASSERT(argLengthObject.isFixnum());
const int argLength = argLengthObject.toFixnum();
const bool isOptionalArg = !isOptionalArgObjecg.isFalse();
VM_ASSERT(freeVariablesNumObject.isFixnum());
const int freeVariablesNum = freeVariablesNumObject.toFixnum();
VM_ASSERT(maxStackObject.isFixnum());
const int maxStack =maxStackObject.toFixnum();
// LOG1("(CLOSURE) source=~a\n", sourceInfo);
ac_ = Object::makeClosure(pc_, skipSize, argLength, isOptionalArg, (sp_ - freeVariablesNum), freeVariablesNum, maxStack, sourceInfo);
sp_ -= freeVariablesNum;
pc_ += skipSize - 6;
NEXT1;
}
CASE(CONS)
{
ac_ = Object::cons(pop(), ac_);
NEXT1;
}
CASE(CONSTANT)
{
const Object c = fetchOperand();
ac_ = c;
NEXT1;
}
CASE(PUSH_CONSTANT)
{
push(ac_);
ac_ = fetchOperand();
NEXT1;
}
CASE(DEFINE_GLOBAL)
{
// Once multiple define was forbidden.
// But allowed to use on nmosh.
const Object id = fetchOperand();
nameSpace->set(id, Object::makeGloc(ac_));
NEXT;
}
CASE(DISPLAY)
{
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
const int freeVariablesNum = n.toFixnum();
// create display closure
const Object display = Object::makeClosure(NULL, 0, 0, false, sp_ - freeVariablesNum, freeVariablesNum, 0, Object::False);
display.toClosure()->prev = dc_;
dc_ = display;
sp_ = sp_ - freeVariablesNum;
NEXT;
}
CASE(ENTER)
{
const Object n = fetchOperand(); // not used
VM_ASSERT(n.isFixnum());
fp_ = sp_ - n.toFixnum();
NEXT;
}
CASE(PUSH_ENTER)
{
push(ac_);
const Object n = fetchOperand(); // not used
VM_ASSERT(n.isFixnum());
fp_ = sp_ - n.toFixnum();
NEXT;
}
CASE(EQ)
{
ac_ = Object::makeBool(pop().eq(ac_));
NEXT1;
}
CASE(EQV)
{
ac_ = Object::makeBool(eqv(pop(), ac_));
NEXT1;
}
CASE(EQUAL)
{
Equal e;
ac_ = Object::makeBool(e.equal(pop(), ac_));
NEXT1;
}
CASE(PUSH_FRAME)
{
push(ac_);
goto frame_entry;
}
CASE(FRAME)
{
frame_entry:
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
const int skipSize = n.toFixnum();
makeCallFrame(pc_ + skipSize - 1);
NEXT;
}
CASE(INDIRECT)
{
ac_ = ac_.toBox()->value();
NEXT1;
}
CASE(LEAVE)
{
operand= fetchOperand();
VM_ASSERT(operand.isFixnum());
Object* const sp = sp_ - operand.toFixnum();
const Object fpObject = index(sp, 0);
VM_ASSERT(fpObject.isObjectPointer());
fp_ = fpObject.toObjectPointer();
dc_ = index(sp, 1);
VM_ASSERT(dc_.isProcedure());
sp_ = sp - 2;
NEXT;
}
CASE(LET_FRAME)
{
const Object maxStack = fetchOperand();
if (maxStack.toFixnum() + sp_ >= stackEnd_) {
// printf("LET_FRAME: stack expansion\n");
expandStack(stackSize_ / 10);
}
push(dc_);
push(Object::makeObjectPointer(fp_));
NEXT;
}
CASE(LIST)
{
const Object numObject = fetchOperand();
VM_ASSERT(numObject.isFixnum());
const int num = numObject.toFixnum();
Object list = Object::Nil;
for (int i = 0; i < num; i++) {
list = Object::cons(index(sp_, i), list);
}
ac_ = list;
sp_ = sp_ - num;
NEXT1;
}
CASE(LOCAL_JMP)
{
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
pc_ += n.toFixnum() - 1;
NEXT;
}
CASE(MAKE_CONTINUATION)
{
const Object n = fetchOperand();
VM_ASSERT(sp_ >= stack_);
ac_ = Object::makeContinuation(Object::makeStack(stack_, sp_ - stack_),
n,
dynamicWinders());
NEXT1;
}
CASE(VECTOR)
{
const Object numObject = fetchOperand();
MOSH_ASSERT(numObject.isFixnum());
const int num = numObject.toFixnum();
Object vec = Object::makeVector(num);
if (num > 0) {
Vector* const v = vec.toVector();
Object arg = ac_;
for (int i = num - 1; i > 0 ; i--) {
v->set(i, arg);
arg = pop();
}
v->set(0, arg);
}
ac_ = vec;
NEXT1;
}
CASE(MAKE_VECTOR)
{
const Object n = pop();
if (n.isFixnum()) {
VM_ASSERT(n.isFixnum());
ac_ = Object::makeVector(n.toFixnum(), ac_);
} else {
callWrongTypeOfArgumentViolationAfter(this, "make-vector", "fixnum", L1(n));
}
NEXT1;
}
CASE(NOP)
{
NEXT;
}
CASE(NOT)
{
ac_ = ac_.isFalse() ? Object::True : Object::False;
NEXT1;
}
CASE(NULL_P)
{
ac_ = ac_.isNil() ? Object::True : Object::False;
NEXT1;
}
CASE(APPEND2)
{
const Object head = pop();
if (head.isList()) {
ac_ = Pair::append2(head, ac_);
} else {
callWrongTypeOfArgumentViolationAfter(this, "append", "list", L1(head));
}
NEXT1;
}
CASE(NUMBER_ADD)
{
const Object n = pop();
            // shortcut for Fixnum. Benchmarks tell me this is strongly required.
if (n.isFixnum() && ac_.isFixnum()) {
const int32_t val = n.toFixnum() + ac_.toFixnum();
ac_ = Bignum::makeInteger(val);
} else {
const Object v = ac_;
ac_ = Arithmetic::add(n, v);
if (ac_.isFalse()) {
callWrongTypeOfArgumentViolationAfter(this, "+", "number", L2(n, v));
}
}
NEXT1;
}
CASE(NUMBER_EQUAL)
{
const Object n = pop();
// short cut for Fixnum. Benchmarks tell me this is strongly required.
if (n.isFixnum() && ac_.isFixnum()) {
ac_ = Object::makeBool(n.toFixnum() == ac_.toFixnum());
} else {
if (n.isNumber() && ac_.isNumber()) {
ac_ = Object::makeBool(Arithmetic::eq(n, ac_));
} else {
callWrongTypeOfArgumentViolationAfter(this, "=", "number", L2(n, ac_));
}
}
NEXT1;
}
CASE(NUMBER_GE)
{
NUM_CMP_LOCAL(>=, >=, ge);
NEXT1;
}
CASE(NUMBER_GT)
{
NUM_CMP_LOCAL(>, >, gt);
NEXT1;
}
CASE(NUMBER_LE)
{
NUM_CMP_LOCAL(<=, <=, le);
NEXT1;
}
CASE(NUMBER_LT)
{
NUM_CMP_LOCAL(<, <, lt);
NEXT1;
}
CASE(NUMBER_MUL)
{
const Object n = pop();
if (n.isFlonum()) {
if (ac_.isFlonum()) {
ac_ = Flonum::mul(n.toFlonum(), ac_.toFlonum());
NEXT1;
} else if (Arithmetic::isRealValued(ac_)) {
ac_ = Object::makeFlonum(n.toFlonum()->value() * Arithmetic::realToDouble(ac_));
NEXT1;
}
}
if (ac_.isFlonum()) {
if (Arithmetic::isRealValued(n)) {
ac_ = Object::makeFlonum(ac_.toFlonum()->value() * Arithmetic::realToDouble(n));
NEXT1;
}
}
ac_ = Arithmetic::mul(n, ac_);
if (ac_.isFalse()) {
callAssertionViolationAfter(this, "*", "wrong type arguments", L2(n, ac_));
}
NEXT1;
}
CASE(NUMBER_DIV)
{
bool isDiv0Error = false;
const Object n = pop();
ac_ = Arithmetic::div(n, ac_, isDiv0Error);
if (isDiv0Error) {
callAssertionViolationAfter(this, "/", "division by zero", L2(n, ac_));
} else if (ac_.isFalse()) {
callWrongTypeOfArgumentViolationAfter(this, "/", "number", L2(n, ac_));
}
NEXT1;
}
CASE(NUMBER_SUB)
{
const Object n = pop();
// short cut for Fixnum. Benmarks tell me this is strongly required.
if (n.isFixnum() && ac_.isFixnum()) {
const int32_t val = n.toFixnum() - ac_.toFixnum();
ac_ = Bignum::makeInteger(val);
} else {
ac_ = Arithmetic::sub(n, ac_);
if (ac_.isFalse()) {
callWrongTypeOfArgumentViolationAfter(this, "-", "number", L2(n, ac_));
}
}
NEXT1;
}
CASE(NUMBER_SUB_PUSH)
{
const Object n = pop();
// short cut for Fixnum. Benmarks tell me this is strongly required.
if (n.isFixnum() && ac_.isFixnum()) {
const int32_t val = n.toFixnum() - ac_.toFixnum();
ac_ = Bignum::makeInteger(val);
} else {
ac_ = Arithmetic::sub(n, ac_);
}
if (ac_.isFalse()) {
callWrongTypeOfArgumentViolationAfter(this, "-", "number", L2(n, ac_));
}
push(ac_);
NEXT1;
}
CASE(NUMBER_ADD_PUSH)
{
const Object n = pop();
// short cut for Fixnum. Benmarks tell me this is strongly required.
if (n.isFixnum() && ac_.isFixnum()) {
const int32_t val = n.toFixnum() + ac_.toFixnum();
ac_ = Bignum::makeInteger(val);
} else {
ac_ = Arithmetic::add(n, ac_);
if (ac_.isFalse()) {
callWrongTypeOfArgumentViolationAfter(this, "-", "number", L2(n, ac_));
NEXT1;
}
}
push(ac_);
NEXT1;
}
CASE(PAIR_P)
{
ac_ = Object::makeBool(ac_.isPair());
NEXT1;
}
CASE(READ)
{
bool errorOccured = false;
TextualInputPort* inputPort = NULL;
if (ac_.isNil()) {
inputPort = currentInputPort_.toTextualInputPort();
} else {
if (ac_.isTextualInputPort()) {
inputPort = ac_.toTextualInputPort();
} else if (ac_.isTextualInputOutputPort()) {
inputPort = ac_.toTextualInputOutputPort();
} else {
callAssertionViolationAfter(this, "read", "textual input port required", L1(ac_));
NEXT1;
}
}
TRY_WITHOUT_DSTR
ac_ = inputPort->getDatum(errorOccured);
if (errorOccured) {
callLexicalAndIOReadAfter(this, "read", inputPort->error());
}
CATCH(ioError)
ioError.arg1 = (ac_.isNil()) ? currentInputPort_ : ac_;
ioError.who = "read";
callIOErrorAfter(this, ioError);
NEXT1;
END_TRY
NEXT1;
}
CASE(READ_CHAR)
{
TextualInputPort* inputPort = NULL;
if (ac_.isNil()) {
inputPort = currentInputPort_.toTextualInputPort();
} else {
if (ac_.isTextualInputPort()) {
inputPort = ac_.toTextualInputPort();
} else if (ac_.isTextualInputOutputPort()) {
inputPort = ac_.toTextualInputOutputPort();
} else {
callAssertionViolationAfter(this, "read", "textual input port required", L1(ac_));
NEXT1;
}
}
TRY_WITHOUT_DSTR
const ucs4char c = inputPort->getChar();
ac_= c == EOF ? Object::Eof : Object::makeChar(c);
CATCH(ioError)
ioError.arg1 = (ac_.isNil()) ? currentInputPort_ : ac_;
ioError.who = "read-char";
callIOErrorAfter(this, ioError);
NEXT1;
END_TRY
NEXT1;
}
CASE(REDUCE)
{
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
sp_ = fp_ + n.toFixnum();;
NEXT;
}
CASE(REFER_FREE)
{
operand = fetchOperand();
VM_ASSERT(operand.isFixnum());
ac_ = referFree(operand);
NEXT1;
}
CASE(REFER_FREE_PUSH)
{
push(referFree(fetchOperand()));
NEXT;
}
CASE(REFER_FREE_CALL)
{
ac_ = referFree(fetchOperand());
operand = fetchOperand();
#include "call.inc.cpp"
NEXT;
}
CASE(REFER_GLOBAL)
{
const Object id = fetchOperand();
if (id.isGloc()) {
ac_ = id.toGloc()->value();
} else {
const Object val = nameSpace->ref(id, notFound_);
if (val == notFound_) {
callUndefinedViolationAfter(this,
L1(unGenSym(id)),
"unbound variable"
// R6RS mode requires demangle of symbol.
);
} else {
ac_ = val.toGloc()->value();
*(pc_ - 1) = val;
}
}
NEXT1;
}
CASE(REFER_GLOBAL_PUSH)
{
const Object id = fetchOperand();
if (id.isGloc()) {
ac_ = id.toGloc()->value();
} else {
const Object val = nameSpace->ref(id, notFound_);
if (val == notFound_) {
callAssertionViolationAfter(this,
"eval",
"unbound variable",
// R6RS mode requires demangle of symbol.
L1(unGenSym(id)));
} else {
ac_ = val.toGloc()->value();
*(pc_ - 1) = val;
}
}
push(ac_);
NEXT1;
}
CASE(REFER_GLOBAL_CALL)
{
const Object id = fetchOperand();
if (id.isGloc()) {
ac_ = id.toGloc()->value();
} else {
const Object val = nameSpace->ref(id, notFound_);
if (val == notFound_) {
callAssertionViolationAfter(this,
"eval",
"unbound variable",
L1(unGenSym(id)));
NEXT1; // for error handling
} else {
ac_ = val.toGloc()->value();
*(pc_ - 1) = val;
}
}
operand = fetchOperand();
#include "call.inc.cpp"
NEXT;
}
CASE(REFER_LOCAL)
{
operand = fetchOperand();
VM_ASSERT(operand.isFixnum());
ac_ = referLocal(operand.toFixnum());
NEXT1;
}
CASE(REFER_LOCAL_CALL)
{
operand = fetchOperand();
VM_ASSERT(operand.isFixnum());
ac_ = referLocal(operand.toFixnum());
operand = fetchOperand();
#include "call.inc.cpp"
NEXT;
}
// LOCAL_CALL is lighter than CALL
// We can omit checking closure type and arguments length.
CASE(LOCAL_CALL)
{
VM_ASSERT(ac_.isClosure());
const Closure* const c = ac_.toClosure();
if (c->maxStack + sp_ >= stackEnd_) {
// printf("CALL: stack expansion\n");
expandStack(stackSize_ / 10);
}
COUNT_CALL(ac_);
const Object argLength = fetchOperand();
VM_ASSERT(argLength.isFixnum());
dc_ = ac_;
cl_ = ac_;
pc_ = c->pc;
fp_ = sp_ - argLength.toFixnum();
NEXT;
}
CASE(LOCAL_TAIL_CALL)
{
const Object depth = fetchOperand();
VM_ASSERT(depth.isFixnum());
const Object diff = fetchOperand();
VM_ASSERT(diff.isFixnum());
sp_ = shiftArgsToBottom(sp_, depth.toFixnum(), diff.toFixnum());
VM_ASSERT(ac_.isClosure());
const Closure* const c = ac_.toClosure();
if (c->maxStack + sp_ >= stackEnd_) {
// printf("CALL: stack expansion\n");
expandStack(stackSize_ / 10);
}
COUNT_CALL(ac_);
const Object argLength = depth;
dc_ = ac_;
cl_ = ac_;
pc_ = c->pc;
fp_ = sp_ - argLength.toFixnum();
NEXT;
}
CASE(REFER_LOCAL_PUSH_CONSTANT)
{
const Object index = fetchOperand();
MOSH_ASSERT(index.isFixnum());
push(referLocal(index.toFixnum()));
ac_= fetchOperand();
NEXT1;
}
// appears on typical named let loop
CASE(REFER_LOCAL_BRANCH_NOT_NULL)
{
const Object i = fetchOperand();
MOSH_ASSERT(i.isFixnum());
ac_ = Object::makeBool(referLocal(i.toFixnum()).isNil());
BRANCH_ON_FALSE;
NEXT;
}
// appears on tak
CASE(REFER_LOCAL_BRANCH_NOT_LT)
{
const Object i = fetchOperand();
MOSH_ASSERT(i.isFixnum());
ac_ = referLocal(i.toFixnum());
NUM_CMP_LOCAL(<, <, lt);
BRANCH_ON_FALSE;
NEXT;
}
// appears on fib
CASE(REFER_LOCAL_PUSH_CONSTANT_BRANCH_NOT_LE)
{
const Object i = fetchOperand();
MOSH_ASSERT(i.isFixnum());
// we can omit "PUSH" insruction
ac_ = fetchOperand();
NUM_CMP(<=, <=, le, referLocal(i.toFixnum()));
BRANCH_ON_FALSE;
NEXT;
}
CASE(REFER_LOCAL_PUSH_CONSTANT_BRANCH_NOT_GE)
{
const Object i = fetchOperand();
MOSH_ASSERT(i.isFixnum());
// we can omit "PUSH" insruction
ac_ = fetchOperand();
NUM_CMP(>=, >=, ge, referLocal(i.toFixnum()));
BRANCH_ON_FALSE;
NEXT;
}
// appears on named let loop
CASE(REFER_LOCAL_PUSH_CONSTANT_BRANCH_NOT_NUMBER_EQUAL)
{
const Object i = fetchOperand();
MOSH_ASSERT(i.isFixnum());
// we can omit "PUSH" insruction
ac_ = fetchOperand();
NUM_CMP(==, =, eq, referLocal(i.toFixnum()));
BRANCH_ON_FALSE;
NEXT;
}
CASE(REFER_LOCAL_PUSH)
{
const Object n = fetchOperand();
VM_ASSERT(n.isFixnum());
push(referLocal(n.toFixnum()));
NEXT;
}
CASE(RESTORE_CONTINUATION)
{
// Stores arguments of the continuation to values registers.
const Object argumentsLength = fetchOperand();
VM_ASSERT(argumentsLength.isFixnum());
const int num = argumentsLength.toFixnum();
if (num > maxNumValues_ + 1) {
callAssertionViolationAfter(this, "values", "too many values", Pair::list1(argumentsLength));
}
numValues_ = num;
if (num != 0) {
for (int i = 0; i < num - 1; i++) {
values_[i] = index(sp_, num - i - 2);
}
ac_ = index(sp_, num - 1);
}
// Restore the stack
const Object stack = fetchOperand();
sp_ = stack_ + stack.toStack()->restore(stack_);
// Shift unnecessary stack
const int depth = 0;
const Object diffObject = fetchOperand();
VM_ASSERT(diffObject.isFixnum());
const int diff = diffObject.toFixnum();
sp_ = shiftArgsToBottom(sp_, depth, diff);
operand = Object::makeFixnum(0);
goto return_entry;
NEXT;
}
// CASE(NUMBER_ADD_RETURN)
// {
// const Object n = pop();
// // short cut for Fixnum. Benmarks tell me this is strongly required.
// if (n.isFixnum() && ac_.isFixnum()) {
// const int32_t val = n.toFixnum() + ac_.toFixnum();
// ac_ = Bignum::makeInteger(val);
// } else {
// ac_ = Arithmetic::add(n, ac_);
// }
// operand = fetchOperand();
// goto return_entry;
// }
CASE(RETURN)
{
operand = fetchOperand();
return_entry:
VM_ASSERT(operand.isFixnum());
Object* const sp = sp_ - operand.toFixnum();
const Object fpObject = index(sp, 0);
VM_ASSERT(fpObject.isObjectPointer());
fp_ = fpObject.toObjectPointer();
cl_ = index(sp, 1);
if (!cl_.isProcedure()) {
VM_LOG1("proc = ~a\n", cl_);
}
VM_ASSERT(cl_.isProcedure());
dc_ = index(sp, 2);
VM_ASSERT(dc_.isProcedure());
const Object pcObject = index(sp, 3);
VM_ASSERT(pcObject.isObjectPointer());
pc_ = pcObject.toObjectPointer();
sp_ = sp - 4;
NEXT;
}
CASE(SET_CAR)
{
const Object p = pop();
if (!p.isPair()) {
callAssertionViolationAfter(this, "set-car!", "pair required", Pair::list1(p));
NEXT1;
}
p.car() = ac_;
ac_ = Object::Undef;
NEXT1;
}
CASE(SET_CDR)
{
const Object p = pop();
if (!p.isPair()) {
callAssertionViolationAfter(this, "set-cdr!", "pair required", Pair::list1(p));
NEXT1;
}
p.cdr() = ac_;
ac_ = Object::Undef;
NEXT1;
}
//---------------------------- SHIFTJ -----------------------------
//
// SHIFT for embedded jump which appears in named let optimization.
// Two things happens.
// 1. SHIFT the stack (same as SHIFT operation)
// 2. Restore fp and c registers.
// This is necessary for jump which is across let or closure boundary.
// new-fp => new-sp - arg-length
//
CASE(SHIFTJ)
{
const Object depthObject = fetchOperand();
VM_ASSERT(depthObject.isFixnum());
const int depth = depthObject.toFixnum();
const Object diffObject = fetchOperand();
VM_ASSERT(diffObject.isFixnum());
const int diff = diffObject.toFixnum();
sp_ = shiftArgsToBottom(sp_, depth, diff);
fp_ = sp_ - depth;
const Object displayCount = fetchOperand();
VM_ASSERT(displayCount.isFixnum());
for (int i = displayCount.toFixnum(); i > 0; i--) {
dc_ = dc_.toClosure()->prev;
}
VM_ASSERT(dc_.isClosure());
NEXT;
}
// (SHIFT) instruction is deprecated
CASE(SHIFT)
{
const Object depthObject = fetchOperand();
VM_ASSERT(depthObject.isFixnum());
const int depth = depthObject.toFixnum();
const Object diffObject = fetchOperand();
VM_ASSERT(diffObject.isFixnum());
const int diff = diffObject.toFixnum();
sp_ = shiftArgsToBottom(sp_, depth, diff);
NEXT;
}
// (SHIFT_CALL) instruction is deprecated
CASE(SHIFT_CALL)
{
const Object depthObject = fetchOperand();
const Object diffObject = fetchOperand();
VM_ASSERT(depthObject.isFixnum());
MOSH_ASSERT(diffObject.isFixnum());
const int depth = depthObject.toFixnum();
const int diff = diffObject.toFixnum();
sp_ = shiftArgsToBottom(sp_, depth, diff);
operand = fetchOperand();
#include "call.inc.cpp"
NEXT;
MOSH_FATAL(false);
}
CASE(SYMBOL_P)
{
ac_ = Object::makeBool(ac_.isSymbol());
NEXT1;
}
CASE(TEST)
{
test_entry:
if (ac_.isFalse()) {
const Object skipSize = fetchOperand();
MOSH_ASSERT(skipSize.isFixnum());
skip(skipSize.toFixnum() - 1);
} else {
pc_++;
}
NEXT;
}
CASE(NOT_TEST)
{
ac_ = ac_.isFalse() ? Object::True : Object::False;
goto test_entry;
}
// Branch on not eq?
CASE(BRANCH_NOT_EQ)
{
ac_ = Object::makeBool(pop().eq(ac_));
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not eqv?
CASE(BRANCH_NOT_EQV)
{
ac_ = Object::makeBool(eqv(pop(), ac_));
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not equal?
CASE(BRANCH_NOT_EQUAL)
{
Equal e;
ac_ = Object::makeBool(e.equal(pop(), ac_));
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not less than or equal
CASE(BRANCH_NOT_LE)
{
NUM_CMP_LOCAL(<=, <=, le);
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not less than
CASE(BRANCH_NOT_LT)
{
NUM_CMP_LOCAL(<, <, lt);
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not greater than or equal
CASE(BRANCH_NOT_GE)
{
NUM_CMP_LOCAL(>=, >=, ge);
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not greater than
CASE(BRANCH_NOT_GT)
{
NUM_CMP_LOCAL(>, >, gt);
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not number equal
CASE(BRANCH_NOT_NUMBER_EQUAL)
{
NUM_CMP_LOCAL(==, =, eq);
BRANCH_ON_FALSE;
NEXT;
}
// Branch on not null
CASE(BRANCH_NOT_NULL)
{
ac_ = Object::makeBool(ac_.isNil());
BRANCH_ON_FALSE;
NEXT;
}
CASE(UNDEF)
{
ac_ = Object::Undef;
NEXT1;
}
CASE(VECTOR_LENGTH)
{
ac_ = Object::makeFixnum(ac_.toVector()->length());
NEXT1;
}
CASE(VECTOR_P)
{
ac_ = Object::makeBool(ac_.isVector());
NEXT1;
}
// CASE(REFER_LOCAL_VECTOR_REF)
// {
// const Object n = fetchOperand();
// MOSH_ASSERT(n.isFixnum());
// ac_ = referLocal(n.toFixnum());
// // *** Fall Through ***
// }
CASE(VECTOR_REF)
{
const Object obj = pop();
if (obj.isVector()) {
if (ac_.isFixnum()) {
const int index = ac_.toFixnum();
Vector* const v = obj.toVector();
if (v->isValidIndex(index)) {
ac_ = v->ref(index);
} else {
callAssertionViolationAfter(this,
"vector-ref",
"index out of range",
L1(ac_));
}
} else {
callAssertionViolationAfter(this,
"vector-ref",
"index exact integer required but got ",
L1(ac_));
}
} else {
callAssertionViolationAfter(this,
"vector-ref",
"vector required",
L1(obj));
}
NEXT1;
}
CASE(SIMPLE_STRUCT_REF)
{
const Object obj = pop();
if (obj.isSimpleStruct()) {
MOSH_ASSERT(ac_.isFixnum());
const int index = ac_.toFixnum();
SimpleStruct* const s = obj.toSimpleStruct();
if (s->isValidIndex(index)) {
ac_ = s->ref(index);
} else {
callAssertionViolationAfter(this,
"simple-struct-ref",
"index out of range",
L2(obj, ac_));
}
} else {
callAssertionViolationAfter(this,
"simple-struct-ref",
"simple-struct required",
L1(obj));
}
NEXT1;
}
// CASE(VECTOR_REF_PUSH)
// {
// const Object v = pop();
// MOSH_ASSERT(ac_.isFixnum());
// if (v.isVector()) {
// ac_ = v.toVector()->ref(ac_.toFixnum());
// } else {
// callAssertionViolationAfter("vector-ref",
// "vector required",
// L1(v));
// }
// push(ac_);
// NEXT1;
// }
// CASE(PUSH_CONSTANT_VECTOR_SET)
// {
// push(ac_);
// ac_ = fetchOperand();
// goto vector_set_entry;
// }
// CASE(REFER_LOCAL_VECTOR_SET)
// {
// const Object n = fetchOperand();
// MOSH_ASSERT(n.isFixnum());
// ac_ = referLocal(n.toFixnum());
// // *** Fall Through ***
// }
CASE(VECTOR_SET)
{
const Object n = pop();
const Object obj = pop();
if (obj.isVector()) {
if (n.isFixnum()) {
const int index = n.toFixnum();
Vector* const v = obj.toVector();
if (v->isValidIndex(index)) {
v->set(index, ac_);
ac_ = Object::Undef;
} else {
callAssertionViolationAfter(this,
"vector-set!",
"index out of range",
L1(n));
}
} else {
callAssertionViolationAfter(this,
"vector-set!",
"index, number required",
L1(n));
}
} else {
callAssertionViolationAfter(this,
"vector-set!",
"vector required",
L1(obj));
}
NEXT1;
}
CASE(VALUES)
{
// values stack layout
// (value 'a 'b 'c 'd)
// ==>
// =====
// a
// =====
// b
// =====
// c [ac_] = d
// =====
// values are stored in [valuez vector] and [a-reg] like following.
// #(b c d)
// [ac_] = a
const Object numObject = fetchOperand();
MOSH_ASSERT(numObject.isFixnum());
const int num = numObject.toFixnum();
if (num > maxNumValues_ + 1) {
callAssertionViolationAfter(this, "values", "too many values", Pair::list1(Object::makeFixnum(num)));
} else {
numValues_ = num;
if (num >= 0) {
for (int i = num - 1; i > 0; i--) {
values_[i - 1] = ac_;
ac_ = index(sp_, num - i - 1);
}
}
if (numValues_ > 1) {
sp_ = sp_ - (numValues_ - 1);
} else {
// there's no need to push
}
}
if (num == 0) {
ac_ = Object::Undef;
}
NEXT;
}
CASE(RECEIVE)
{
const Object reqargsObject = fetchOperand();
const Object optargObject = fetchOperand();
MOSH_ASSERT(reqargsObject.isFixnum());
const int reqargs = reqargsObject.toFixnum();
MOSH_ASSERT(optargObject.isFixnum());
const int optarg = optargObject.toFixnum();
if (numValues_ < reqargs) {
callAssertionViolationAfter(this,
"receive",
"received fewer values than expected",
L2(Object::makeFixnum(numValues_),
Object::makeFixnum(reqargs)));
NEXT;
} else if (optarg == 0 && numValues_ > reqargs) {
callAssertionViolationAfter(this,
"receive",
"received more values than expected",
L2(Object::makeFixnum(numValues_),
Object::makeFixnum(reqargs)));
NEXT;
}
// (receive (a b c) ...)
if (optarg == 0) {
if (reqargs > 0) {
push(ac_);
}
for (int i = 0; i < reqargs - 1; i++) {
push(values_[i]);
}
// (receive a ...)
} else if (reqargs == 0) {
Object ret = numValues_ == 0 ? Object::Nil : Pair::list1(ac_);
for (int i = 0; i < numValues_ - 1; i++) {
ret = Pair::appendD(ret, Pair::list1(values_[i]));
}
push(ret);
// (receive (a b . c) ...)
} else {
Object ret = Object::Nil;
push(ac_);
for (int i = 0; i < numValues_ - 1; i++) {
if (i < reqargs - 1) {
push(values_[i]);
} else {
ret = Pair::appendD(ret, Pair::list1(values_[i]));
}
}
push(ret);
}
NEXT1;
}
CASE(COMPILE_ERROR)
{
const Object who = fetchOperand();
const Object message = fetchOperand();
const Object irritants = fetchOperand();
callAssertionViolationAfter(this, who, message, irritants);
NEXT;
}
CASE(UNFIXED_JUMP)
{
callAssertionViolationAfter(this, "UNFIXED_JUMP", "bug of VM");
NEXT;
}
CASE(STOP)
{
printf("STOP for debug\n");
exit(-1);
}
CASE(DYNAMIC_WINDERS)
{
ac_ = dynamicWinders();
NEXT1;
}
DEFAULT
{
callAssertionViolationAfter(this, "VM", "unknown instruction, bug of VM");
NEXT;
}
} // SWITCH
}
}
| {
"pile_set_name": "Github"
} |
/*
* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.showcase.model;
import org.apache.commons.text.StringEscapeUtils;
import java.io.Serializable;
/**
* Skill.
*/
/**
 * A skill that an employee may possess. The skill's name doubles as the
 * entity's identifier, so {@link #getId()} and {@link #setId(Serializable)}
 * simply delegate to the name accessors.
 *
 * <p>Both read accessors return an HTML- then ECMAScript-escaped view of the
 * stored raw value, so values are safe to embed in markup and script blocks.
 */
public class Skill implements IdEntity {

	private static final long serialVersionUID = -4150317722693212439L;

	private String name;
	private String description;

	public Skill() {
	}

	public Skill(String name, String description) {
		this.name = name;
		this.description = description;
	}

	public String getName() {
		return sanitize(name);
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getDescription() {
		return sanitize(description);
	}

	public void setDescription(String description) {
		this.description = description;
	}

	public Serializable getId() {
		return getName();
	}

	public void setId(Serializable id) {
		setName((String) id);
	}

	public String toString() {
		return getName();
	}

	/** HTML4-escapes then ECMAScript-escapes a raw value for safe rendering. */
	private static String sanitize(String raw) {
		return StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(raw));
	}
}
| {
"pile_set_name": "Github"
} |
/* ar-skbuff.c: socket buffer destruction handling
*
* Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
* Written by David Howells ([email protected])
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version
* 2 of the License, or (at your option) any later version.
*/
#include <linux/module.h>
#include <linux/net.h>
#include <linux/skbuff.h>
#include <net/sock.h>
#include <net/af_rxrpc.h>
#include "ar-internal.h"
/*
 * set up for the ACK at the end of the receive phase when we discard the final
 * receive phase data packet
 * - called with softirqs disabled
 */
static void rxrpc_request_final_ACK(struct rxrpc_call *call)
{
	/* the call may be aborted before we have a chance to ACK it */
	write_lock(&call->state_lock);

	switch (call->state) {
	case RXRPC_CALL_CLIENT_RECV_REPLY:
		call->state = RXRPC_CALL_CLIENT_FINAL_ACK;
		_debug("request final ACK");

		/* get an extra ref on the call for the final-ACK generator to
		 * release */
		rxrpc_get_call(call);
		set_bit(RXRPC_CALL_ACK_FINAL, &call->events);
		/* if the ACK timer wasn't concurrently running its handler,
		 * queue the call so the event processor sees ACK_FINAL now */
		if (try_to_del_timer_sync(&call->ack_timer) >= 0)
			rxrpc_queue_call(call);
		break;

	case RXRPC_CALL_SERVER_RECV_REQUEST:
		call->state = RXRPC_CALL_SERVER_ACK_REQUEST;
		/* fall through - nothing further to schedule here */
	default:
		break;
	}

	write_unlock(&call->state_lock);
}
/*
 * drop the bottom ACK off of the call ACK window and advance the window
 */
static void rxrpc_hard_ACK_data(struct rxrpc_call *call,
				struct rxrpc_skb_priv *sp)
{
	int loop;
	u32 seq;

	spin_lock_bh(&call->lock);

	_debug("hard ACK #%u", ntohl(sp->hdr.seq));

	/* shift the whole ACK window bitmap down by one packet, carrying each
	 * word's low bit in from the word above it */
	for (loop = 0; loop < RXRPC_ACKR_WINDOW_ASZ; loop++) {
		call->ackr_window[loop] >>= 1;
		call->ackr_window[loop] |=
			call->ackr_window[loop + 1] << (BITS_PER_LONG - 1);
	}

	/* the packet being consumed must be exactly the next one in sequence */
	seq = ntohl(sp->hdr.seq);
	ASSERTCMP(seq, ==, call->rx_data_eaten + 1);
	call->rx_data_eaten = seq;

	if (call->ackr_win_top < UINT_MAX)
		call->ackr_win_top++;

	/* sanity: posted >= received >= eaten while the call is still live */
	ASSERTIFCMP(call->state <= RXRPC_CALL_COMPLETE,
		    call->rx_data_post, >=, call->rx_data_recv);
	ASSERTIFCMP(call->state <= RXRPC_CALL_COMPLETE,
		    call->rx_data_recv, >=, call->rx_data_eaten);

	if (sp->hdr.flags & RXRPC_LAST_PACKET) {
		/* that was the final data packet - arrange the final ACK */
		rxrpc_request_final_ACK(call);
	} else if (atomic_dec_and_test(&call->ackr_not_idle) &&
		   test_and_clear_bit(RXRPC_CALL_TX_SOFT_ACK, &call->flags)) {
		/* no un-ACK'd data left outstanding - propose an idle ACK */
		_debug("send Rx idle ACK");
		__rxrpc_propose_ACK(call, RXRPC_ACK_IDLE, sp->hdr.serial,
				    true);
	}

	spin_unlock_bh(&call->lock);
}
/*
 * destroy a packet that has an RxRPC control buffer
 * - advance the hard-ACK state of the parent call (done here in case something
 *   in the kernel bypasses recvmsg() and steals the packet directly off of the
 *   socket receive queue)
 */
void rxrpc_packet_destructor(struct sk_buff *skb)
{
	struct rxrpc_skb_priv *sp = rxrpc_skb(skb);
	struct rxrpc_call *call = sp->call;

	_enter("%p{%p}", skb, call);

	if (call) {
		/* send the final ACK on a client call */
		if (sp->hdr.type == RXRPC_PACKET_TYPE_DATA)
			rxrpc_hard_ACK_data(call, sp);
		/* drop the skb's reference on the call and break the link so a
		 * double-destruction can't touch the call again */
		rxrpc_put_call(call);
		sp->call = NULL;
	}

	/* release the socket's receive-buffer accounting, if attached */
	if (skb->sk)
		sock_rfree(skb);
	_leave("");
}
/**
 * rxrpc_kernel_free_skb - Free an RxRPC socket buffer
 * @skb: The socket buffer to be freed
 *
 * Let RxRPC free its own socket buffer, permitting it to maintain debug
 * accounting.
 */
void rxrpc_kernel_free_skb(struct sk_buff *skb)
{
	/* delegate to the internal helper so skb accounting stays balanced */
	rxrpc_free_skb(skb);
}
EXPORT_SYMBOL(rxrpc_kernel_free_skb);
| {
"pile_set_name": "Github"
} |
var parse = require('../');
var test = require('tape');
// Parsing "-x <tab>" must keep the tab character intact as the option value.
test('whitespace should be whitespace', function (t) {
    t.plan(1);
    var argv = parse(['-x', '\t']);
    t.equal(argv.x, '\t');
});
| {
"pile_set_name": "Github"
} |
/** Used as the `TypeError` message for "Functions" methods. */
var FUNC_ERROR_TEXT = 'Expected a function';

/**
 * Creates a function that negates the result of the predicate `func`. The
 * `func` predicate is invoked with the `this` binding and arguments of the
 * created function.
 *
 * @static
 * @memberOf _
 * @since 3.0.0
 * @category Function
 * @param {Function} predicate The predicate to negate.
 * @returns {Function} Returns the new negated function.
 * @example
 *
 * function isEven(n) {
 *   return n % 2 == 0;
 * }
 *
 * _.filter([1, 2, 3, 4, 5, 6], _.negate(isEven));
 * // => [1, 3, 5]
 */
function negate(predicate) {
  if (typeof predicate !== 'function') {
    throw new TypeError(FUNC_ERROR_TEXT);
  }
  return function() {
    // Forward `this` and all arguments, then invert the verdict.
    var verdict = predicate.apply(this, arguments);
    return !verdict;
  };
}

module.exports = negate;
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2017 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.context.active;
/**
 * Handle to an active trace registration, used to release the registration
 * once the trace finishes.
 *
 * @author Woonduk Kang(emeroad)
 */
public interface ActiveTraceHandle {

    /** No-op handle: {@link #purge(long)} does nothing. */
    ActiveTraceHandle EMPTY_HANDLE = new ActiveTraceHandle() {
        @Override
        public void purge(long purgeTime) {
        }
    };

    /**
     * Releases this handle.
     *
     * @param purgeTime timestamp of the purge (units not visible here --
     *                  presumably epoch millis; confirm with callers)
     */
    void purge(long purgeTime);
}
| {
"pile_set_name": "Github"
} |
<?php
// Power-cycles a VM on behalf of an authenticated session.
// Expects POST parameters: vm_id, power_state. Echoes the result of
// vmPowerCycle() on success, a JSON error when not logged in, and nothing
// when parameters are missing (preserved behavior for existing clients).
include dirname(__FILE__) . '/../../../functions/config.php';
require_once(dirname(__FILE__) . '/../../../functions/functions.php');

// Reject unauthenticated requests with a JSON error the client can act on.
if (!check_session()){
    echo json_encode(array('error' => 'nologin'));
    exit;
}

slash_vars();

// Read parameters defensively: direct $_POST['...'] access raises an
// undefined-index notice when the key is absent from the request.
$vm_id = isset($_POST['vm_id']) ? $_POST['vm_id'] : '';
$power_state = isset($_POST['power_state']) ? $_POST['power_state'] : '';

if (!empty($vm_id) && !empty($power_state))
    echo vmPowerCycle($vm_id, $power_state);
"pile_set_name": "Github"
} |
#pragma once
#include "Common\StepTimer.h"
#include "Common\DeviceResources.h"
#include "Content\Sample3DSceneRenderer.h"
#include "Content\SampleFpsTextRenderer.h"
// Renders Direct2D and 3D content on the screen.
namespace DolphinUniversal
{
	// Owns the app's content renderers and drives the per-frame update/render
	// loop. Implements DX::IDeviceNotify so device-dependent resources can be
	// released and recreated when the graphics device is lost/restored.
	class DolphinUniversalMain : public DX::IDeviceNotify
	{
	public:
		DolphinUniversalMain(const std::shared_ptr<DX::DeviceResources>& deviceResources);
		~DolphinUniversalMain();

		// Recreates resources that depend on the window size (e.g. on resize).
		void CreateWindowSizeDependentResources();

		// Advances the scene state by one timer tick.
		void Update();

		// Renders the current frame; the bool result presumably reports
		// whether anything was drawn -- confirm at the call site.
		bool Render();

		// IDeviceNotify
		virtual void OnDeviceLost();
		virtual void OnDeviceRestored();

	private:
		// Cached pointer to device resources.
		std::shared_ptr<DX::DeviceResources> m_deviceResources;

		// TODO: Replace with your own content renderers.
		std::unique_ptr<Sample3DSceneRenderer> m_sceneRenderer;
#ifdef USE_FPS
		std::unique_ptr<SampleFpsTextRenderer> m_fpsTextRenderer;
#endif

		// Rendering loop timer.
		DX::StepTimer m_timer;
	};
}
"pile_set_name": "Github"
} |
/*
* LZMA compressed kernel loader for Atheros AR7XXX/AR9XXX based boards
*
* Copyright (C) 2011 Gabor Juhos <[email protected]>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*
*/
#ifndef __CACHE_H
#define __CACHE_H
void flush_cache(unsigned long start_addr, unsigned long size);
#endif /* __CACHE_H */
| {
"pile_set_name": "Github"
} |
:020000023000CC
:10FC000001C0F3C0112484B790E890936100109272
:10FC10006100882369F0982F9A70923049F081FF33
:10FC200002C097EF94BF282E80E002D10C94000010
:10FC300085E08093810082E08093300188E18093A9
:10FC4000310183E08093340186E0809332018EE0BD
:10FC5000EFD0279A84E02EE33EEF91E030938500C9
:10FC60002093840096BBB09BFECF1F9AA89540912D
:10FC7000300147FD02C0815089F7CED0813479F43C
:10FC8000CBD0C82FDBD0C23811F480E004C088E0AC
:10FC9000C13809F083E0B9D080E1B7D0EECF82342B
:10FCA00019F484E1D3D0F8CF853411F485E0FACF8C
:10FCB000853581F4B1D0E82EAFD0F82E87FF07C08C
:10FCC0008BB781608BBFEE0CFF1CB8D0E5CF8BB734
:10FCD0008E7FF8CF863579F49FD08D3451F49CD047
:10FCE000CBB79AD0C170880F8C2B8BBF81E0AED080
:10FCF000CCCF83E0FCCF843609F046C08DD0C82F2E
:10FD0000D0E0DC2FCC2788D0C82B86D0D82E5E013F
:10FD10008EEFB81A00E012E04801EFEF8E1A9E0A4B
:10FD20007BD0F801808384018A149B04A9F786D0D4
:10FD3000F5E410E000E0DF1609F150E040E063E098
:10FD4000C70153D08701C12C92E0D92EF601419111
:10FD500051916F0161E0C80148D00E5F1F4F22979B
:10FD6000A9F750E040E065E0C7013FD095CF608142
:10FD7000C8018E0D9F1D79D00F5F1F4FF801FE5FE8
:10FD8000C017D107A1F788CF843701F545D0C82F18
:10FD9000D0E0DC2FCC2740D0C82B3ED0D82E4ED080
:10FDA0008701F5E4DF120BC0CE0DDF1DC80155D071
:10FDB0002CD00F5F1F4FC017D107C1F76DCFF801CF
:10FDC00087918F0122D02197D1F766CF853739F4FB
:10FDD00035D08EE11AD088E918D081E05CCF81352A
:10FDE00009F073CF88E024D070CFFC010A0167BF0F
:10FDF000E895112407B600FCFDCF667029F0452B6D
:10FE000019F481E187BFE89508959091300195FF3D
:10FE1000FCCF8093360108958091300187FFFCCF9D
:10FE20008091300184FD01C0A8958091360108952C
:10FE3000E0E6F0E098E1908380830895EDDF803282
:10FE400019F088E0F5DFFFCF84E1DFCFCF93C82F33
:10FE5000E3DFC150E9F7CF91F1CFF999FECF92BD21
:10FE600081BDF89A992780B50895262FF999FECF7C
:10FE70001FBA92BD81BD20BD0FB6F894FA9AF99AC7
:06FE80000FBE019608957B
:02FFFE000008F9
:040000033000FC00CD
:00000001FF
| {
"pile_set_name": "Github"
} |
import 'package:flutter/material.dart';
import 'package:harpy/components/common/list/list_card_animation.dart';
/// Builds a sliver indicating that no replies were found for the
/// [RepliesScreen].
class NoRepliesFound extends StatelessWidget {
  const NoRepliesFound();

  @override
  Widget build(BuildContext context) {
    final TextTheme textTheme = Theme.of(context).textTheme;

    // Centered two-line message explaining why no replies are shown.
    final Widget message = Column(
      mainAxisAlignment: MainAxisAlignment.center,
      children: <Widget>[
        Text(
          'No replies found',
          style: textTheme.subtitle1,
        ),
        const SizedBox(height: 8),
        Text(
          'Only replies of the last 7 days can be retrieved.',
          style: textTheme.subtitle2,
        ),
      ],
    );

    return SliverFillRemaining(
      hasScrollBody: false,
      child: ListCardAnimation(
        key: const Key('replies'),
        child: Padding(
          padding: const EdgeInsets.all(32),
          child: message,
        ),
      ),
    );
  }
}
| {
"pile_set_name": "Github"
} |
package info.nightscout.androidaps.plugins.pump.insight.ids;
import info.nightscout.androidaps.plugins.pump.insight.app_layer.history.HistoryReadingDirection;
import info.nightscout.androidaps.plugins.pump.insight.utils.IDStorage;
/**
 * Bidirectional mapping between {@link HistoryReadingDirection} values and
 * their numeric wire identifiers.
 */
public class HistoryReadingDirectionIDs {

    public static final IDStorage<HistoryReadingDirection, Integer> IDS = new IDStorage<>();

    static {
        // Numeric IDs presumably defined by the Insight pump's wire protocol
        // -- TODO confirm against the protocol specification.
        IDS.put(HistoryReadingDirection.FORWARD, 31);
        IDS.put(HistoryReadingDirection.BACKWARD, 227);
    }
}
| {
"pile_set_name": "Github"
} |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Drawing;
internal static partial class Interop
{
    internal static partial class User32
    {
        /// <summary>
        ///  Managed projection of the Win32 MINMAXINFO structure (winuser.h),
        ///  carrying a window's maximized size/position and min/max tracking
        ///  sizes. Field order must match the native layout exactly.
        /// </summary>
        public struct MINMAXINFO
        {
            public Point ptReserved;      // reserved; ignored by the system
            public Point ptMaxSize;       // maximized width (x) and height (y)
            public Point ptMaxPosition;   // position of the maximized window
            public Point ptMinTrackSize;  // minimum tracking (resize) size
            public Point ptMaxTrackSize;  // maximum tracking (resize) size
        }
    }
}
| {
"pile_set_name": "Github"
} |
{
"__comment": "Generated by generateResources.py function: blockstate",
"forge_marker": 1,
"defaults": {
"textures": {
"bottom": "tfc:blocks/wood/door/lower/sycamore",
"top": "tfc:blocks/wood/door/upper/sycamore"
}
},
"variants": {
"facing=east,half=lower,hinge=left,open=false": {
"model": "door_bottom"
},
"facing=south,half=lower,hinge=left,open=false": {
"model": "door_bottom",
"y": 90
},
"facing=west,half=lower,hinge=left,open=false": {
"model": "door_bottom",
"y": 180
},
"facing=north,half=lower,hinge=left,open=false": {
"model": "door_bottom",
"y": 270
},
"facing=east,half=lower,hinge=right,open=false": {
"model": "door_bottom_rh"
},
"facing=south,half=lower,hinge=right,open=false": {
"model": "door_bottom_rh",
"y": 90
},
"facing=west,half=lower,hinge=right,open=false": {
"model": "door_bottom_rh",
"y": 180
},
"facing=north,half=lower,hinge=right,open=false": {
"model": "door_bottom_rh",
"y": 270
},
"facing=east,half=lower,hinge=left,open=true": {
"model": "door_bottom_rh",
"y": 90
},
"facing=south,half=lower,hinge=left,open=true": {
"model": "door_bottom_rh",
"y": 180
},
"facing=west,half=lower,hinge=left,open=true": {
"model": "door_bottom_rh",
"y": 270
},
"facing=north,half=lower,hinge=left,open=true": {
"model": "door_bottom_rh"
},
"facing=east,half=lower,hinge=right,open=true": {
"model": "door_bottom",
"y": 270
},
"facing=south,half=lower,hinge=right,open=true": {
"model": "door_bottom"
},
"facing=west,half=lower,hinge=right,open=true": {
"model": "door_bottom",
"y": 90
},
"facing=north,half=lower,hinge=right,open=true": {
"model": "door_bottom",
"y": 180
},
"facing=east,half=upper,hinge=left,open=false": {
"model": "tfc:door_top_tfc"
},
"facing=south,half=upper,hinge=left,open=false": {
"model": "tfc:door_top_tfc",
"y": 90
},
"facing=west,half=upper,hinge=left,open=false": {
"model": "tfc:door_top_tfc",
"y": 180
},
"facing=north,half=upper,hinge=left,open=false": {
"model": "tfc:door_top_tfc",
"y": 270
},
"facing=east,half=upper,hinge=right,open=false": {
"model": "tfc:door_top_rh_tfc"
},
"facing=south,half=upper,hinge=right,open=false": {
"model": "tfc:door_top_rh_tfc",
"y": 90
},
"facing=west,half=upper,hinge=right,open=false": {
"model": "tfc:door_top_rh_tfc",
"y": 180
},
"facing=north,half=upper,hinge=right,open=false": {
"model": "tfc:door_top_rh_tfc",
"y": 270
},
"facing=east,half=upper,hinge=left,open=true": {
"model": "tfc:door_top_rh_tfc",
"y": 90
},
"facing=south,half=upper,hinge=left,open=true": {
"model": "tfc:door_top_rh_tfc",
"y": 180
},
"facing=west,half=upper,hinge=left,open=true": {
"model": "tfc:door_top_rh_tfc",
"y": 270
},
"facing=north,half=upper,hinge=left,open=true": {
"model": "tfc:door_top_rh_tfc"
},
"facing=east,half=upper,hinge=right,open=true": {
"model": "tfc:door_top_tfc",
"y": 270
},
"facing=south,half=upper,hinge=right,open=true": {
"model": "tfc:door_top_tfc"
},
"facing=west,half=upper,hinge=right,open=true": {
"model": "tfc:door_top_tfc",
"y": 90
},
"facing=north,half=upper,hinge=right,open=true": {
"model": "tfc:door_top_tfc",
"y": 180
}
}
} | {
"pile_set_name": "Github"
} |
package hcsshim
import (
"github.com/Microsoft/hcsshim/internal/hns"
)
// Subnet is associated with a network and represents a list
// of subnets available to the network
type Subnet = hns.Subnet

// MacPool is associated with a network and represents a list
// of macaddresses available to the network
type MacPool = hns.MacPool

// HNSNetwork represents a network in HNS
type HNSNetwork = hns.HNSNetwork
// HNSNetworkRequest makes a call into HNS to update/query a single network
func HNSNetworkRequest(method, path, request string) (*HNSNetwork, error) {
	return hns.HNSNetworkRequest(method, path, request)
}

// HNSListNetworkRequest makes a HNS call to query the list of available networks
func HNSListNetworkRequest(method, path, request string) ([]HNSNetwork, error) {
	return hns.HNSListNetworkRequest(method, path, request)
}

// GetHNSNetworkByID returns the HNS network with the given ID.
func GetHNSNetworkByID(networkID string) (*HNSNetwork, error) {
	return hns.GetHNSNetworkByID(networkID)
}

// GetHNSNetworkByName returns the HNS network with the given name.
func GetHNSNetworkByName(networkName string) (*HNSNetwork, error) {
	return hns.GetHNSNetworkByName(networkName)
}
| {
"pile_set_name": "Github"
} |
package pflag
import (
"bytes"
"fmt"
"strconv"
"strings"
)
// -- stringToInt Value
// stringToIntValue implements the pflag.Value interface for a
// map[string]int flag rendered as comma-separated key=value pairs.
type stringToIntValue struct {
	value   *map[string]int
	changed bool
}

// newStringToIntValue installs val as the default content of *p and wraps
// p so the flag machinery can update it.
func newStringToIntValue(val map[string]int, p *map[string]int) *stringToIntValue {
	v := &stringToIntValue{value: p}
	*v.value = val
	return v
}

// Set parses a comma-separated list of key=value pairs (format: a=1,b=2).
// The first successful call replaces the default map; later calls merge
// into it. On a malformed pair the target map is left untouched.
func (s *stringToIntValue) Set(val string) error {
	parsed := make(map[string]int)
	for _, pair := range strings.Split(val, ",") {
		kv := strings.SplitN(pair, "=", 2)
		if len(kv) != 2 {
			return fmt.Errorf("%s must be formatted as key=value", pair)
		}
		n, err := strconv.Atoi(kv[1])
		if err != nil {
			return err
		}
		parsed[kv[0]] = n
	}
	if s.changed {
		for k, n := range parsed {
			(*s.value)[k] = n
		}
	} else {
		*s.value = parsed
	}
	s.changed = true
	return nil
}

// Type reports the flag's type name as shown in usage output.
func (s *stringToIntValue) Type() string {
	return "stringToInt"
}

// String renders the map as "[k1=v1,k2=v2]"; pair order follows Go's
// randomized map iteration order.
func (s *stringToIntValue) String() string {
	var out bytes.Buffer
	out.WriteByte('[')
	first := true
	for k, n := range *s.value {
		if !first {
			out.WriteByte(',')
		}
		first = false
		out.WriteString(k)
		out.WriteByte('=')
		out.WriteString(strconv.Itoa(n))
	}
	out.WriteByte(']')
	return out.String()
}
// stringToIntConv converts a rendered "[a=1,b=2]" value back into a
// map[string]int; an empty payload yields an empty map.
func stringToIntConv(val string) (interface{}, error) {
	val = strings.Trim(val, "[]")
	// An empty string would cause an empty map
	if val == "" {
		return map[string]int{}, nil
	}
	pairs := strings.Split(val, ",")
	result := make(map[string]int, len(pairs))
	for _, pair := range pairs {
		kv := strings.SplitN(pair, "=", 2)
		if len(kv) != 2 {
			return nil, fmt.Errorf("%s must be formatted as key=value", pair)
		}
		n, err := strconv.Atoi(kv[1])
		if err != nil {
			return nil, err
		}
		result[kv[0]] = n
	}
	return result, nil
}
// GetStringToInt returns the map[string]int value of the flag with the
// given name; an empty map is returned together with the lookup error.
func (f *FlagSet) GetStringToInt(name string) (map[string]int, error) {
	result, err := f.getFlagType(name, "stringToInt", stringToIntConv)
	if err != nil {
		return map[string]int{}, err
	}
	return result.(map[string]int), nil
}
// StringToIntVar defines a stringToInt flag with specified name, default value, and usage string.
// The argument p points to a map[string]int variable in which to store the values of the multiple flags.
// The value of each argument will not try to be separated by comma.
func (f *FlagSet) StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) {
	f.VarP(newStringToIntValue(value, p), name, "", usage)
}

// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash.
func (f *FlagSet) StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) {
	f.VarP(newStringToIntValue(value, p), name, shorthand, usage)
}

// StringToIntVar defines a stringToInt flag with specified name, default value, and usage string,
// registered on the process-wide CommandLine flag set.
// The argument p points to a map[string]int variable in which to store the value of the flag.
// The value of each argument will not try to be separated by comma.
func StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) {
	CommandLine.VarP(newStringToIntValue(value, p), name, "", usage)
}

// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash.
func StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) {
	CommandLine.VarP(newStringToIntValue(value, p), name, shorthand, usage)
}
// StringToInt defines a stringToInt flag with specified name, default value, and usage string.
// The return value is the address of a map[string]int variable that stores
// the value of the flag; it is equivalent to StringToIntP with an empty shorthand.
// The value of each argument will not try to be separated by comma.
func (f *FlagSet) StringToInt(name string, value map[string]int, usage string) *map[string]int {
	return f.StringToIntP(name, "", value, usage)
}
// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash.
func (f *FlagSet) StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int {
	result := make(map[string]int)
	f.StringToIntVarP(&result, name, shorthand, value, usage)
	return &result
}
// StringToInt defines a stringToInt flag with specified name, default value, and usage string,
// registered on the process-wide CommandLine flag set.
// The return value is the address of a map[string]int variable that stores the value of the flag.
// The value of each argument will not try to be separated by comma.
func StringToInt(name string, value map[string]int, usage string) *map[string]int {
	return CommandLine.StringToIntP(name, "", value, usage)
}

// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash.
func StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int {
	return CommandLine.StringToIntP(name, shorthand, value, usage)
}
| {
"pile_set_name": "Github"
} |
package com.orbitz.consul.config;
import com.orbitz.consul.cache.CacheDescriptor;
import com.orbitz.consul.cache.ConsulCache;
import com.orbitz.consul.model.ConsulResponse;
import com.orbitz.consul.monitoring.ClientEventHandler;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import junitparams.naming.TestCaseName;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import java.math.BigInteger;
import java.time.Duration;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@link CacheConfig}: verifies the builder's defaults, each
 * builder override, the pluggable refresh-error logging hooks, and the
 * minimum-delay scheduling behaviour of a running {@link ConsulCache}.
 */
@RunWith(JUnitParamsRunner.class)
public class CacheConfigTest {

    // A freshly built config must report every documented default; the
    // default refresh-error consumer is expected to log via Logger.error.
    @Test
    public void testDefaults() {
        CacheConfig config = CacheConfig.builder().build();
        assertEquals(CacheConfig.DEFAULT_BACKOFF_DELAY, config.getMinimumBackOffDelay());
        assertEquals(CacheConfig.DEFAULT_BACKOFF_DELAY, config.getMaximumBackOffDelay());
        assertEquals(CacheConfig.DEFAULT_WATCH_DURATION, config.getWatchDuration());
        assertEquals(CacheConfig.DEFAULT_MIN_DELAY_BETWEEN_REQUESTS, config.getMinimumDurationBetweenRequests());
        assertEquals(CacheConfig.DEFAULT_MIN_DELAY_ON_EMPTY_RESULT, config.getMinimumDurationDelayOnEmptyResult());
        assertEquals(CacheConfig.DEFAULT_TIMEOUT_AUTO_ADJUSTMENT_ENABLED, config.isTimeoutAutoAdjustmentEnabled());
        assertEquals(CacheConfig.DEFAULT_TIMEOUT_AUTO_ADJUSTMENT_MARGIN, config.getTimeoutAutoAdjustmentMargin());

        AtomicBoolean loggedAsWarn = new AtomicBoolean(false);
        Logger logger = mock(Logger.class);
        doAnswer(vars -> {
            loggedAsWarn.set(true);
            return null;
        }).when(logger).error(anyString(), any(Throwable.class));
        config.getRefreshErrorLoggingConsumer().accept(logger, null, null);
        assertTrue("Should have logged as warning", loggedAsWarn.get());
    }

    @Test
    @Parameters(method = "getDurationSamples")
    @TestCaseName("Delay: {0}")
    public void testOverrideBackOffDelay(Duration backOffDelay) {
        CacheConfig config = CacheConfig.builder().withBackOffDelay(backOffDelay).build();
        assertEquals(backOffDelay, config.getMinimumBackOffDelay());
        assertEquals(backOffDelay, config.getMaximumBackOffDelay());
    }

    @Test
    @Parameters(method = "getDurationSamples")
    @TestCaseName("Delay: {0}")
    public void testOverrideMinDelayBetweenRequests(Duration delayBetweenRequests) {
        CacheConfig config = CacheConfig.builder().withMinDelayBetweenRequests(delayBetweenRequests).build();
        assertEquals(delayBetweenRequests, config.getMinimumDurationBetweenRequests());
    }

    @Test
    @Parameters(method = "getDurationSamples")
    @TestCaseName("Delay: {0}")
    public void testOverrideMinDelayOnEmptyResult(Duration delayBetweenRequests) {
        CacheConfig config = CacheConfig.builder().withMinDelayOnEmptyResult(delayBetweenRequests).build();
        assertEquals(delayBetweenRequests, config.getMinimumDurationDelayOnEmptyResult());
    }

    @Test
    @Parameters({"true", "false"})
    @TestCaseName("Enabled: {0}")
    public void testOverrideTimeoutAutoAdjustmentEnabled(boolean enabled) {
        CacheConfig config = CacheConfig.builder().withTimeoutAutoAdjustmentEnabled(enabled).build();
        assertEquals(enabled, config.isTimeoutAutoAdjustmentEnabled());
    }

    @Test
    @Parameters(method = "getDurationSamples")
    @TestCaseName("Margin: {0}")
    public void testOverrideTimeoutAutoAdjustmentMargin(Duration margin) {
        CacheConfig config = CacheConfig.builder().withTimeoutAutoAdjustmentMargin(margin).build();
        assertEquals(margin, config.getTimeoutAutoAdjustmentMargin());
    }

    // The warn/error builder switches must route refresh errors to the
    // corresponding Logger method (warn vs error).
    @Test
    @Parameters({"true", "false"})
    @TestCaseName("LogLevel as Warning: {0}")
    public void testOverrideRefreshErrorLogConsumer(boolean logLevelWarning) throws InterruptedException {
        CacheConfig config = logLevelWarning
                ? CacheConfig.builder().withRefreshErrorLoggedAsWarning().build()
                : CacheConfig.builder().withRefreshErrorLoggedAsError().build();

        AtomicBoolean logged = new AtomicBoolean(false);
        AtomicBoolean loggedAsWarn = new AtomicBoolean(false);
        Logger logger = mock(Logger.class);
        doAnswer(vars -> {
            loggedAsWarn.set(true);
            logged.set(true);
            return null;
        }).when(logger).warn(anyString(), any(Throwable.class));
        doAnswer(vars -> {
            loggedAsWarn.set(false);
            logged.set(true);
            return null;
        }).when(logger).error(anyString(), any(Throwable.class));

        config.getRefreshErrorLoggingConsumer().accept(logger, null, null);
        assertTrue(logged.get());
        assertEquals(logLevelWarning, loggedAsWarn.get());
    }

    // A fully custom logging hook (here Logger::debug) must be invoked verbatim.
    @Test
    public void testOverrideRefreshErrorLogCustom() {
        AtomicBoolean loggedAsDebug = new AtomicBoolean(false);
        Logger logger = mock(Logger.class);
        doAnswer(vars -> {
            loggedAsDebug.set(true);
            return null;
        }).when(logger).debug(anyString(), any(Throwable.class));

        CacheConfig config = CacheConfig.builder().withRefreshErrorLoggedAs(Logger::debug).build();
        config.getRefreshErrorLoggingConsumer().accept(logger, null, null);
        assertTrue(loggedAsDebug.get());
    }

    // JUnitParams source for the Duration-based tests above.
    public Object getDurationSamples() {
        return new Object[]{
                Duration.ZERO,
                Duration.ofSeconds(2),
                Duration.ofMinutes(10)
        };
    }

    // Valid min/max combinations must be stored as given; invalid ones must
    // make the builder throw (NPE or IllegalArgumentException).
    @Test
    @Parameters(method = "getMinMaxDurationSamples")
    @TestCaseName("min Delay: {0}, max Delay: {1}")
    public void testOverrideRandomBackOffDelay(Duration minDelay, Duration maxDelay, boolean isValid) {
        try {
            CacheConfig config = CacheConfig.builder().withBackOffDelay(minDelay, maxDelay).build();
            if (!isValid) {
                Assert.fail(String.format("Should not be able to build cache with min retry delay %d ms and max retry delay %d ms",
                        minDelay.toMillis(), maxDelay.toMillis()));
            }
            assertEquals(minDelay, config.getMinimumBackOffDelay());
            assertEquals(maxDelay, config.getMaximumBackOffDelay());
        } catch (NullPointerException | IllegalArgumentException e) {
            if (isValid) {
                throw new AssertionError(String.format("Should be able to build cache with min retry delay %d ms and max retry delay %d ms",
                        minDelay.toMillis(), maxDelay.toMillis()), e);
            }
        }
    }

    // JUnitParams source: {min delay, max delay, whether the pair is accepted}.
    public Object getMinMaxDurationSamples() {
        return new Object[]{
                new Object[] { Duration.ZERO, Duration.ZERO, true },
                new Object[] { Duration.ofSeconds(2), Duration.ofSeconds(2), true },
                new Object[] { Duration.ZERO, Duration.ofSeconds(2), true },
                new Object[] { Duration.ofSeconds(2), Duration.ZERO, false },
                new Object[] { Duration.ofSeconds(1), Duration.ofSeconds(2), true },
                new Object[] { Duration.ofSeconds(2), Duration.ofSeconds(1), false },
                new Object[] { Duration.ofSeconds(-1), Duration.ZERO, false },
                new Object[] { Duration.ZERO, Duration.ofSeconds(-1), false },
                new Object[] { Duration.ofSeconds(-1), Duration.ofSeconds(-1), false },
        };
    }

    // With no results, polls should be spaced by the min-delay-on-empty-result
    // interval (the supplier itself asserts the spacing).
    @Test
    public void testMinDelayOnEmptyResultWithNoResults() throws InterruptedException {
        TestCacheSupplier res = new TestCacheSupplier(0, Duration.ofMillis(100));

        TestCache cache = TestCache.createCache(CacheConfig.builder()
                .withMinDelayOnEmptyResult(Duration.ofMillis(100))
                .build(), res);
        cache.start();
        Thread.sleep(300);
        assertTrue(res.run > 0);
        cache.stop();
    }

    // With non-empty results, the ordinary min-delay-between-requests
    // interval (50 ms) should apply instead of the empty-result delay.
    @Test
    public void testMinDelayOnEmptyResultWithResults() throws InterruptedException {
        TestCacheSupplier res = new TestCacheSupplier(1, Duration.ofMillis(50));

        TestCache cache = TestCache.createCache(CacheConfig.builder()
                .withMinDelayOnEmptyResult(Duration.ofMillis(100))
                .withMinDelayBetweenRequests(Duration.ofMillis(50)) // do not blow ourselves up
                .build(), res);
        cache.start();
        Thread.sleep(300);
        assertTrue(res.run > 0);
        cache.stop();
    }

    // Minimal ConsulCache whose callback completes immediately with the
    // supplier's current result list (index 0, BigInteger.ZERO).
    static class TestCache extends ConsulCache<Integer, Integer> {

        private TestCache(Function<Integer, Integer> keyConversion, CallbackConsumer<Integer> callbackConsumer, CacheConfig cacheConfig, ClientEventHandler eventHandler, CacheDescriptor cacheDescriptor) {
            super(keyConversion, callbackConsumer, cacheConfig, eventHandler, cacheDescriptor);
        }

        static TestCache createCache(CacheConfig config, Supplier<List<Integer>> res) {
            ClientEventHandler ev = mock(ClientEventHandler.class);
            CacheDescriptor cacheDescriptor = new CacheDescriptor("test", "test");
            final CallbackConsumer<Integer> callbackConsumer = (index, callback) -> {
                callback.onComplete(new ConsulResponse<>(res.get(), 0, true, BigInteger.ZERO, null, null));
            };
            return new TestCache((i) -> i,
                    callbackConsumer,
                    config,
                    ev,
                    cacheDescriptor);
        }
    }

    // Supplier that counts its invocations and asserts (with a 20 ms
    // tolerance) that consecutive calls are spaced by expectedInterval.
    static class TestCacheSupplier implements Supplier<List<Integer>> {
        int run = 0;

        int resultCount;
        private Duration expectedInterval;
        private LocalTime lastCall;

        TestCacheSupplier(int resultCount, Duration expectedInterval) {
            this.resultCount = resultCount;
            this.expectedInterval = expectedInterval;
        }

        @Override
        public List<Integer> get() {
            if (lastCall != null) {
                long between = Duration.between(lastCall, LocalTime.now()).toMillis();
                assertTrue(String.format("expected duration between calls of %d, got %s", expectedInterval.toMillis(), between),
                        Math.abs(between - expectedInterval.toMillis()) < 20);
            }
            lastCall = LocalTime.now();
            run++;

            List<Integer> response = new ArrayList<>();
            for (int i = 0; i < resultCount; i++) {
                response.add(1);
            }
            return response;
        }
    }
}
| {
"pile_set_name": "Github"
} |
#include <iostream>
#include <iomanip>
#include <rak/timer.h>
#include "../src/torrent/object_stream.h"
#ifdef NEW_OBJECT
#include "object.h"
typedef torrent::Object return_type;
//#define OBJECTREF_MOVE(x) torrent::ObjectRef::move(x)
#define OBJECTREF_MOVE(x) x
#else
#include "../src/torrent/object.h"
typedef torrent::Object return_type;
#define OBJECTREF_MOVE(x) x
#endif
// TIME_WRAPPER(name, body) expands to a function `time_name(n)` that runs
// `body` n times and returns the elapsed wall-clock time as a rak::timer.
#define TIME_WRAPPER(name, body)                \
rak::timer                                      \
time_##name(unsigned int n) {                   \
  rak::timer started = rak::timer::current();   \
                                                \
  for (unsigned int i = 0; i < n; i++) {        \
    body;                                       \
  }                                             \
                                                \
  return rak::timer::current() - started;       \
}
// Plain-STL counterpart of torrent::Object's list type, used by the
// "std" benchmark variants below.
typedef std::list<std::string> std_list_type;

// No-op target for the "dummy" benchmark: measures bare loop/call overhead.
void f() {}

// Returns a torrent::Object wrapping a 20-character string payload.
torrent::Object func_create_string_20() { return torrent::Object("12345678901234567890"); }
std::string func_create_std_string_20() { return "12345678901234567890"; }
// Builds a torrent::Object of list type holding ten copies of a
// 20-character string. return_type and OBJECTREF_MOVE expand differently
// depending on whether NEW_OBJECT is defined (see the #ifdef block above).
return_type
func_create_string_list_20() {
  torrent::Object tmp(torrent::Object::TYPE_LIST);
  torrent::Object::list_type& listRef = tmp.as_list();

  for (int i = 0; i < 10; i++) listRef.push_back(torrent::Object("12345678901234567890"));

  return OBJECTREF_MOVE(tmp);
}
// Builds a std::list containing ten copies of a 20-character string; this
// is the plain-STL counterpart of func_create_string_list_20().
//
// Fix: the previous version constructed the list as
// `std_list_type tmp(torrent::Object::TYPE_LIST);`, which invokes
// std::list's fill constructor and pre-fills the list with
// TYPE_LIST default-constructed (empty) strings — a copy/paste of the
// torrent::Object constructor above. The list must start empty so the two
// benchmark variants do comparable work.
std_list_type
func_create_std_string_list_20() {
  std_list_type tmp;

  for (int i = 0; i < 10; i++)
    tmp.push_back("12345678901234567890");

  return tmp;
}
// Shared fixture: a pre-built 10-element string list used by the copy
// benchmark below.
torrent::Object stringList20(func_create_string_list_20());

// return_type
// func_copy_string_list_20_f() {
//   torrent::Object tmp(stringList20);
//   return OBJECTREF_MOVE(tmp);
// }

// Scratch global reused on every call so only the assignment (copy) is
// measured, not construction of a fresh object. Not reentrant.
torrent::Object tmp1;

// Copies the shared stringList20 fixture into tmp1 and returns it.
return_type
func_copy_string_list_20() {
  tmp1 = stringList20;
  return OBJECTREF_MOVE(tmp1);
}
// Instantiate the benchmark functions (time_dummy, time_string, ...); each
// runs its expression n times and returns the elapsed time (see
// TIME_WRAPPER above).
TIME_WRAPPER(dummy, f(); )
TIME_WRAPPER(string, torrent::Object s("12345678901234567890"); )
TIME_WRAPPER(std_string, std::string s("12345678901234567890"); )
TIME_WRAPPER(return_string, torrent::Object s = func_create_string_20(); )
TIME_WRAPPER(return_std_string, std::string s = func_create_std_string_20(); )
TIME_WRAPPER(return_string_list, torrent::Object s(func_create_string_list_20()); )
TIME_WRAPPER(return_std_string_list, std_list_type s(func_create_std_string_list_20()); )
TIME_WRAPPER(copy_string_list, torrent::Object s(func_copy_string_list_20()); )
// Runs each micro-benchmark 100000 times and prints the elapsed time in
// microseconds, right-aligned in an 8-character column.
int
main(int argc, char** argv) {
//   std::cout << "sizeof(torrent::Object): " << sizeof(torrent::Object) << std::endl;
//   std::cout << "sizeof(torrent::Object::value_type): " << sizeof(torrent::Object::value_type) << std::endl;
//   std::cout << "sizeof(torrent::Object::string_type): " << sizeof(torrent::Object::string_type) << std::endl;
//   std::cout << "sizeof(torrent::Object::map_type): " << sizeof(torrent::Object::map_type) << std::endl;
//   std::cout << "sizeof(torrent::Object::list_type): " << sizeof(torrent::Object::list_type) << std::endl;

  std::cout.setf(std::ios::right, std::ios::adjustfield);

  std::cout << "time_dummy:                   " << std::setw(8) << time_dummy(100000).usec() << std::endl;
  std::cout << "time_string:                  " << std::setw(8) << time_string(100000).usec() << std::endl;
  std::cout << "time_std_string:              " << std::setw(8) << time_std_string(100000).usec() << std::endl;
  std::cout << "time_return_string:           " << std::setw(8) << time_return_string(100000).usec() << std::endl;
  std::cout << "time_return_std_string:       " << std::setw(8) << time_return_std_string(100000).usec() << std::endl;
  std::cout << std::endl;
  std::cout << "time_return_string_list:      " << std::setw(8) << time_return_string_list(100000).usec() << std::endl;
  std::cout << "time_return_std_string_list:  " << std::setw(8) << time_return_std_string_list(100000).usec() << std::endl;
  std::cout << "time_copy_string_list:        " << std::setw(8) << time_copy_string_list(100000).usec() << std::endl;

  return 0;
}
| {
"pile_set_name": "Github"
} |
// @flow
import React, { Component } from 'react';
import _ from 'lodash';
const numeral = require('numeral');
type Props = {
stakes: {},
max: number,
colors: any,
height: any,
active: any
};
// StakeChart renders a horizontal stacked-bar SVG: each entry in `stakes`
// becomes a colored segment whose width is its share of `max`.
class StakeChart extends Component<Props> {
  constructor(props) {
    super(props);
    // Default segment palette ("r,g,b" component strings); callers may
    // supply their own via the `colors` prop.
    const colors = [
      '241,158,41',
      '192,71,222',
      '55,188,150',
      '68,91,200',
      '253,112,62',
      '108,182,42',
      '207,44,64'
    ];
    this.state = {
      colors: props.colors || colors
    };
  }

  render() {
    const { colors } = this.state;
    const { stakes, max, active } = this.props;
    let { height } = this.props;
    // Convert the stakes into cumulative {offset, width} fractions of `max`.
    let prev = 0;
    const parts = _.map(stakes, s => {
      const ret = {
        o: prev,
        w: s / max
      };
      prev += s / max;
      return ret;
    });
    // Highlighting only kicks in when `active` is a valid index into parts;
    // in that mode every non-active segment is drawn dimmed (see makeColor).
    const activePresent =
      active >= 0 && parts.length !== 0 && active < parts.length;
    height = height || 8; // default bar height in px
    const content = (
      <svg width="100%" height={height}>
        <g>
          <rect width="100%" height={height} style={{ fill: 'lightgrey' }} />
          <g transform="translate(0,0)">
            {_.map(parts, (p, i) => (
              <rect
                key={i}
                x={numeral(p.o).format('0%')}
                y="0%"
                width={numeral(p.w).format('0%')}
                height="100%"
                style={{
                  fill: activePresent
                    ? makeColor(colors[i % colors.length], i === active)
                    : makeColor(colors[i % colors.length])
                }}
              />
            ))}
          </g>
        </g>
      </svg>
    );
    return content;
  }
}
// Builds a CSS rgba() color from an "r,g,b" component string; inactive
// segments are rendered at 20% opacity. (Also fixes the misspelled local
// identifier `aplha` -> `alpha`.)
function makeColor(c, active = true) {
  const alpha = active ? 1 : 0.2;
  return `rgba(${c},${alpha})`;
}
export default StakeChart;
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">ScrollableItemList</string>
<string name="hello_world">Hello world!</string>
<string name="action_settings">Settings</string>
</resources>
| {
"pile_set_name": "Github"
} |
/*
* Globalize Culture moh-CA
*
* http://github.com/jquery/globalize
*
* Copyright Software Freedom Conservancy, Inc.
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* This file was generated by the Globalize Culture Generator
* Translation: bugs found in this file need to be fixed in the generator
*/
(function( window, undefined ) {

// Resolve the Globalize library in either a CommonJS or a browser-global
// environment.
var Globalize;

if ( typeof require !== "undefined"
	&& typeof exports !== "undefined"
	&& typeof module !== "undefined" ) {
	// Assume CommonJS
	Globalize = require( "globalize" );
} else {
	// Global variable
	Globalize = window.Globalize;
}

// Register the Mohawk (Canada) culture definition with Globalize, layered
// on top of the "default" base culture. NOTE: this file is generated (see
// the header) — bugs must be fixed in the Globalize Culture Generator.
Globalize.addCultureInfo( "moh-CA", "default", {
	name: "moh-CA",
	englishName: "Mohawk (Mohawk)",
	nativeName: "Kanien'kéha",
	language: "moh",
	numberFormat: {
		groupSizes: [3,0],
		percent: {
			groupSizes: [3,0]
		}
	},
	calendars: {
		standard: {
			days: {
				names: ["Awentatokentì:ke","Awentataón'ke","Ratironhia'kehronòn:ke","Soséhne","Okaristiiáhne","Ronwaia'tanentaktonhne","Entákta"],
				namesShort: ["S","M","T","W","T","F","S"]
			},
			months: {
				names: ["Tsothohrkó:Wa","Enniska","Enniskó:Wa","Onerahtókha","Onerahtohkó:Wa","Ohiari:Ha","Ohiarihkó:Wa","Seskéha","Seskehkó:Wa","Kenténha","Kentenhkó:Wa","Tsothóhrha",""]
			}
		}
	}
});

}( this ));
| {
"pile_set_name": "Github"
} |
1
1
1
1
1
0
2
1
2
0
1
2
1
1
1
1
3
2
1
4
4
1
0
2
1
0
0
2
1
1
0
1
1
1
2
1
1
1
1
1
0
1
1
1
1
0
1
1
1
1
1
1
2
1
1
2
1
0
0
1
0
1
1
1
2
0
0
3
0
0
2
2
1
1
0
0
1
2
3
0
0
0
0
2
1
2
2
0
0
0
0
1
1
1
0
0
0
1
0
0
1
1
1
2
2
1
2
1
1
0
0
4
2
1
1
1
0
0
1
1
0
1
5
2
2
0
1
2
2
1
0
1
7
2
2
0
1
3
2
1
2
1
1
1
0
0
1
1
0
2
1
1
3
0
1
3
1
2
0
1
0
1
1
0
1
1
2
2
0
1
1
0
1
1
0
1
0
5
5
9
8
4
1
4
9
9
10
2
0
1
1
0
2
1
1
3
0
4
1
1
3
1
0
2
2
1
1
2
0
2
0
0
1
0
1
0
0
0
0
1
0
1
0
1
2
0
0
0
1
0
2
1
1
0
0
2
0
1
1
0
0
1
2
0
1
0
1
0
2
0
0
0
0
1
0
1
2
0
1
0
0
0
0
2
1
1
2
0
0
0
1
1
0
1
0
1
0
3
2
0
0
0
0
1
1
1
1
1
| {
"pile_set_name": "Github"
} |
--- a/src/Listener.cpp
+++ b/src/Listener.cpp
@@ -723,6 +723,7 @@ int Listener::L2_setup (void) {
// Now optimize packet flow up the raw socket
// Establish the flow BPF to forward up only "connected" packets to this raw socket
+#ifdef HAVE_IPV6
if (l->sa_family == AF_INET6) {
#ifdef HAVE_IPV6
struct in6_addr *v6peer = SockAddr_get_in6_addr(&server->peer);
@@ -740,6 +741,9 @@ int Listener::L2_setup (void) {
return -1;
#endif /* HAVE_IPV6 */
} else {
+#else
+ {
+#endif
rc = SockAddr_v4_Connect_BPF(server->mSock, ((struct sockaddr_in *)(l))->sin_addr.s_addr, ((struct sockaddr_in *)(p))->sin_addr.s_addr, ((struct sockaddr_in *)(l))->sin_port, ((struct sockaddr_in *)(p))->sin_port);
WARN_errno( rc == SOCKET_ERROR, "l2 connect ip bpf");
}
| {
"pile_set_name": "Github"
} |
---
http_interactions:
- request:
method: get
uri: http://ps.pndsn.com/v2/auth/grant/sub-key/sub-a-mock-key?channel-group=demo&m=0&pnsdk=PubNub-Ruby/4.1.0beta1&r=1&signature=SZnlQvj8WjD36PcVX8zfqVM0zyBQ-DwYgZ_Hk-HXerE=×tamp=1465314461&ttl=1440&uuid=ruby-test-uuid-client-one&w=0
body:
encoding: UTF-8
string: ''
headers:
User-Agent:
- HTTPClient/1.0 (2.8.0, ruby 2.3.0 (2015-12-25))
Accept:
- "*/*"
Date:
- Tue, 07 Jun 2016 15:47:41 GMT
response:
status:
code: 200
message: OK
headers:
Date:
- Tue, 07 Jun 2016 15:47:41 GMT
Content-Type:
- text/javascript; charset=UTF-8
Content-Length:
- '215'
Connection:
- keep-alive
Access-Control-Allow-Origin:
- "*"
Access-Control-Allow-Methods:
- GET
Access-Control-Allow-Headers:
- Origin, X-Requested-With, Content-Type, Accept
Cache-Control:
- no-cache, no-store, must-revalidate
body:
encoding: UTF-8
string: '{"message":"Success","payload":{"level":"channel-group","subscribe_key":"sub-a-mock-key","ttl":1440,"channel-groups":{"demo":{"r":1,"w":0,"m":0}}},"service":"Access
Manager","status":200}'
http_version:
recorded_at: Tue, 07 Jun 2016 15:47:41 GMT
recorded_with: VCR 3.0.1
| {
"pile_set_name": "Github"
} |
1
00:00:32,449 --> 00:00:42,442
♪♪~
2
00:00:42,442 --> 00:00:51,442
♪♪~
3
00:01:11,938 --> 00:01:14,438
(深沢 保) ただ~いま。
4
00:01:17,944 --> 00:01:20,931
太郎。
5
00:01:20,931 --> 00:01:22,931
どこ行くんだ?
6
00:01:24,451 --> 00:01:26,970
こんな遅い時間に お前…。
7
00:01:26,970 --> 00:01:29,005
(深沢太郎) バイト。
8
00:01:29,005 --> 00:01:31,057
バイトって お前 まだ17歳だろ。
9
00:01:31,057 --> 00:01:33,443
大体 子供が こんな遅い時間に
働いていいわけ…。
(太郎) チッ。
10
00:01:33,443 --> 00:01:34,943
ほっとけよ。
11
00:01:39,449 --> 00:01:41,449
フゥ~。
12
00:01:43,954 --> 00:01:47,457
誰か~! 助けてくれ!
13
00:01:47,457 --> 00:01:49,957
離せ!
おりゃ~!
14
00:01:52,946 --> 00:01:56,933
おい 金…。
あぁ…。
15
00:01:56,933 --> 00:01:59,936
お~! よし これで
ダブル 買えるぜ。
16
00:01:59,936 --> 00:02:02,936
お~!
よし!
17
00:02:04,441 --> 00:02:07,978
昨晩 若者がサラリーマンを襲う
通り魔事件が発生した。
18
00:02:07,978 --> 00:02:10,530
(大野) えっ? こないだ
検挙したばっかじゃないすか。
19
00:02:10,530 --> 00:02:12,449
どうやら 別グループのようだ。
20
00:02:12,449 --> 00:02:15,452
合成麻薬ダブルを買う金 欲しさの
かつあげや 暴行事件が→
21
00:02:15,452 --> 00:02:16,953
相次いでるとのことだ。
22
00:02:16,953 --> 00:02:19,439
ちなみに この合成麻薬ダブルの
流通経路は→
23
00:02:19,439 --> 00:02:21,958
いまだ はっきりとは
つかめていない。
24
00:02:21,958 --> 00:02:23,944
この事件に関しては→
25
00:02:23,944 --> 00:02:25,979
本部の薬物捜査班が
元立ちになってるから。
26
00:02:25,979 --> 00:02:28,532
俺 桜田門の全体会議
行って来るわ。
27
00:02:28,532 --> 00:02:29,950
あれ? ラッセル君は?
28
00:02:29,950 --> 00:02:32,450
(丸岡高子) あっ 今日
ミルクの特売日だから。
29
00:02:34,437 --> 00:02:37,440
あっ 長嶋さんだよね? 刑事課の。
30
00:02:37,440 --> 00:02:39,459
(長嶋 葵) えっ? はい。
31
00:02:39,459 --> 00:02:41,945
深沢係長に伝言 お願いできる?
32
00:02:41,945 --> 00:02:43,480
はい。
33
00:02:43,480 --> 00:02:46,533
(白戸) かわいそうだろ 葵ちゃん
あんまり こき使うな。
34
00:02:46,533 --> 00:02:48,451
う~ん?
35
00:02:48,451 --> 00:02:52,956
何かさ あんた最近
やけにラッセルに優しくないか?
36
00:02:52,956 --> 00:02:56,456
まぁ チュウした仲だし。
37
00:02:57,961 --> 00:02:59,462
(大野) マジですか…。
38
00:02:59,462 --> 00:03:02,966
最近 やたら彼氏面
してると思ったら。
39
00:03:02,966 --> 00:03:05,018
彼氏面じゃないな。
40
00:03:05,018 --> 00:03:06,536
もはや 彼氏だ!
41
00:03:06,536 --> 00:03:11,458
うわ~ 佐々木と別れたばっかりで
ラッセルも よくやるねぇ。
42
00:03:11,458 --> 00:03:13,443
おっ 噂をすればだよ。
43
00:03:13,443 --> 00:03:15,445
戻りました。
44
00:03:15,445 --> 00:03:18,965
お~!
45
00:03:18,965 --> 00:03:20,951
めでてぇな バカ野郎 えっ?
46
00:03:20,951 --> 00:03:23,470
刑事課から
いよいよ カップル誕生だ なっ?
47
00:03:23,470 --> 00:03:25,472
ラッセル 遅い 3分遅刻。
48
00:03:25,472 --> 00:03:28,541
あ… あの今 ちょっと…。
でも まぁ 面白いから許してやる。
49
00:03:28,541 --> 00:03:30,541
は?
(白戸) 葵ちゃん。
50
00:03:32,462 --> 00:03:34,948
(大野) オホホホホ!
51
00:03:34,948 --> 00:03:36,967
いやいや… じゃあね
めでたいついでに→
52
00:03:36,967 --> 00:03:39,452
皆さんに朗報です。
あ… ちょっと 係長。
53
00:03:39,452 --> 00:03:42,455
(深沢) この事件
このヤマを解決した人間は→
54
00:03:42,455 --> 00:03:43,957
ボーナス2倍!
(大野) おっ!
55
00:03:43,957 --> 00:03:46,476
さらに 昇進も!
(大野) お~!
56
00:03:46,476 --> 00:03:48,511
…という 噂があります。
57
00:03:48,511 --> 00:03:50,947
怖いよ 顔が怖い。
噂だよ。
58
00:03:50,947 --> 00:03:53,466
それぐらい
大きいヤマだってことだからね。
59
00:03:53,466 --> 00:03:54,968
よろしく頼むよ ねっ。
60
00:03:54,968 --> 00:03:56,953
ちょっと待ってください 係長。
(深沢) えっ?
61
00:03:56,953 --> 00:03:59,940
あの 息子さんが…。
62
00:03:59,940 --> 00:04:01,441
ん? 何だ?
63
00:04:01,441 --> 00:04:03,960
どうやら 隣の署の生活安全課で→
64
00:04:03,960 --> 00:04:06,479
息子さんが
補導されてるそうなんです。
65
00:04:06,479 --> 00:04:08,531
太郎が…。
66
00:04:08,531 --> 00:04:10,450
(原) 係長に息子さん いたんだ。
67
00:04:10,450 --> 00:04:12,936
また 太郎って
今どき 随分 古風だな。
68
00:04:12,936 --> 00:04:16,439
身柄を引き取りに
来てほしいそうです。
69
00:04:16,439 --> 00:04:18,458
そうか すぐ行く。
70
00:04:18,458 --> 00:04:21,461
(赤石)
でも 今から桜田門で会議では?
71
00:04:21,461 --> 00:04:22,961
そっか…。
72
00:04:24,965 --> 00:04:28,034
あの… すまないが
この中で 私の代わりに→
73
00:04:28,034 --> 00:04:30,534
生活安全課に行ってくれる者は
いないか?
74
00:04:32,939 --> 00:04:34,958
おい。
75
00:04:34,958 --> 00:04:37,444
そんなボーナス欲しいか? えっ?
76
00:04:37,444 --> 00:04:39,462
日頃 世話になってる
上司のために→
77
00:04:39,462 --> 00:04:41,431
ひと肌 脱いでやろうって奴は
いないのか?
78
00:04:41,431 --> 00:04:43,950
おい マルコー お前 行けよ。
え~ 何で私が?
79
00:04:43,950 --> 00:04:46,453
(白戸) こういう時くらい
せめてもの恩返ししろよ。
80
00:04:46,453 --> 00:04:49,506
はぁ?
そうだ そうだ→
81
00:04:49,506 --> 00:04:52,442
お前が行ってくれるんならな
橋蔵くんの親権争いに有利な→
82
00:04:52,442 --> 00:04:54,442
陳述書 俺が書いてやる。
83
00:04:56,446 --> 00:04:58,946
頼むよ マルコー。
84
00:05:00,450 --> 00:05:01,950
あ~。
85
00:05:02,952 --> 00:05:05,438
係長の息子って
どんなコですかね?
86
00:05:05,438 --> 00:05:08,491
係長に似て
チビでメガネじゃないの?
87
00:05:08,491 --> 00:05:12,491
失礼ですよ フフフ。
笑ってるし…。
88
00:05:16,449 --> 00:05:18,935
お~ 全然違う。
89
00:05:18,935 --> 00:05:20,437
ホントだ。
90
00:05:20,437 --> 00:05:22,455
お疲れさまです じゃあ→
91
00:05:22,455 --> 00:05:24,455
サインをこちらに お願いします。
はい。
92
00:05:28,445 --> 00:05:30,964
どうも すいません。
93
00:05:30,964 --> 00:05:32,499
はぁ…。
94
00:05:32,499 --> 00:05:34,999
外に車 置いてあるから 行くよ。
95
00:05:36,936 --> 00:05:39,456
(太郎) あんた 誰?
96
00:05:39,456 --> 00:05:43,443
あぁ… 私は
あんたのパパの部下の丸岡高子。
97
00:05:43,443 --> 00:05:45,945
この子は 息子の橋蔵
こっちが…→
98
00:05:45,945 --> 00:05:47,947
橋蔵の使用人のラッセル。
99
00:05:47,947 --> 00:05:49,966
使用人って…。
100
00:05:49,966 --> 00:05:54,003
あんな奴の部下って
恥ずかしくないの?
101
00:05:54,003 --> 00:05:56,003
あなたのお父さんでしょ?
102
00:05:57,457 --> 00:05:59,943
あんな奴 親だと思ってねえし。
103
00:05:59,943 --> 00:06:01,961
ふ~ん。
104
00:06:01,961 --> 00:06:05,461
(太郎) はぁ…
子供だって 親 選びてぇよ。
105
00:06:07,450 --> 00:06:10,453
(太郎) もう帰っていいんだよね?
うん いいけど…。
106
00:06:10,453 --> 00:06:13,006
その前に お腹すかない?
107
00:06:13,006 --> 00:06:15,506
おいしいもん 食べに行こうよ。
108
00:08:21,935 --> 00:08:24,420
風俗店のチラシを
配布している際→
109
00:08:24,420 --> 00:08:26,906
マンションに不法侵入した疑い…。
110
00:08:26,906 --> 00:08:29,909
小っちゃ!
…んだよ バカにしてんのかよ。
111
00:08:29,909 --> 00:08:33,980
で このバイト代で
ダブル 買うつもりだったの?
112
00:08:33,980 --> 00:08:35,515
ダブル?
113
00:08:35,515 --> 00:08:37,400
ダブル 知らないの?
114
00:08:37,400 --> 00:08:40,920
知ってるよ! エックスの次の
アルファベットだろ?
115
00:08:40,920 --> 00:08:43,406
エックスの前です。
116
00:08:43,406 --> 00:08:45,425
で バイトは
どれぐらい やってるの?
117
00:08:45,425 --> 00:08:47,427
う~ん 1か月ぐらい前。
118
00:08:47,427 --> 00:08:49,412
1か月ぐらい前… うわっ!
119
00:08:49,412 --> 00:08:51,414
白戸とラッセルが
チュウした頃だ!
120
00:08:51,414 --> 00:08:53,466
チュウって! ちょっと
勘違いしないでください。
121
00:08:53,466 --> 00:08:56,903
あれは 酔っぱらって 少しだけ
記憶が飛んでただけなんです。
122
00:08:56,903 --> 00:08:59,422
つうか 白戸とチュウって
よくできるよなぁ。
123
00:08:59,422 --> 00:09:02,425
想像しただけでも
気持が悪いわ うぇ~。
124
00:09:02,425 --> 00:09:05,428
うぅ… 私も想像したら
気持悪くなって来た。
125
00:09:05,428 --> 00:09:07,928
ちょっと トイレ行って来ます。
126
00:09:14,487 --> 00:09:16,406
そういえば…。
127
00:09:16,406 --> 00:09:26,916
♪♪~
128
00:09:26,916 --> 00:09:28,918
ウソ…。
129
00:09:28,918 --> 00:09:31,418
今度は ホントにできちゃった。
130
00:09:32,972 --> 00:09:34,507
ん?
131
00:09:34,507 --> 00:09:36,507
父親は?
132
00:09:39,412 --> 00:09:42,412
え~~!
133
00:09:43,916 --> 00:09:47,904
で おとうさん 嫌いなんだ?
134
00:09:47,904 --> 00:09:50,907
ダセェし メガネだし。
135
00:09:50,907 --> 00:09:54,444
チビだし うちの課でも
バカにされてんもんな。
136
00:09:54,444 --> 00:09:56,496
知ってる。
137
00:09:56,496 --> 00:09:58,014
知ってんの?
138
00:09:58,014 --> 00:09:59,432
俺 見たことあるし。
139
00:09:59,432 --> 00:10:01,918
(大野)
((係長 テメェ バカ野郎…))
140
00:10:01,918 --> 00:10:04,921
((たまには びしっと言えよ
マルコーに))
141
00:10:04,921 --> 00:10:07,924
(白戸)((しっかりしろ このチビ))
(大野)((おめぇは チビだ))
142
00:10:07,924 --> 00:10:10,476
(大野)((体ごと ぶつかれば
いいわけよ マルコーに もう…))
143
00:10:10,476 --> 00:10:12,512
((痛てて…))
144
00:10:12,512 --> 00:10:14,914
(大野)((マルコーに
何 ビビってんだ えっ?→
145
00:10:14,914 --> 00:10:18,418
部下を怖がって どうするんだよ))
(深沢)((いやいや… 俺はね→
146
00:10:18,418 --> 00:10:21,421
検挙率とのバランスを
こう 考えてだな→
147
00:10:21,421 --> 00:10:23,439
やってるわけよ ねっ))
148
00:10:23,439 --> 00:10:24,924
ゲッ 私のせいかよ。
149
00:10:24,924 --> 00:10:26,976
ホント情けなくてさ。
150
00:10:26,976 --> 00:10:30,930
家でも 俺に対して
ビクビクしてんだぜ?
151
00:10:30,930 --> 00:10:33,416
どこでも ビクビクしやがって。
152
00:10:33,416 --> 00:10:35,416
マジ バカみてぇ。
153
00:10:36,936 --> 00:10:40,923
う~ん 太郎君のほうが
しっかりしてるかもね。
154
00:10:40,923 --> 00:10:43,926
俺は もうガキじゃねえ
バイトして金も貯めてるし。
155
00:10:43,926 --> 00:10:49,015
偉い 大人だね。
まぁ 時給1500円だしな。
156
00:10:49,015 --> 00:10:52,919
1500円 ハッ!
安い 子供のバイトじゃん。
157
00:10:52,919 --> 00:10:55,421
何だよ 子供のバイトって!
バカにしてんのかよ!
158
00:10:55,421 --> 00:10:58,925
時給15万の大人のバイト
してみない?
159
00:10:58,925 --> 00:11:00,925
時給15万?
160
00:11:02,912 --> 00:11:04,931
普通のバイトの10倍!
161
00:11:04,931 --> 00:11:06,933
アハっ 100倍。
162
00:11:06,933 --> 00:11:09,986
えっ! 100倍! ウソだろ!?
163
00:11:09,986 --> 00:11:12,486
シングルマザー ウソつかない。
164
00:11:14,440 --> 00:11:17,427
まさか 人殺しとか?
165
00:11:17,427 --> 00:11:20,430
違う違う 優しいおにいさんから
受け取ったものを→
166
00:11:20,430 --> 00:11:23,433
そのまた優しいおにいさんに
お届けするみたいな→
167
00:11:23,433 --> 00:11:26,919
まぁ 宅配便みたいなバイトだと
思ってくれれば。
168
00:11:26,919 --> 00:11:29,472
おぉ あ~。
アハハハ。
169
00:11:29,472 --> 00:11:31,507
あれ? ラッセル どうした?
170
00:11:31,507 --> 00:11:34,410
あ… いや 大丈夫です
すいません。
171
00:11:34,410 --> 00:11:37,413
じゃあさ 佐々木に
渡してほしいもんがあんだけど。
172
00:11:37,413 --> 00:11:38,915
卓也!?
173
00:11:38,915 --> 00:11:40,933
何?
あ いや…。
174
00:11:40,933 --> 00:11:43,920
卓也にですね。
はい。
175
00:11:43,920 --> 00:11:45,420
じゃ いってきます。
176
00:11:49,442 --> 00:11:51,442
何なんだ? あいつは。
177
00:11:53,012 --> 00:11:56,512
(玲子) へぇ~ いいじゃん。
(佐々木卓也) 何かあったら…。
178
00:12:03,906 --> 00:12:05,906
葵。
(玲子) えっ?
179
00:12:08,911 --> 00:12:12,432
もしかして
あなたが長嶋 葵さん?
180
00:12:12,432 --> 00:12:13,933
はい。
181
00:12:13,933 --> 00:12:18,905
はじめまして 私『週刊文化』の
村上玲子と申します。
182
00:12:18,905 --> 00:12:21,424
『週刊文化』。
183
00:12:21,424 --> 00:12:23,910
(玲子) 今ね→
184
00:12:23,910 --> 00:12:27,430
密着取材をさせてもらってるの。
密着?
185
00:12:27,430 --> 00:12:29,916
丸岡主任について
取材を受けてたんだ。
186
00:12:29,916 --> 00:12:31,417
先輩について?
187
00:12:31,417 --> 00:12:35,471
現代の働く母親にスポットライトを当てる
企画なんだけど→
188
00:12:35,471 --> 00:12:38,925
丸岡高子さんを
ぜひ特集したいと思って。
189
00:12:38,925 --> 00:12:40,927
はぁ。
190
00:12:40,927 --> 00:12:43,930
子連れで刑事なんて
今まで いなかったものね。
191
00:12:43,930 --> 00:12:47,416
(玲子) 子育てと仕事を
両立させるって ホント尊敬しちゃう。
192
00:12:47,416 --> 00:12:49,418
そうですね。
193
00:12:49,418 --> 00:12:53,956
ぜひ あなたからも丸岡さんに
お願いしてもらえないかしら。
194
00:12:53,956 --> 00:12:55,475
えっ?
195
00:12:55,475 --> 00:12:57,910
あ えっと…。
196
00:12:57,910 --> 00:13:00,429
葵ちゃん? 何してるの?
197
00:13:00,429 --> 00:13:01,929
白戸さん。
198
00:13:06,903 --> 00:13:10,403
佐々木君は もう
葵ちゃんと別れたんだよね?
199
00:13:11,924 --> 00:13:14,477
ええ まぁ…。
200
00:13:14,477 --> 00:13:16,512
君には 新しい彼女も
できたみたいだし。
201
00:13:16,512 --> 00:13:17,914
えっ?
202
00:13:17,914 --> 00:13:20,416
いつまでも葵ちゃんに
なれなれしくしないように。
203
00:13:20,416 --> 00:13:22,418
白戸さん 何 言ってるんですか。
こういうのは 葵ちゃん…。
204
00:13:22,418 --> 00:13:24,904
いいかげんにしてください!
205
00:13:24,904 --> 00:13:27,404
これ 先輩から じゃ。
206
00:13:30,426 --> 00:13:33,926
(警視監)
では 10分休憩とします。
207
00:13:41,921 --> 00:13:45,424
[TEL](呼び出し音)
208
00:13:45,424 --> 00:13:48,427
(警視正) 深沢君 深沢君。
あぁ 参事官。
209
00:13:48,427 --> 00:13:52,415
(警視正) 君の所は問題が多いね。
申し訳ございません。
210
00:13:52,415 --> 00:13:54,433
でも 相変わらず検挙率は いい。
211
00:13:54,433 --> 00:13:58,004
それを僕は とても買ってる。
もったいない お言葉です。
212
00:13:58,004 --> 00:13:59,922
今回のは 大きいヤマだ。
213
00:13:59,922 --> 00:14:02,408
君が ひと役買ってくれると
信じているよ。
214
00:14:02,408 --> 00:14:03,910
はい。
215
00:14:03,910 --> 00:14:05,410
(警視正) 頑張ってくれたまえ。
216
00:14:06,913 --> 00:14:08,915
ありがとうございます。
217
00:14:08,915 --> 00:14:10,416
フゥ~。
218
00:14:10,416 --> 00:14:13,419
(ムツミ)
いい? ヒカリノモリ公園で→
219
00:14:13,419 --> 00:14:16,989
青色のアタッシュケースを持った
男が話しかけて来るから。
220
00:14:16,989 --> 00:14:19,425
青色のアタッシュケース?
221
00:14:19,425 --> 00:14:23,425
話しかけて来たら
「青くないです」って言ってね。
222
00:14:24,914 --> 00:14:27,433
青くないです。
(ムツミ) そっ。
223
00:14:27,433 --> 00:14:31,938
あとは その男から聞いて
指示には絶対に従ってね。
224
00:14:31,938 --> 00:14:33,940
おお。
(ドアが開く音)
225
00:14:33,940 --> 00:14:35,992
青くないです。
226
00:14:35,992 --> 00:14:38,411
(太郎) 青くないです…。
(ドアが閉まる音)
227
00:14:38,411 --> 00:14:40,930
戻りました。
おっ。
228
00:14:40,930 --> 00:14:44,917
[TEL](着信音)
229
00:14:44,917 --> 00:14:47,920
(太郎) うわ 親父だ。
230
00:14:47,920 --> 00:14:49,920
[TEL](電源を切る音)
231
00:14:51,440 --> 00:14:52,942
ふ~ん。
232
00:14:52,942 --> 00:14:54,977
電話 出ないの?
233
00:14:54,977 --> 00:14:57,913
いいんだよ
親父の電話なんか出たら→
234
00:14:57,913 --> 00:15:00,433
ダサいの うつりそうだし。
235
00:15:00,433 --> 00:15:03,933
第一 あいつには 親の資格がない。
236
00:15:05,421 --> 00:15:07,423
親の資格…。
237
00:15:07,423 --> 00:15:11,410
そうだよ
免許制とかにすればいいのにな。
238
00:15:11,410 --> 00:15:13,963
「はい あなたには
親の資格があります→
239
00:15:13,963 --> 00:15:16,515
あなたには ありません」
みたいな?
240
00:15:16,515 --> 00:15:18,434
アハ…。
241
00:15:18,434 --> 00:15:19,934
免許制か…。
242
00:15:23,923 --> 00:15:25,941
よ~し 行くか!
えっ?
243
00:15:25,941 --> 00:15:28,427
行くって どこにですか?
ん?
244
00:15:28,427 --> 00:15:31,931
よし 行くぞ!
どこに? ちょっと待って!
245
00:15:31,931 --> 00:15:33,983
ねぇ! ちょっと!
246
00:15:33,983 --> 00:15:37,483
15万円 15万円。
247
00:15:38,421 --> 00:15:43,421
一 十 百 千 万 15万円。
248
00:15:51,400 --> 00:15:53,452
先輩。
ん?
249
00:15:53,452 --> 00:15:57,406
先輩は何で 橋蔵くん
産もうと思ったんですか?
250
00:15:57,406 --> 00:16:00,409
それって
今回の捜査に必要なこと?
251
00:16:00,409 --> 00:16:03,913
えっ? いや ないです。
252
00:16:03,913 --> 00:16:07,433
捜査中は余計なことを考えるな。
253
00:16:07,433 --> 00:16:09,919
すいません。
254
00:16:09,919 --> 00:16:13,456
あっ ところで 太郎君に
何させてるんですか?
255
00:16:13,456 --> 00:16:18,911
ん? 合成麻薬ダブル
密売組織への おとり捜査。
256
00:16:18,911 --> 00:16:21,414
はぁ!? ちょっと
何 考えてるんすか 未成年を。
257
00:16:21,414 --> 00:16:23,416
しかも 上司の息子を!
258
00:16:23,416 --> 00:16:25,434
いいか? 冷静に考えな。
259
00:16:25,434 --> 00:16:27,920
ああいうコが潜入したほうが
向こうにも バレにくいし→
260
00:16:27,920 --> 00:16:29,922
真相も つかみやすいだろ。
261
00:16:29,922 --> 00:16:32,425
冷静になんなきゃいけないのは
先輩のほうですよ!
262
00:16:32,425 --> 00:16:34,460
止めて来ます!
待て待て待て…!
263
00:16:34,460 --> 00:16:37,012
離し… 離して… ちょっと もう!
264
00:16:37,012 --> 00:16:39,415
せっかくのチャンスを
無駄にするな~!
265
00:16:39,415 --> 00:16:40,916
痛たた あ~!!
266
00:16:40,916 --> 00:16:42,918
あっ 来た 来た。
えっ?
267
00:16:42,918 --> 00:16:53,929
♪♪~
268
00:16:53,929 --> 00:16:56,929
(権田) これは 何色ですか?
269
00:16:58,901 --> 00:17:00,901
青くないです。
270
00:17:06,926 --> 00:17:09,412
こういうの初めて?
271
00:17:09,412 --> 00:17:11,414
まぁな!
272
00:17:11,414 --> 00:17:13,914
緊張してる?
273
00:17:15,468 --> 00:17:17,968
んなわけねえじゃん。
おい!
274
00:17:18,921 --> 00:17:22,421
権田さんに向かって ナメた口
きいてんじゃねえぞ おい!
275
00:17:24,410 --> 00:17:27,413
あんた
何 偉そうに言ってんだよ!
276
00:17:27,413 --> 00:17:30,916
こっちには タイミングとか
リズムとかがあんのよ!
277
00:17:30,916 --> 00:17:32,916
誰の紹介よ!
278
00:17:42,411 --> 00:17:44,413
はい。
279
00:17:44,413 --> 00:17:49,413
これを 1時間以内に
ここに持ってってほしいの。
280
00:17:51,921 --> 00:17:54,924
ここに行ったら 赤いアタッシュケースを
持ってる人がいるから→
281
00:17:54,924 --> 00:17:58,461
それと これを交換して
君は また ここに戻って来る。
282
00:17:58,461 --> 00:17:59,961
分かった?
283
00:18:03,916 --> 00:18:05,417
大丈夫?
284
00:18:05,417 --> 00:18:06,917
顔 青いよ?
285
00:18:08,420 --> 00:18:11,420
あ… 青くないです。
そう。
286
00:18:14,426 --> 00:18:16,912
なら さっさと行け!!
287
00:18:16,912 --> 00:18:18,412
は… はい!
288
00:18:19,965 --> 00:18:22,518
私は 太郎を追う
お前は権田を見張れ!
289
00:18:22,518 --> 00:18:23,936
えっ? あぁ!
290
00:18:23,936 --> 00:18:26,422
ちょっと! 先輩!
291
00:18:26,422 --> 00:18:28,440
先輩 待ってください!
292
00:18:28,440 --> 00:18:37,950
♪♪~
293
00:18:37,950 --> 00:18:39,450
君。
294
00:18:44,940 --> 00:18:46,909
何? それ。
295
00:18:46,909 --> 00:18:49,428
あぁ… いや これは→
296
00:18:49,428 --> 00:18:50,930
バイトで。
バイト?
297
00:18:50,930 --> 00:18:53,430
ど~したの~?
298
00:18:55,434 --> 00:18:57,920
はぁ~ よかった。
299
00:18:57,920 --> 00:19:00,472
ちょっと
あんたからも説明してよ。
300
00:19:00,472 --> 00:19:04,472
お巡りさん 僕 この人に頼まれて
このアタッシュケース…。
301
00:19:07,913 --> 00:19:09,415
えっ!?
302
00:19:09,415 --> 00:19:12,918
ちょ… 主任! 何するんですか!
303
00:19:12,918 --> 00:19:15,921
男のくせに 口が軽いなんて
ダメな奴だな。
304
00:19:15,921 --> 00:19:19,441
佐々木 この坊や 運んでくれる?
お昼寝の時間だから。
305
00:19:19,441 --> 00:19:21,441
はい。
306
00:21:30,939 --> 00:21:34,927
よ~し 佐々木 今日の業務 終了。
307
00:21:34,927 --> 00:21:38,447
えっ? 俺 終わりですか?
うん お疲れ~。
308
00:21:38,447 --> 00:21:42,434
お高 発信機つけちゃうわよ。
はい よろしく。
309
00:21:42,434 --> 00:21:54,430
♪♪~
310
00:21:54,430 --> 00:21:57,950
発信機って また違法捜査ですか?
311
00:21:57,950 --> 00:21:59,918
おっ 察しがいいね。
312
00:21:59,918 --> 00:22:01,937
また ムチャして。
313
00:22:01,937 --> 00:22:03,939
公になったら どうすんですか。
314
00:22:03,939 --> 00:22:07,926
そんなの気にしてらんないよ。
えっ?
315
00:22:07,926 --> 00:22:11,497
ちんたらやってたら
また 別の事件が起きる。
316
00:22:11,497 --> 00:22:14,917
ルールを無視する奴ら
相手にするのに→
317
00:22:14,917 --> 00:22:17,436
ルール守ってやってたら→
318
00:22:17,436 --> 00:22:19,936
いつまで経っても追いつかない。
319
00:22:22,424 --> 00:22:26,424
俺 やっぱり
主任みたいな刑事になりたいです。
320
00:22:28,931 --> 00:22:33,018
そりゃ 全部が全部マネしたい
わけじゃないですけど…。
321
00:22:33,018 --> 00:22:34,420
言いますなぁ。
322
00:22:34,420 --> 00:22:38,924
事件解決が第一
俺なりに頑張ります。
323
00:22:38,924 --> 00:22:43,429
事件も犯人も ラッセルみたいに
トロトロしてたら→
324
00:22:43,429 --> 00:22:46,932
早く解決すんのにな~。
325
00:22:46,932 --> 00:22:48,932
葵が怒りますよ。
326
00:22:50,469 --> 00:22:52,521
動き なし。
327
00:22:52,521 --> 00:23:09,938
♪♪~
328
00:23:09,938 --> 00:23:11,924
いけない いけない。
329
00:23:11,924 --> 00:23:13,924
動き なし。
330
00:23:15,461 --> 00:23:19,932
密売組織の黒幕は
サクラバ会で決まりね。
331
00:23:19,932 --> 00:23:23,452
(ムツミ) 仕切ってんのは
ナンバー2の権田章二。
332
00:23:23,452 --> 00:23:26,421
彼 同じ においがするの。
333
00:23:26,421 --> 00:23:29,925
この容赦ない目つきが
私と似てると思わない?
334
00:23:29,925 --> 00:23:32,961
同じ においね~。
335
00:23:32,961 --> 00:23:36,014
厄介かもね。
336
00:23:36,014 --> 00:23:37,933
中国語
337
00:23:37,933 --> 00:23:40,435
もう30分か ありがとう。
338
00:23:40,435 --> 00:23:42,437
そろそろだね~。
339
00:23:42,437 --> 00:23:44,923
起きなさ~い→
340
00:23:44,923 --> 00:23:48,911
お昼寝の時間は 終わりですよ~。
341
00:23:48,911 --> 00:23:50,412
ブッ。
342
00:23:50,412 --> 00:23:53,982
何で俺 こんなとこにいんの?
343
00:23:53,982 --> 00:23:56,482
あっ 時間 大丈夫?
344
00:23:57,920 --> 00:24:01,423
大丈夫じゃねえよ!
あら 大変。
345
00:24:01,423 --> 00:24:03,926
うわ 30分も遅刻だ。
346
00:24:03,926 --> 00:24:05,410
はい。
347
00:24:05,410 --> 00:24:07,946
あ~ もう!
348
00:24:07,946 --> 00:24:09,946
(太郎) ヤバい ヤバい…!
349
00:24:20,425 --> 00:24:24,425
赤… 赤… 赤の…。
350
00:24:26,415 --> 00:24:27,915
赤!
351
00:24:29,418 --> 00:24:31,918
あの。
はい?
352
00:24:33,922 --> 00:24:36,422
あ… すいません。
353
00:24:37,993 --> 00:24:39,993
ヤッベェ。
354
00:24:44,416 --> 00:24:45,918
戻って来た。
355
00:24:45,918 --> 00:24:47,936
あの…。
356
00:24:47,936 --> 00:24:51,440
やだ 交換してないじゃない!
357
00:24:51,440 --> 00:24:54,927
行きました 行ったんだけど
いなくて…。
358
00:24:54,927 --> 00:24:57,446
んなわけねえだろ!
ちゃんと見たのか!?
359
00:24:57,446 --> 00:25:00,983
ちょっと遅刻しちゃって…。
360
00:25:00,983 --> 00:25:03,919
遅刻した?
361
00:25:03,919 --> 00:25:07,940
よく分かんないんですけど
途中で 気 失っちゃって。
362
00:25:07,940 --> 00:25:11,426
気を失った?
363
00:25:11,426 --> 00:25:22,754
♪♪~
364
00:25:22,754 --> 00:25:25,754
はぁ~。
えっ! 麻薬!?
365
00:25:26,425 --> 00:25:28,925
あんた 見たわね?
366
00:25:32,431 --> 00:25:34,917
責任とってもらうわよ。
≪来い≫
367
00:25:34,917 --> 00:25:37,986
え~!? ちょっ… ヤダ… ヤダ!
368
00:25:37,986 --> 00:25:39,521
ヤバい。
369
00:25:39,521 --> 00:25:41,940
[TEL](振動音)
370
00:25:41,940 --> 00:25:44,943
先輩 ヤバいです 太郎君が
連れ去られそうになってます。
371
00:25:44,943 --> 00:25:47,429
[TEL] 助手席のパソコン開いて。
えっ?
372
00:25:47,429 --> 00:25:49,448
だから そんなことしてる場合じゃ
ないですって!
373
00:25:49,448 --> 00:25:52,918
[TEL] いいから見ろ。
はぁ?
374
00:25:52,918 --> 00:25:54,418
はい。
375
00:25:57,990 --> 00:26:00,926
[TEL] 太郎に発信機
つけておいたから それで追って。
376
00:26:00,926 --> 00:26:02,427
はい。
377
00:26:02,427 --> 00:26:04,930
[TEL] ボ~っとして 係長の息子
見失わないように。
378
00:26:04,930 --> 00:26:07,449
[TEL] 責任重大 よろしく。
379
00:26:07,449 --> 00:26:09,449
あっ ちょっと 先輩!
ちょっと…。
380
00:26:13,922 --> 00:26:15,457
(ノック)
381
00:26:15,457 --> 00:26:16,957
どうぞ。
382
00:26:19,928 --> 00:26:22,931
さっきは 悪かったなマルコー。
いやいや。
383
00:26:22,931 --> 00:26:26,435
太郎君 素直で いいコですね。
384
00:26:26,435 --> 00:26:27,936
あぁ そう?
385
00:26:27,936 --> 00:26:31,440
じゃあ あいつ 親にだけ
生意気な態度とってんだ。
386
00:26:31,440 --> 00:26:33,942
あっ で 太郎 今どこだ?
387
00:26:33,942 --> 00:26:35,978
ケータイの電源 切ってて
つながらないんだ。
388
00:26:35,978 --> 00:26:39,031
あ~ 何か
大事なものをなくしたから→
389
00:26:39,031 --> 00:26:40,932
持ち主に謝りに行くって
言ってました。
390
00:26:40,932 --> 00:26:43,452
素直に謝るって
なかなか できないのに→
391
00:26:43,452 --> 00:26:45,937
偉いですよね 太郎君。
392
00:26:45,937 --> 00:26:50,937
ホホ… あいつも きちんと
ひとに謝れるようになったんだ。
393
00:26:51,943 --> 00:26:56,982
フフ… いや あいつさ
今は突っ張って見せてるけど→
394
00:26:56,982 --> 00:27:00,035
小さい頃 体 弱くてな→
395
00:27:00,035 --> 00:27:02,938
しょっちゅう風邪ひいて
熱 出してたんだ。
396
00:27:02,938 --> 00:27:06,942
小さな体で耐えてるの見てると
かわいそうでな。
397
00:27:06,942 --> 00:27:10,942
ねだられるまんまに しょっちゅう
アイスクリームとか買いに行ってたよ。
398
00:27:12,931 --> 00:27:16,451
まっ でも まぁ
あの頃から甘やかしてるのが→
399
00:27:16,451 --> 00:27:18,451
いけなかったのかもしれないな。
400
00:27:21,423 --> 00:27:26,912
な~んてね あらためて
息子の話なんかしたら照れますね。
401
00:27:26,912 --> 00:27:28,413
フフフ…。
402
00:27:28,413 --> 00:27:31,913
いいんじゃないの? たまには。
403
00:27:35,437 --> 00:27:38,490
じゃあ みんなが戻って来たら
また会議な。
404
00:27:38,490 --> 00:27:41,910
今回のヤマは
我々に期待されてんだ。
405
00:27:41,910 --> 00:27:43,910
あ… 深沢。
ん?
406
00:27:45,931 --> 00:27:48,931
何? 何だよ。
407
00:27:52,921 --> 00:27:55,924
合成麻薬ダブル。
えっ?
408
00:27:55,924 --> 00:27:57,976
すぐに鑑識に回して。
409
00:27:57,976 --> 00:28:00,011
うん 分かった。
410
00:28:00,011 --> 00:28:03,932
(太郎) すいません すいません
すいません…。
411
00:28:03,932 --> 00:28:06,435
謝って済む問題じゃないわよ。
412
00:28:06,435 --> 00:28:09,935
大事な取引を
台無しにされたんだから。
413
00:28:11,440 --> 00:28:14,443
体で返してもらうしかないわね。
414
00:28:14,443 --> 00:28:16,928
(太郎) えっ?
ロッキー。
415
00:28:16,928 --> 00:28:19,481
(ロッキー) 腎臓 若ソウダネ。
416
00:28:19,481 --> 00:28:22,033
若クテ イイヨネ。
417
00:28:22,033 --> 00:28:26,438
イツモノヨウニ
手術ハ 麻酔ナシデイイカ?
418
00:28:26,438 --> 00:28:29,441
(権田) OK。
えっ?
419
00:28:29,441 --> 00:28:31,927
や… や…。
420
00:28:31,927 --> 00:28:33,945
(太郎) や~!
421
00:28:33,945 --> 00:28:36,431
やめたほうがいいぞ!
422
00:28:36,431 --> 00:28:39,935
俺の親父は 刑事だ!
423
00:28:39,935 --> 00:28:41,987
刑事?
424
00:28:41,987 --> 00:28:45,487
そう だから
やめといたほうがいいぞ!
425
00:28:46,925 --> 00:28:49,428
そうか。
426
00:28:49,428 --> 00:28:52,414
そうだ! 参ったか!
427
00:28:52,414 --> 00:28:53,914
参った。
428
00:28:58,937 --> 00:29:02,474
いよいよ あんたを
殺すしかなくなったわね。
429
00:29:02,474 --> 00:29:03,974
えっ?
430
00:29:05,527 --> 00:29:10,932
自分の子供を
おとり捜査に使うなんて…→
431
00:29:10,932 --> 00:29:15,432
大したパパね。
いや…。
432
00:29:23,929 --> 00:29:25,929
ここか…。
433
00:29:31,937 --> 00:29:35,924
先輩 太郎君が入ってったと
思われる倉庫の前に着きました。
434
00:29:35,924 --> 00:29:37,926
[TEL] そう んじゃ そこで待機。
435
00:29:37,926 --> 00:29:39,427
えっ!? でも…。
436
00:29:39,427 --> 00:29:43,415
[TEL](通話が切れた音)
437
00:29:43,415 --> 00:29:44,950
え~。
438
00:29:44,950 --> 00:29:47,502
(大野)
あ~ なかなか つかめねえなぁ。
439
00:29:47,502 --> 00:29:49,938
(赤石) 全然 手掛かりないですね。
440
00:29:49,938 --> 00:29:52,424
(原) 係長 すごい資料っすね。
えっ?
441
00:29:52,424 --> 00:29:54,426
マルコーが ダブルを入手した。
442
00:29:54,426 --> 00:29:55,927
(赤石) ホントっすか?
まぁね。
443
00:29:55,927 --> 00:29:59,931
今 鑑識に回してる こちらは
今から桜田門で全体会議だ。
444
00:29:59,931 --> 00:30:03,435
こいつは 合成麻薬の
流通経路に関する資料だ。
445
00:30:03,435 --> 00:30:06,488
いつになく 力 入ってますね。
ボーナス狙いっすか?
446
00:30:06,488 --> 00:30:08,423
えっ? というよりな→
447
00:30:08,423 --> 00:30:12,944
この事件の加害者の少年達の
平均年齢が17.5歳。
448
00:30:12,944 --> 00:30:15,430
まぁ ちょうど
ウチの息子と同い年ぐらいでな。
449
00:30:15,430 --> 00:30:17,449
この少年達は加害者だが→
450
00:30:17,449 --> 00:30:21,953
この麻薬密売が横行する
世の中では 被害者でもある。
451
00:30:21,953 --> 00:30:24,453
そう思うと 何か
力 入っちゃってな。
452
00:30:28,944 --> 00:30:31,429
この事件を解決することが→
453
00:30:31,429 --> 00:30:34,429
太郎自身を
救うことになるかもしれない。
454
00:30:35,917 --> 00:30:38,453
俺が太郎にしてやれることは→
455
00:30:38,453 --> 00:30:41,453
そんなことぐらいしか
ないのかもしれないな。
456
00:30:44,993 --> 00:30:47,028
なんてね!
457
00:30:47,028 --> 00:30:49,431
係長。
行って来るよ!
458
00:30:49,431 --> 00:30:53,451
[TEL](着信音)
459
00:30:53,451 --> 00:30:55,937
ん?
[TEL](着信音)
460
00:30:55,937 --> 00:30:58,423
おい 太郎 今 どこにいる…。
461
00:30:58,423 --> 00:31:01,426
は? お前 誰だ!
462
00:31:01,426 --> 00:31:03,979
[TEL](権田)
しらばっくれんじゃないわよ。
463
00:31:03,979 --> 00:31:07,432
自分の息子を
おとり捜査に使ったのは→
464
00:31:07,432 --> 00:31:11,432
刑事のパパ あんたじゃない。
465
00:31:12,921 --> 00:31:14,923
ん? おとり捜査?
466
00:31:14,923 --> 00:31:17,425
[TEL](振動音)
467
00:31:17,425 --> 00:31:18,910
先輩?
468
00:31:18,910 --> 00:31:21,947
餌に食らいついた
パトライト 車に乗っけろ。
469
00:31:21,947 --> 00:31:23,982
[TEL] 電話 切るなよ。
はい!
470
00:31:23,982 --> 00:31:26,935
[TEL](太郎) パパ 助けて!
おい! おい! 太郎!
471
00:31:26,935 --> 00:31:28,935
お前 太郎に何してんだ!
472
00:31:29,921 --> 00:31:32,924
子供に こんなことさせて。
473
00:31:32,924 --> 00:31:35,927
二度と
こんなことができないように→
474
00:31:35,927 --> 00:31:37,946
後悔させてやる。
475
00:31:37,946 --> 00:31:39,946
おい! おい! おい!
476
00:31:47,422 --> 00:31:50,422
「こんな事をして
ただで すむと思うなよ」。
477
00:31:52,944 --> 00:31:56,414
「合成麻薬ダブルは
警察が手に入れた」。
478
00:31:56,414 --> 00:31:57,916
何 言ってんの?
479
00:31:57,916 --> 00:32:00,416
ダブルなら ちゃんと こっち…。
480
00:32:02,454 --> 00:32:05,006
まさか… おい!
481
00:32:05,006 --> 00:32:15,917
♪♪~
482
00:32:15,917 --> 00:32:18,920
甘い! 粉ミルク?
483
00:32:18,920 --> 00:32:21,920
ふざけたマネしやがって!
484
00:32:25,427 --> 00:32:28,463
「お前たちは
完全に包囲されている」。
485
00:32:28,463 --> 00:32:31,016
[TEL](権田) あぁ?
486
00:32:31,016 --> 00:32:35,016
「太郎の胸を見てみろ
発信機が ついている」!?
487
00:32:36,421 --> 00:32:39,424
発信機?
(太郎) えっ?
488
00:32:39,424 --> 00:32:54,506
♪♪~
489
00:32:54,506 --> 00:32:57,425
えっ?
先輩 発信機の表示が消えました。
490
00:32:57,425 --> 00:33:01,446
よし サイレン鳴らせ。
はい!
491
00:33:01,446 --> 00:33:07,435
(パトカーのサイレン)
492
00:33:07,435 --> 00:33:10,922
クッソ~! 行くわよ!
はい!
493
00:33:10,922 --> 00:33:13,975
え~!? ちょっと ヤダヤダ!
494
00:33:13,975 --> 00:33:16,528
ヤダ! ヤダ!
495
00:33:16,528 --> 00:33:20,432
おい! お前
太郎に手ぇ出したら お前…!
496
00:33:20,432 --> 00:33:23,435
(太郎) やめて~!
497
00:33:23,435 --> 00:33:26,438
このコに他に
発信機が ついてないか調べて!
498
00:33:26,438 --> 00:33:28,440
(太郎) ヤダ!
499
00:33:28,440 --> 00:33:31,926
先輩 追います!
[TEL] そのまま待機 絶対 追うなよ。
500
00:33:31,926 --> 00:33:33,926
えっ? でも…。
501
00:33:34,946 --> 00:33:36,481
[TEL](通話が切れた音)
502
00:33:36,481 --> 00:33:37,981
えっ?
[TEL](通話が切れた音)
503
00:33:42,937 --> 00:33:45,423
マルコー。
504
00:33:45,423 --> 00:33:47,923
お前 太郎に何させた。
505
00:33:49,444 --> 00:33:52,931
ただ
捜査を手伝ってもらってるだけ。
506
00:33:52,931 --> 00:33:54,933
あいつは まだ17歳だ
未成年だ!
507
00:33:54,933 --> 00:33:56,951
未成年。
508
00:33:56,951 --> 00:33:59,487
未成年って何だ?
509
00:33:59,487 --> 00:34:01,923
大人の方が必要ってこと?
510
00:34:01,923 --> 00:34:06,928
私に言わせてもらえば
親に甘えていいのは 6歳まで。
511
00:34:06,928 --> 00:34:11,433
百歩譲っても
義務教育の15歳までだな。
512
00:34:11,433 --> 00:34:15,420
それ以上は 子供じゃ ない。
それは お前の勝手な考え方だろ!
513
00:34:15,420 --> 00:34:19,507
未成年って立場に甘えて
権利ばっかり主張して→
514
00:34:19,507 --> 00:34:22,927
責任を持たない連中が
世の中にはゴロゴロいる。
515
00:34:22,927 --> 00:34:25,447
親のスネをかじってるくせに→
516
00:34:25,447 --> 00:34:29,434
自分一人で生きてますって
勘違いしてる連中がね。
517
00:34:29,434 --> 00:34:32,954
少年法に守られて
名前も出ないことをいいことに→
518
00:34:32,954 --> 00:34:37,442
ひっどい事件を起こしても
軽い刑で のうのうと生きてる。
519
00:34:37,442 --> 00:34:41,012
おかしいでしょ?
520
00:34:41,012 --> 00:34:43,932
そんな性根の腐った子供の親は→
521
00:34:43,932 --> 00:34:48,432
子供に向き合いもせず
ろくに叱れもしない。
522
00:34:49,921 --> 00:34:53,421
子供も子供なら 大人も大人だ。
523
00:34:56,911 --> 00:34:59,464
あとは 私がやる。
524
00:34:59,464 --> 00:35:02,464
行きなよ 会議なんだろ。
525
00:35:07,922 --> 00:35:10,425
フゥ~。
526
00:35:10,425 --> 00:35:12,425
その通りだよ。
527
00:35:14,913 --> 00:35:19,417
俺は 忙しいとかいう理由で→
528
00:35:19,417 --> 00:35:21,936
ずっと太郎と
向き合って来なかった。
529
00:35:21,936 --> 00:35:26,524
お前の言う通りだ
ろくに叱ったこともないよ。
530
00:35:26,524 --> 00:35:28,927
全部 言い訳だ。
531
00:35:28,927 --> 00:35:31,412
仕事が忙しいとか
事件が起こったとか→
532
00:35:31,412 --> 00:35:33,932
全部 言い訳
そんなことは分かってるんだよ!
533
00:35:33,932 --> 00:35:35,917
でも そうやって
ず~っと やって来て→
534
00:35:35,917 --> 00:35:38,436
今さら 何を喋ったらいいんだ!
535
00:35:38,436 --> 00:35:40,922
ホントは…→
536
00:35:40,922 --> 00:35:43,441
向き合うことが怖いって→
537
00:35:43,441 --> 00:35:46,941
太郎に見透かされてることぐらい
知ってるよ。
538
00:35:49,931 --> 00:35:51,931
(深沢) はぁ…。
539
00:35:57,939 --> 00:36:00,425
でもな→
540
00:36:00,425 --> 00:36:03,425
太郎は 俺の息子だ。
541
00:36:06,998 --> 00:36:09,934
あいつに何かあったら→
542
00:36:09,934 --> 00:36:11,934
1番に助ける。
543
00:36:15,423 --> 00:36:19,923
これだけは 親父の意地だ。
544
00:36:27,518 --> 00:36:31,422
会議は出ない
警察を辞めると伝えといてくれ。
545
00:36:31,422 --> 00:36:33,424
係長。
546
00:36:33,424 --> 00:36:36,427
フゥ~ 行こうか。
547
00:36:36,427 --> 00:36:48,489
♪♪~
548
00:36:48,489 --> 00:36:53,489
「親父の意地」ねぇ…。
549
00:36:55,930 --> 00:36:59,930
橋蔵のパパは
そんなこと言ってくれるかねぇ。
550
00:37:03,938 --> 00:37:07,425
ちょっと似合わないこと
しちゃってもいいかな?
551
00:37:07,425 --> 00:37:09,425
(丸岡橋蔵) あ~。
552
00:37:16,434 --> 00:37:18,436
[TEL](呼び出し音)
553
00:37:18,436 --> 00:37:20,936
ラッセル 至急 戻って来い。
554
00:37:24,425 --> 00:37:26,928
先輩 発信機つぶされたのか→
555
00:37:26,928 --> 00:37:28,963
太郎君の居場所が
分かんなくなっちゃいました。
556
00:37:28,963 --> 00:37:31,963
どうしましょ。
よし。
557
00:37:41,926 --> 00:37:44,929
ほれ。
えっ? 先輩 何で?
558
00:37:44,929 --> 00:37:48,429
さっきのはダミー こっちが本命
別に仕込んどいた。
559
00:37:49,934 --> 00:37:51,953
はぁ~ なるほど。
560
00:37:51,953 --> 00:37:54,973
奴らは ダブルが
警察の手に渡ったことを知った。
561
00:37:54,973 --> 00:37:58,042
鑑識の結果が出れば
言い逃れはできない。
562
00:37:58,042 --> 00:38:00,428
だから まずは
証拠を消そうとする。
563
00:38:00,428 --> 00:38:03,948
つまり 奴らが向かっているのは
ダブルの貯蔵庫。
564
00:38:03,948 --> 00:38:06,434
やっぱり 本から絶たなきゃね。
565
00:38:06,434 --> 00:38:08,934
じゃ 行きましょ。
いや。
566
00:38:11,923 --> 00:38:15,423
今日は 私達は行かない。
えっ?
567
00:38:18,012 --> 00:38:20,512
作戦を変更する。
568
00:40:23,387 --> 00:40:25,887
太郎~! 太郎~!
569
00:40:29,894 --> 00:40:31,913
太郎!
570
00:40:31,913 --> 00:40:33,898
太郎!
係長!
571
00:40:33,898 --> 00:40:35,399
マルコー ラッセル君。
572
00:40:35,399 --> 00:40:39,437
太郎君の居場所が 見つかりました
ジョウナン地区の16番倉庫です。
573
00:40:39,437 --> 00:40:41,989
深沢!
えっ?
574
00:40:41,989 --> 00:40:44,909
世話になったな これ餞別だ。
575
00:40:44,909 --> 00:40:46,911
(深沢) 何だ? これ。
あ~!
576
00:40:46,911 --> 00:40:50,411
目の前で開けるなよ
恥ずかしいだろ。
577
00:41:02,476 --> 00:41:05,396
早くしなさいよ!
(一同) はい。
578
00:41:05,396 --> 00:41:08,896
クッソ あんたのせいよ!
579
00:41:09,901 --> 00:41:13,404
これ運び終わったら
次は あんたの番だからね。
580
00:41:13,404 --> 00:41:16,404
え~…。
(深沢) ≪待て!≫
581
00:41:17,909 --> 00:41:19,909
パパ!
太郎。
582
00:41:23,481 --> 00:41:26,901
何で ここが分かったの?
583
00:41:26,901 --> 00:41:29,387
あんた達 発信機 探せって
言ったじゃない!
584
00:41:29,387 --> 00:41:30,887
≪すいません!≫
585
00:41:31,923 --> 00:41:35,423
パパ 1人で来たの?
586
00:41:36,894 --> 00:41:39,394
何? その箱。
587
00:41:41,432 --> 00:41:43,932
これは 何でもない。
588
00:41:45,503 --> 00:41:48,503
そんなわけ… ないじゃないの!
589
00:41:53,394 --> 00:41:56,894
(権田) 卵?
卵?
590
00:42:02,403 --> 00:42:03,903
((ブッ!))
591
00:42:04,922 --> 00:42:06,922
((何だこれ! 臭ぇ!))
592
00:42:09,994 --> 00:42:11,494
(深沢) えい!
593
00:42:12,897 --> 00:42:14,899
うぉ~。
うっ…。
594
00:42:14,899 --> 00:42:16,400
≪臭ぇ!≫
595
00:42:16,400 --> 00:42:19,921
(深沢) うわ~~!
596
00:42:19,921 --> 00:42:22,924
≪逃がすな!≫
≪何やってんだ…≫
597
00:42:22,924 --> 00:42:24,424
うぁ~!
598
00:42:25,910 --> 00:42:27,929
うぁ~!
599
00:42:27,929 --> 00:42:29,463
ん~!
600
00:42:29,463 --> 00:42:30,982
うぉ~ うわ!
601
00:42:30,982 --> 00:42:33,417
うぇ~… うぇ~。
602
00:42:33,417 --> 00:42:36,417
(権田) ノ~ ノ~ ノ~ ノ~!
おら~!
603
00:42:39,890 --> 00:42:42,893
んが~!
痛てて…!
604
00:42:42,893 --> 00:42:44,393
離して!
605
00:42:45,930 --> 00:42:48,482
おりゃ~!
606
00:42:48,482 --> 00:42:50,401
(深沢) んん~~…!
607
00:42:50,401 --> 00:42:51,902
うお…。
608
00:42:51,902 --> 00:42:58,926
♪♪~
609
00:42:58,926 --> 00:43:00,895
太郎…。
610
00:43:00,895 --> 00:43:02,395
太郎。
611
00:43:03,898 --> 00:43:06,398
もうちょっとだ ん~!
612
00:43:09,503 --> 00:43:11,503
ブッ ゴホ!
613
00:43:16,911 --> 00:43:18,896
パパ!
614
00:43:18,896 --> 00:43:27,905
♪♪~
615
00:43:27,905 --> 00:43:29,405
(銃声)
616
00:43:33,995 --> 00:43:35,396
ふん!
あぁ!
617
00:43:35,396 --> 00:43:36,897
ん~! ん~!
618
00:43:36,897 --> 00:43:39,417
痛い痛い…! ギブギブ!
619
00:43:39,417 --> 00:43:47,908
(パトカーのサイレン)
620
00:43:47,908 --> 00:43:50,428
ナイスピッチング ラッセル。
621
00:43:50,428 --> 00:43:52,428
ありがとうございます。
622
00:43:54,515 --> 00:43:56,917
(大野) ≪こっちだ!≫
623
00:43:56,917 --> 00:44:00,917
何だ これ 臭っさい!
おい 連れてけ 連れてけ。
624
00:44:08,396 --> 00:44:12,396
(大野)
バカ野郎! お前… この野郎!
625
00:44:14,402 --> 00:44:18,389
(赤石) 係長 1人で行くなんて
水くさいですよ。
626
00:44:18,389 --> 00:44:19,889
あれ?
627
00:44:22,410 --> 00:44:24,912
やっぱり警察手帳
持ってたんですね。
628
00:44:24,912 --> 00:44:26,914
手品か何かですか?
629
00:44:26,914 --> 00:44:28,916
(原) 辞めるとか たんか切って
飛び出したのは→
630
00:44:28,916 --> 00:44:30,968
ウソだったんですかね。
(深沢) いや それはだな…。
631
00:44:30,968 --> 00:44:33,003
係長!
(深沢) えっ? はい。
632
00:44:33,003 --> 00:44:34,405
お手柄です!
633
00:44:34,405 --> 00:44:36,907
いや~ 何だかんだ言っても
やるときゃやるじゃないですか。
634
00:44:36,907 --> 00:44:39,910
合成麻薬の貯蔵庫を
一網打尽なんて。
635
00:44:39,910 --> 00:44:41,912
えっ?
636
00:44:41,912 --> 00:44:46,417
まぁ やっぱ俺達の上司は
こうじゃなきゃ いかんわな。
637
00:44:46,417 --> 00:44:48,417
さすがです。
638
00:44:51,489 --> 00:45:10,891
♪♪~
639
00:45:10,891 --> 00:45:14,428
よし 全員 現場の保全
鑑識を呼んでくれ。
640
00:45:14,428 --> 00:45:17,498
麻薬密売組織を
絶対に壊滅させるぞ!
641
00:45:17,498 --> 00:45:18,998
(一同) はい。
642
00:45:22,903 --> 00:45:25,423
(深沢) 合成麻薬が
屋外に隠されてる可能性もある。
643
00:45:25,423 --> 00:45:27,908
周辺の聞き込み 頼むわ。
分かりました。
644
00:45:27,908 --> 00:45:29,410
あ~ 内部写真
よろしくお願いしますね。
645
00:45:29,410 --> 00:45:31,412
≪係長≫
(深沢) あ~ ご苦労さん。
646
00:45:31,412 --> 00:45:32,913
どう?
647
00:45:32,913 --> 00:45:34,913
反省した?
648
00:45:38,469 --> 00:45:42,969
「子供は親を選べない」か。
649
00:45:45,392 --> 00:45:48,395
子供が親を選べないように→
650
00:45:48,395 --> 00:45:50,895
親も子供を選べない。
651
00:45:52,416 --> 00:45:56,921
でも あんたの親は あいつで→
652
00:45:56,921 --> 00:45:59,924
あいつの子供は→
653
00:45:59,924 --> 00:46:01,424
あんた。
654
00:46:02,977 --> 00:46:05,913
親子に生まれたんだから
しょうがない。
655
00:46:05,913 --> 00:46:13,904
♪♪~
656
00:46:13,904 --> 00:46:17,925
あいつは 紛れもなく→
657
00:46:17,925 --> 00:46:19,925
あんたの親だよ。
658
00:46:26,400 --> 00:46:28,903
じゃあ 引き続き よろしく頼む
あっ ナンバープレートもね。
659
00:46:28,903 --> 00:46:30,403
≪はい≫
660
00:46:32,923 --> 00:46:36,423
パパ あの…。
661
00:46:43,000 --> 00:46:45,903
フラフラしてるからだ!
662
00:46:45,903 --> 00:46:48,403
補導されるようなことを
するんじゃ ない。
663
00:46:54,395 --> 00:46:56,914
昨日から帰ってないんだろ。
664
00:46:56,914 --> 00:47:00,434
おかあさんが心配してる。
665
00:47:00,434 --> 00:47:02,434
早く帰んなさい。
666
00:47:10,895 --> 00:47:12,913
深沢。
667
00:47:12,913 --> 00:47:15,399
手 震えてるぞ。
668
00:47:15,399 --> 00:47:17,918
やっぱり叱り慣れてないんだな。
669
00:47:17,918 --> 00:47:19,420
震えてないよ。
670
00:47:19,420 --> 00:47:21,405
そんなことより
太郎に万一のことがあったら→
671
00:47:21,405 --> 00:47:23,405
どう 責任とるつもりだったんだ!
672
00:47:25,426 --> 00:47:26,426
ん?
673
00:47:32,416 --> 00:47:34,416
よっこいしょ。
674
00:47:37,421 --> 00:47:38,921
えっ!?
675
00:47:39,907 --> 00:47:42,409
よっ。
おぉ…。
676
00:47:42,409 --> 00:47:44,409
あった。
677
00:47:48,966 --> 00:47:52,903
絶対に見つからないように
お尻の穴に仕込んどいた。
678
00:47:52,903 --> 00:47:54,405
え~…。
679
00:47:54,405 --> 00:47:57,908
いつの間に?
あんたが寝てる隙に。
680
00:47:57,908 --> 00:48:01,895
いや お前… お前な~。
よいしょ~。
681
00:48:01,895 --> 00:48:04,398
はい。
うわ~! ちょっと!
682
00:48:04,398 --> 00:48:07,398
んじゃ お先に。
683
00:48:10,988 --> 00:48:14,408
あぁ 深沢。
684
00:48:14,408 --> 00:48:15,908
ん? 何だ。
685
00:48:17,411 --> 00:48:20,414
親父の意地→
686
00:48:20,414 --> 00:48:22,914
なかなか カッコ良かった。
687
00:48:26,420 --> 00:48:27,905
んじゃ。
688
00:48:27,905 --> 00:48:37,898
♪♪~
689
00:48:37,898 --> 00:48:42,898
あ~あ 深沢に
手柄とられちゃったなぁ。
690
00:48:44,888 --> 00:48:46,888
ま いっか。
691
00:48:50,411 --> 00:48:52,930
ほら 乗れ。
大丈夫だよ。
692
00:48:52,930 --> 00:48:54,430
そうか。
693
00:48:56,000 --> 00:48:59,887
(深沢) じゃあ 自宅まで
お願いします おい 大丈夫か?
694
00:48:59,887 --> 00:49:01,887
父親か。
695
00:49:04,908 --> 00:49:13,901
[TEL](着信音)
696
00:49:13,901 --> 00:49:16,970
もしもし 葵? どうした?
697
00:49:16,970 --> 00:49:19,006
卓也…。
698
00:49:19,006 --> 00:49:21,909
[TEL] 何?
699
00:49:21,909 --> 00:49:23,909
あの…。
700
00:49:25,913 --> 00:49:29,413
ごめん 間違えちゃった じゃあ。
701
00:49:30,918 --> 00:49:34,905
あっ ちょっと…。
[TEL](通話が切れた音)
702
00:49:34,905 --> 00:49:37,458
はぁ…。
703
00:49:37,458 --> 00:49:39,958
やっぱ言えない。
704
00:49:45,399 --> 00:49:47,901
お腹すいたねぇ。
705
00:49:47,901 --> 00:49:51,922
今日は おいしいもん
食べよっか~ ん~?
706
00:49:51,922 --> 00:49:54,925
ん~ ラッキー。
707
00:49:54,925 --> 00:49:56,894
よ~し 行くぞ~。
708
00:49:56,894 --> 00:49:59,894
(カメラのシャッター音)
709
00:50:11,425 --> 00:50:12,910
(深沢) ≪ちょっと いいか≫
710
00:50:12,910 --> 00:50:16,397
今日 仕事が終わったら
みんなで一緒に 飲みに行くぞ。
711
00:50:16,397 --> 00:50:18,415
しかも 私のおごりだ。
イエ~イ。
712
00:50:18,415 --> 00:50:21,919
ホントにボーナス出たんすか?
それは 噂だった 昇進もな。
713
00:50:21,919 --> 00:50:23,904
何だよ~。
714
00:50:23,904 --> 00:50:25,939
いいじゃないか
おごるっつってんだからさ。
715
00:50:25,939 --> 00:50:27,975
マルコー お前も来るんだぞ。
716
00:50:27,975 --> 00:50:30,911
私はパス。
何でだよ~?
717
00:50:30,911 --> 00:50:34,398
何で仕事終わってから あんた達と
飲みに行かなきゃいけないのよ~。
718
00:50:34,398 --> 00:50:36,917
え~ ホントは
嬉しいんじゃねえのか?
719
00:50:36,917 --> 00:50:39,920
全然。
だぁ~。
720
00:50:39,920 --> 00:50:41,905
「だぁ~」って言った。
721
00:50:41,905 --> 00:50:44,958
ウソなんだ。
722
00:50:44,958 --> 00:50:47,458
バカなこと言ってんじゃないよ
ホントに。
723
00:50:48,912 --> 00:50:50,414
何?
724
00:50:50,414 --> 00:50:52,916
あ… いや。
(原) ≪係長!≫
725
00:50:52,916 --> 00:50:55,916
(原) これ見てください! これ!
(深沢) どうしたんだよ。
726
00:50:59,423 --> 00:51:01,423
マルコー これ。
727
00:51:03,444 --> 00:51:20,894
♪♪~
728
00:51:20,894 --> 00:51:23,413
♪♪~
(香山靖子) まぁ。
729
00:51:23,413 --> 00:51:32,906
♪♪~
730
00:51:32,906 --> 00:51:37,911
[外:762CBC6B6F0F7132973C0F6CEB4141C6](ベル)
731
00:51:37,911 --> 00:51:40,914
はい 強行犯係。
732
00:51:40,914 --> 00:51:42,900
(原) はい。
733
00:51:42,900 --> 00:51:45,419
「村上玲子」…。
734
00:51:45,419 --> 00:51:48,989
((私 『週刊文化』の
村上玲子と申します))
735
00:51:48,989 --> 00:51:52,409
((丸岡高子さんを
ぜひ特集したいと思って))
736
00:51:52,409 --> 00:51:56,897
係長 監察官からです。
737
00:51:56,897 --> 00:51:58,397
うん。
738
00:51:59,900 --> 00:52:01,900
お電話代わりました 深沢です。
739
00:52:04,905 --> 00:52:06,905
(深沢) はい。
740
00:52:08,959 --> 00:52:10,459
(深沢) はい。
741
00:52:11,395 --> 00:52:14,395
「BONNIE PINK」が歌う
この番組の主題歌…。
742
00:52:17,901 --> 00:52:22,406
ハガキに住所 氏名 年齢
番組の感想などをお書きの上→
743
00:52:22,406 --> 00:52:25,406
ご覧の宛先まで お送りください。
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.ui;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ui.UIUtil;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.awt.*;
import java.lang.annotation.Annotation;
/**
* @author max
* @author Konstantin Bulenkov
*/
public class ColorUtil {
// Static utility class: the private constructor prevents instantiation.
private ColorUtil() {
}
/**
 * Returns a desaturated ("softer") variant of {@code color}.
 * Colors already close to white (all RGB channels above 220) are returned
 * unchanged; otherwise the HSB saturation is scaled down to 60%.
 */
public static Color softer(@Nonnull Color color) {
  final int red = color.getRed();
  final int green = color.getGreen();
  final int blue = color.getBlue();
  if (red > 220 && green > 220 && blue > 220) {
    return color;
  }
  final float[] hsb = Color.RGBtoHSB(red, green, blue, null);
  return Color.getHSBColor(hsb[0], hsb[1] * 0.6f, hsb[2]);
}
/**
 * Produces a darker shade of {@code color}: its HSB brightness is divided by
 * 1.1 once per tone, stopping early once it reaches pure black.
 */
public static Color darker(@Nonnull Color color, int tones) {
  final float[] hsb = Color.RGBtoHSB(color.getRed(), color.getGreen(), color.getBlue(), null);
  float value = hsb[2];
  int applied = 0;
  while (applied < tones && value > 0) {
    value = Math.max(0, value / 1.1F);
    applied++;
  }
  return Color.getHSBColor(hsb[0], hsb[1], value);
}
/**
 * Produces a brighter shade of {@code color}: its HSB brightness is multiplied
 * by 1.1 once per tone, capped at 1, stopping early at maximum brightness.
 */
public static Color brighter(@Nonnull Color color, int tones) {
  final float[] hsb = Color.RGBtoHSB(color.getRed(), color.getGreen(), color.getBlue(), null);
  float value = hsb[2];
  int applied = 0;
  while (applied < tones && value < 1) {
    value = Math.min(1, value * 1.1F);
    applied++;
  }
  return Color.getHSBColor(hsb[0], hsb[1], value);
}
/**
 * Increases the HSB saturation of {@code color}: multiplied by 1.1 once per
 * tone, capped at 1, stopping early once fully saturated.
 */
@Nonnull
public static Color saturate(@Nonnull Color color, int tones) {
  final float[] hsb = Color.RGBtoHSB(color.getRed(), color.getGreen(), color.getBlue(), null);
  float sat = hsb[1];
  int applied = 0;
  while (applied < tones && sat < 1) {
    sat = Math.min(1, sat * 1.1F);
    applied++;
  }
  return Color.getHSBColor(hsb[0], sat, hsb[2]);
}
/**
 * Decreases the HSB saturation of {@code color}: divided by 1.1 once per
 * tone, stopping early once fully desaturated (grayscale).
 */
@Nonnull
public static Color desaturate(@Nonnull Color color, int tones) {
  final float[] hsb = Color.RGBtoHSB(color.getRed(), color.getGreen(), color.getBlue(), null);
  float sat = hsb[1];
  int applied = 0;
  while (applied < tones && sat > 0) {
    sat = Math.max(0, sat / 1.1F);
    applied++;
  }
  return Color.getHSBColor(hsb[0], sat, hsb[2]);
}
/**
 * Blends {@code color} 80/20 toward white: each RGB channel keeps 80% of its
 * value and gains the remaining 20% as a flat white share.
 */
public static Color dimmer(@Nonnull Color color) {
  final float[] rgb = color.getRGBColorComponents(null);
  final float weight = 0.80f;
  final float whiteShare = 1 - weight;
  return new Color(rgb[0] * weight + whiteShare,
                   rgb[1] * weight + whiteShare,
                   rgb[2] * weight + whiteShare);
}
/** Scales one 0-255 color component by {@code d}, clamping the result to [0, 255]. */
private static int shift(int colorComponent, double d) {
  final int scaled = (int)(colorComponent * d);
  return Math.max(0, Math.min(255, scaled));
}
/**
 * Multiplies each RGB channel of {@code c} by {@code d}, clamping every
 * channel to [0, 255] and preserving the original alpha.
 */
public static Color shift(Color c, double d) {
  final int red = Math.max(0, Math.min(255, (int)(c.getRed() * d)));
  final int green = Math.max(0, Math.min(255, (int)(c.getGreen() * d)));
  final int blue = Math.max(0, Math.min(255, (int)(c.getBlue() * d)));
  return new Color(red, green, blue, c.getAlpha());
}
/**
 * Returns {@code c} with its alpha channel set to the fraction {@code a}
 * of 255 (truncated); a {@code null} color is treated as black.
 */
public static Color withAlpha(Color c, double a) {
  final Color base = c != null ? c : Color.black;
  return new Color(base.getRed(), base.getGreen(), base.getBlue(), (int)(255 * a));
}
/**
 * Pre-multiplied-alpha variant of {@link #withAlpha}: sets the alpha channel
 * to the fraction {@code a}, multiplies each RGB channel by that alpha, and
 * returns the result as a fully opaque color.
 */
@Nonnull
public static Color withPreAlpha(@Nonnull Color c, double a) {
  final Color base = c != null ? c : Color.black;
  final Color translucent = new Color(base.getRed(), base.getGreen(), base.getBlue(), (int)(255 * a));
  final float[] rgba = translucent.getRGBComponents(new float[4]);
  return new Color(rgba[0] * rgba[3], rgba[1] * rgba[3], rgba[2] * rgba[3], 1.0f);
}
public static Color toAlpha(Color color, int a) {
Color c = color != null ? color : Color.black;
return new Color(c.getRed(), c.getGreen(), c.getBlue(), a);
}
// Sets the alpha channel to d AND scales the RGB components by the same d,
// so the result is simultaneously more transparent and proportionally darker.
public static Color withAlphaAdjustingDarkness(Color c, double d) {
    return shift(withAlpha(c, d), d);
}
/** Returns the HTML/CSS hex notation for {@code c}, e.g. {@code "#ff0010"}. */
@Nonnull
public static String toHtmlColor(@Nonnull final Color c) {
    return "#" + toHex(c);
}

/**
 * Returns the six-digit lowercase hex RGB string for {@code c}, with no
 * leading '#'; each component is zero-padded to two digits.
 */
public static String toHex(@Nonnull final Color c) {
    return String.format("%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue());
}
/**
 * Return Color object from string. The following formats are allowed:
 * <code>#abc123</code>,
 * <code>ABC123</code>,
 * <code>ab5</code>,
 * <code>#FFF</code>.
 *
 * @param str hex string
 * @return Color object
 * @throws IllegalArgumentException if the (de-prefixed) string is not 3 or 6 characters long
 * @throws NumberFormatException if the string contains non-hex characters
 */
public static Color fromHex(String str) {
    str = StringUtil.trimStart(str, "#");
    if (str.length() == 3) {
        // Short CSS form: each digit is doubled ("ab5" -> "aabb55"),
        // i.e. component = 17 * digit (0xNN where N == digit).
        return new Color(
            17 * Integer.parseInt(String.valueOf(str.charAt(0)), 16),
            17 * Integer.parseInt(String.valueOf(str.charAt(1)), 16),
            17 * Integer.parseInt(String.valueOf(str.charAt(2)), 16));
    }
    else if (str.length() == 6) {
        return Color.decode("0x" + str);
    }
    else {
        throw new IllegalArgumentException("Should be String of 3 or 6 chars length.");
    }
}
/**
 * Lenient variant of {@link #fromHex(String)}: returns {@code defaultValue}
 * instead of throwing when {@code str} cannot be parsed.
 */
@Nullable
public static Color fromHex(String str, @Nullable Color defaultValue) {
    try {
        return fromHex(str);
    }
    catch (Exception e) {
        return defaultValue;
    }
}
// Resolves the color declared on a class via the @Colored annotation.
// Under a dark look-and-feel (UIUtil.isUnderDarcula() — presumably the Darcula
// theme check; confirm against UIUtil) the annotation's darkVariant() hex is
// used, otherwise color(). Returns null when the class is not annotated or
// the hex string is invalid (fromHex's default applies).
@Nullable
public static Color getColor(@Nonnull Class<?> cls) {
    final Annotation annotation = cls.getAnnotation(Colored.class);
    if (annotation instanceof Colored) {
        final Colored colored = (Colored)annotation;
        return fromHex(UIUtil.isUnderDarcula() ? colored.darkVariant() : colored.color(), null);
    }
    return null;
}
/**
 * Checks whether color is dark or not based on perceptional luminosity
 * http://stackoverflow.com/questions/596216/formula-to-determine-brightness-of-rgb-color
 *
 * @param c color to check
 * @return dark or not
 */
public static boolean isDark(@Nonnull final Color c) {
    // Weighted (0.299 R + 0.587 G + 0.114 B) luminance normalized to 0..1;
    // anything at or below half luminance counts as dark.
    final double luminance = (0.299 * c.getRed() + 0.587 * c.getGreen() + 0.114 * c.getBlue()) / 255;
    return 1 - luminance >= 0.5;
}
/**
 * Linearly interpolates between {@code c1} and {@code c2}, including alpha.
 * {@code balance} is clamped to [0, 1]: 0 yields {@code c1}, 1 yields
 * {@code c2}. Each channel is rounded to the nearest integer.
 */
@Nonnull
public static Color mix(@Nonnull Color c1, @Nonnull Color c2, double balance) {
    final double w2 = Math.min(1, Math.max(0, balance));
    final double w1 = 1 - w2;
    return new Color(
            (int)(w1 * c1.getRed() + c2.getRed() * w2 + .5),
            (int)(w1 * c1.getGreen() + c2.getGreen() * w2 + .5),
            (int)(w1 * c1.getBlue() + c2.getBlue() * w2 + .5),
            (int)(w1 * c1.getAlpha() + c2.getAlpha() * w2 + .5));
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div class="SRResult" id="SR_onebyfftlen">
<div class="SREntry">
<a id="Item0" onkeydown="return searchResults.Nav(event,0)" onkeypress="return searchResults.Nav(event,0)" onkeyup="return searchResults.Nav(event,0)" class="SRSymbol" href="../structarm__cfft__radix4__instance__f32.html#ab9eed39e40b8d7c16381fbccf84467cd" target="_parent">onebyfftLen</a>
<span class="SRScope">arm_cfft_radix4_instance_f32</span>
</div>
</div>
<div class="SRResult" id="SR_outlen">
<div class="SREntry">
<a id="Item1" onkeydown="return searchResults.Nav(event,1)" onkeypress="return searchResults.Nav(event,1)" onkeyup="return searchResults.Nav(event,1)" class="SRSymbol" href="../arm__convolution__example__f32_8c.html#a9c49c44c8bc5c432d220d33a26b4b589" target="_parent">outLen</a>
<span class="SRScope">arm_convolution_example_f32.c</span>
</div>
</div>
<div class="SRResult" id="SR_outputq31">
<div class="SREntry">
<a id="Item2" onkeydown="return searchResults.Nav(event,2)" onkeypress="return searchResults.Nav(event,2)" onkeyup="return searchResults.Nav(event,2)" class="SRSymbol" href="../arm__graphic__equalizer__example__q31_8c.html#a9862488450f2547b07aee8035d6b4d8a" target="_parent">outputQ31</a>
<span class="SRScope">arm_graphic_equalizer_example_q31.c</span>
</div>
</div>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
package org.jenkinsci.plugins.mesos;
import com.mesosphere.usi.core.models.commands.LaunchPod;
import com.mesosphere.usi.core.models.template.FetchUri;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.Extension;
import hudson.model.AbstractDescribableImpl;
import hudson.model.Descriptor;
import hudson.model.Label;
import hudson.model.Node;
import hudson.model.labels.LabelAtom;
import hudson.util.FormValidation;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.commons.lang.StringUtils;
import org.apache.mesos.v1.Protos.ContainerInfo.DockerInfo.Network;
import org.jenkinsci.plugins.mesos.api.LaunchCommandBuilder;
import org.jenkinsci.plugins.mesos.api.RunTemplateFactory.ContainerInfoTaskInfoBuilder;
import org.jenkinsci.plugins.mesos.config.models.faultdomain.DomainFilterModel;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Option;
/** This is the Mesos agent pod spec config set by a user. */
public class MesosAgentSpecTemplate extends AbstractDescribableImpl<MesosAgentSpecTemplate> {
private static final Logger logger = LoggerFactory.getLogger(MesosAgentSpecTemplate.class);
private final String label;
private Set<LabelAtom> labelSet;
private final Node.Mode mode;
private final int idleTerminationMinutes;
private final boolean reusable;
private final double cpus;
private final int mem;
private final double disk;
private final int minExecutors;
private final int maxExecutors;
private final String jnlpArgs;
private final String agentAttributes;
private final List<MesosSlaveInfo.URI> additionalURIs;
private final LaunchCommandBuilder.AgentCommandStyle agentCommandStyle;
private final ContainerInfo containerInfo;
private final DomainFilterModel domainFilterModel;
@DataBoundConstructor
public MesosAgentSpecTemplate(
String label,
Node.Mode mode,
String cpus,
String mem,
int idleTerminationMinutes,
int minExecutors,
int maxExecutors,
String disk,
String jnlpArgs,
String agentAttributes,
List<MesosSlaveInfo.URI> additionalURIs,
ContainerInfo containerInfo,
LaunchCommandBuilder.AgentCommandStyle agentCommandStyle,
DomainFilterModel domainFilterModel) {
this.label = label;
this.mode = mode;
this.idleTerminationMinutes = idleTerminationMinutes;
this.reusable = false; // TODO: DCOS_OSS-5048.
this.cpus = (cpus != null) ? Double.parseDouble(cpus) : 0.1;
this.mem = Integer.parseInt(mem);
this.minExecutors = minExecutors;
this.maxExecutors = maxExecutors;
this.disk = (disk != null) ? Double.parseDouble(disk) : 0.0;
this.jnlpArgs = StringUtils.isNotBlank(jnlpArgs) ? jnlpArgs : "";
this.agentAttributes = StringUtils.isNotBlank(agentAttributes) ? agentAttributes : "";
this.additionalURIs = (additionalURIs != null) ? additionalURIs : Collections.emptyList();
this.containerInfo = containerInfo;
this.domainFilterModel = domainFilterModel;
this.agentCommandStyle = agentCommandStyle;
validate();
}
private void validate() {}
@Extension
public static final class DescriptorImpl extends Descriptor<MesosAgentSpecTemplate> {
public DescriptorImpl() {
load();
}
/**
* Validate that CPUs is a positive double.
*
* @param cpus The number of CPUs to user for agent.
* @return Whether the supplied CPUs is valid.
*/
public FormValidation doCheckCpus(@QueryParameter String cpus) {
try {
if (Double.valueOf(cpus) > 0.0) {
return FormValidation.ok();
} else {
return FormValidation.error(cpus + " must be a positive floating-point-number.");
}
} catch (NumberFormatException e) {
return FormValidation.error(cpus + " must be a positive floating-point-number.");
}
}
}
/**
* Creates a LaunchPod command to to create a new Jenkins agent via USI
*
* @param jenkinsUrl the URL of the jenkins master.
* @param name The name of the node to launch.
* @param role The Mesos role for the task.
* @return a LaunchPod command to be passed to USI.
* @throws MalformedURLException If a fetch URL is not well formed.
* @throws URISyntaxException IF the fetch URL cannot be converted into a proper URI.
*/
public LaunchPod buildLaunchCommand(URL jenkinsUrl, String name, String role)
throws MalformedURLException, URISyntaxException {
List<FetchUri> fetchUris =
additionalURIs.stream()
.map(
uri -> {
try {
return new FetchUri(
new java.net.URI(uri.getValue()),
uri.isExtract(),
uri.isExecutable(),
false,
Option.empty());
} catch (URISyntaxException e) {
logger.warn(String.format("Could not migrate URI: %s", uri.getValue()), e);
return null;
}
})
.filter(Objects::nonNull)
.collect(Collectors.toList());
return new LaunchCommandBuilder()
.withCpu(this.getCpus())
.withMemory(this.getMem())
.withDisk(this.getDisk())
.withName(name)
.withRole(role)
.withJenkinsUrl(jenkinsUrl)
.withContainerInfo(Optional.ofNullable(this.getContainerInfo()))
.withDomainInfoFilter(
Optional.ofNullable(this.getDomainFilterModel()).map(model -> model.getFilter()))
.withJnlpArguments(this.getJnlpArgs())
.withAgentAttribute(this.getAgentAttributes())
.withAgentCommandStyle(Optional.ofNullable(this.agentCommandStyle))
.withAdditionalFetchUris(fetchUris)
.build();
}
public String getLabel() {
return this.label;
}
public Set<LabelAtom> getLabelSet() {
// Label.parse requires a Jenkins instance so we initialize it lazily
if (this.labelSet == null) {
this.labelSet = Label.parse(label);
}
return this.labelSet;
}
public Node.Mode getMode() {
return this.mode;
}
/**
* Generate a new unique name for a new agent. Note: multiple calls will yield different names.
*
* @return A new unique name for an agent.
*/
public String generateName() {
return String.format("jenkins-agent-%s-%s", this.label, UUID.randomUUID().toString());
}
public double getCpus() {
return this.cpus;
}
public double getDisk() {
return this.disk;
}
public int getMem() {
return this.mem;
}
public int getIdleTerminationMinutes() {
return this.idleTerminationMinutes;
}
public boolean getReusable() {
return this.reusable;
}
public List<MesosSlaveInfo.URI> getAdditionalURIs() {
return additionalURIs;
}
public int getMinExecutors() {
return minExecutors;
}
public int getMaxExecutors() {
return maxExecutors;
}
public LaunchCommandBuilder.AgentCommandStyle getAgentCommandStyle() {
return this.agentCommandStyle;
}
public String getJnlpArgs() {
return jnlpArgs;
}
public String getAgentAttributes() {
return agentAttributes;
}
public ContainerInfo getContainerInfo() {
return this.containerInfo;
}
public DomainFilterModel getDomainFilterModel() {
return this.domainFilterModel;
}
public static class ContainerInfo extends AbstractDescribableImpl<ContainerInfo> {
private final String type;
private final String dockerImage;
private final List<Volume> volumes;
private final Network networking;
private final boolean dockerPrivilegedMode;
private final boolean dockerForcePullImage;
private boolean isDind;
@SuppressFBWarnings("UUF_UNUSED_FIELD")
private transient List<Object> portMappings;
@SuppressFBWarnings("UUF_UNUSED_FIELD")
private transient boolean dockerImageCustomizable;
@SuppressFBWarnings("UUF_UNUSED_FIELD")
private transient List<Object> parameters;
@SuppressFBWarnings("UUF_UNUSED_FIELD")
private transient List<Object> networkInfos;
@SuppressFBWarnings("UUF_UNUSED_FIELD")
private transient boolean useCustomDockerCommandShell;
@SuppressFBWarnings("UUF_UNUSED_FIELD")
private transient String customDockerCommandShell;
@DataBoundConstructor
public ContainerInfo(
String type,
String dockerImage,
boolean isDind,
boolean dockerPrivilegedMode,
boolean dockerForcePullImage,
List<Volume> volumes,
Network networking) {
this.type = type;
this.dockerImage = dockerImage;
this.dockerPrivilegedMode = dockerPrivilegedMode;
this.dockerForcePullImage = dockerForcePullImage;
this.volumes = volumes;
this.isDind = isDind;
this.networking =
(networking != null) ? networking : ContainerInfoTaskInfoBuilder.DEFAULT_NETWORKING;
}
public boolean getIsDind() {
return this.isDind;
}
public Network getNetworking() {
return this.networking;
}
public String getType() {
return type;
}
public String getDockerImage() {
return dockerImage;
}
public boolean getDockerPrivilegedMode() {
return dockerPrivilegedMode;
}
public boolean getDockerForcePullImage() {
return dockerForcePullImage;
}
public List<Volume> getVolumes() {
return volumes;
}
public List<Volume> getVolumesOrEmpty() {
return (this.volumes != null) ? this.volumes : Collections.emptyList();
}
@Extension
public static final class DescriptorImpl extends Descriptor<ContainerInfo> {
public DescriptorImpl() {
load();
}
}
}
public static class Volume extends AbstractDescribableImpl<Volume> {
private final String containerPath;
private final String hostPath;
private final boolean readOnly;
@DataBoundConstructor
public Volume(String containerPath, String hostPath, boolean readOnly) {
this.containerPath = containerPath;
this.hostPath = hostPath;
this.readOnly = readOnly;
}
public String getContainerPath() {
return containerPath;
}
public String getHostPath() {
return hostPath;
}
public boolean isReadOnly() {
return readOnly;
}
@Extension
public static final class DescriptorImpl extends Descriptor<Volume> {
public DescriptorImpl() {
load();
}
}
}
}
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2012-2019 The VPaint Developers.
// See the COPYRIGHT file at the top-level directory of this distribution
// and at https://github.com/dalboris/vpaint/blob/master/COPYRIGHT
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef VAC_INBETWEENHALFEDGE_H
#define VAC_INBETWEENHALFEDGE_H

#include "HalfedgeBase.h"
#include "InbetweenEdge.h"

namespace VectorAnimationComplex
{

// A halfedge whose underlying edge is an InbetweenEdge. All behavior comes
// from HalfedgeBase<InbetweenEdge>; this subclass only pins the edge type.
// NOTE(review): by halfedge convention this pairs an edge with a traversal
// side/direction — confirm against HalfedgeBase.
class InbetweenHalfedge: public HalfedgeBase<InbetweenEdge>
{
};

}

#endif // VAC_INBETWEENHALFEDGE_H
| {
"pile_set_name": "Github"
} |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="EventDeclaratorExpression.cs" company="https://github.com/StyleCop">
// MS-PL
// </copyright>
// <license>
// This source code is subject to terms and conditions of the Microsoft
// Public License. A copy of the license can be found in the License.html
// file at the root of this distribution. If you cannot locate the
// Microsoft Public License, please send an email to [email protected].
// By using this source code in any fashion, you are agreeing to be bound
// by the terms of the Microsoft Public License. You must not remove this
// notice, or any other, from this software.
// </license>
// <summary>
// A single event declarator within an event.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace StyleCop.CSharp
{
    /// <summary>
    /// A single event declarator within an event.
    /// </summary>
    /// <subcategory>expression</subcategory>
    public sealed class EventDeclaratorExpression : Expression
    {
        #region Fields

        /// <summary>
        /// The name token of the declared event.
        /// </summary>
        private readonly LiteralExpression eventIdentifier;

        /// <summary>
        /// The optional initializer assigned to the event, or null when absent.
        /// </summary>
        private readonly Expression eventInitializer;

        #endregion

        #region Constructors and Destructors

        /// <summary>
        /// Initializes a new instance of the EventDeclaratorExpression class.
        /// </summary>
        /// <param name="tokens">
        /// The list of tokens that form the statement.
        /// </param>
        /// <param name="identifier">
        /// The identifier name of the event.
        /// </param>
        /// <param name="initializer">
        /// The initialization expression for the event.
        /// </param>
        internal EventDeclaratorExpression(CsTokenList tokens, LiteralExpression identifier, Expression initializer)
            : base(ExpressionType.EventDeclarator, tokens)
        {
            Param.AssertNotNull(tokens, "tokens");
            Param.AssertNotNull(identifier, "identifier");
            Param.Ignore(initializer);

            this.eventIdentifier = identifier;
            this.eventInitializer = initializer;

            // Register the child expressions so tree walkers can visit them.
            this.AddExpression(identifier);
            if (initializer != null)
            {
                this.AddExpression(initializer);
            }
        }

        #endregion

        #region Public Properties

        /// <summary>
        /// Gets the identifier name of the event.
        /// </summary>
        public LiteralExpression Identifier
        {
            get
            {
                return this.eventIdentifier;
            }
        }

        /// <summary>
        /// Gets the initialization statement for the event.
        /// </summary>
        public Expression Initializer
        {
            get
            {
                return this.eventInitializer;
            }
        }

        /// <summary>
        /// Gets the parent event.
        /// </summary>
        public Event ParentEvent { get; internal set; }

        #endregion
    }
}
"pile_set_name": "Github"
} |
%!PS-AdobeFont-1.0: Utopia-Italic 001.001
%%CreationDate: Wed Oct 2 18:58:18 1991
%%VMusage: 34122 41014
%% Utopia is a registered trademark of Adobe Systems Incorporated.
11 dict begin
/FontInfo 10 dict dup begin
/version (001.001) readonly def
/Notice (Copyright (c) 1989, 1991 Adobe Systems Incorporated. All Rights Reserved.Utopia is a registered trademark of Adobe Systems Incorporated.) readonly def
/FullName (Utopia Italic) readonly def
/FamilyName (Utopia) readonly def
/Weight (Regular) readonly def
/ItalicAngle -13 def
/isFixedPitch false def
/UnderlinePosition -100 def
/UnderlineThickness 50 def
end readonly def
/FontName /Utopia-Italic def
/Encoding StandardEncoding def
/PaintType 0 def
/FontType 1 def
/FontMatrix [0.001 0 0 0.001 0 0] readonly def
/UniqueID 36549 def
/FontBBox{-166 -250 1205 890}readonly def
currentdict end
currentfile eexec
a9994c496574b9cb23f8cbcd64a4a16861c70f0f2da82f0c7b06ceacd6521bb0
cc26f1cf47836cbab75757d7a81793f43e56cc8f22f926da04d715ab6ff2e257
5a135dabbaaa58f31f548cbe8a76c69e2402589b9e5e46e757f06bf2eddbbe6e
e48a624cbe1c4840a338e90f7efbe9f2194aee1c869bc4cd76e2f1937d78e207
d8149c05b50ef0bb361f5905977c40be7d4dad07b54e087896acda5aa70ab803
9dfc55a73134c7f1c9be9028d3ec6ccb0fbb8fda52bba4d7551a8124e68481d0
775ed7f8ec68d8073bfdd3b67f72ec68634ffc57727e16b9aba841546ae54d99
b60e227682315510edda09bd6ead4d1652f449d737592c44bb178689a3840169
53d899636686efc6f838b19f966be5f833f5e7d41af38a899df96fdce1ebc116
9b0ec87c930d9fdcbab7e74880e9693a24de9c67dbb0dd75b3dbd113b079c490
60018433ccc06ac1df33fc090c4642fd5225fa0a188c131974ca8820319704dd
14b1719b958779d1475d92e712322eb2e6a79d652c4e3f5833aca3091675fef3
fde103446e565428267b009d87bb7d6bc40f46b498b19bf1223aba33079e41ba
a9561254a9df97a48d71015b6a24cf952539f664f172d565f39a55e063154d80
a960cd56c34011fe26e8849c6677427b2c7e728cd16272363d76661b3ab8bb2c
85c20b747519a4431421d4c83783feb896c45688ff7f824381cff5654a1bfc68
6f4894e1d265455fc00064d08c37dc3a47b59a5b2d14dd893a2b871492b07c4a
591695c8df012ef46a750d6cb7fd86696e7ead280648bd737e9ea490140f3b1b
1dffab2ff8e085a8b1c78e4b9ae9cee277a29233cec5ab2588d1cdaf4bbb6512
e1b1476c68fd3db18ecc2536968ebebf4333c2c3709ca354f21fffc38c3858c0
561378a701158ed0201a3b236ca9cb7f7528d1e5e07a9bfab04f8d0fa3d12405
5ba2fa9dff76f41b980bfb36e8db1300f7172c902dcb9f5f0bba3cb9c212bf31
76cb802ddebe556f4610021fa3700d476a2be2a66ae6c658fd0a95bc1b9629ad
03fe4e775ffdfa9f87a274fd904c0db5beae258f9af213f257db2be0265b964a
681fc4d2855f5356fc87576d847db274fed9ca5bb4961ffcf53f28b673f50e31
c3c634604e70fd0895af711c9fa302572eede069f65502f68884a90d9f1ed7f1
cdd95fc73ca6ae925cce4e01e49934539739f082f0ff1398deafda84c3d9b1e0
cafdefe5bf321670bfa286078bc066bb1895b1a3f0253abc079b753c8f020fae
ceb70acca52c4fd011825ba3ab32ce9c7ec263b4828d979b57588c48de210085
e56eeb002dcecc967c95f219728511ccc6c51817125e1afdd3e4a9b35c8e5551
06a026900808425393fa10ea4b6f4513f0af047d326d80462d36c2c7d033125c
ab05bd0d3058b69f11a1d16f23345b3c97e444ffcfb4b3496743d9f6abca53e1
b702a359cedda4d78e1a771989290f86ceee70502c574d6582a64045acbc33ed
436871e9beaaefb12174806003037e8616b5f12ff470cc6e08b0af6789b9614e
6ce98441b72e79b0c613e4a01835cb4d61c9cdecde9a66184af5876f0c6bea8d
8280472a4c6a2f38fa95b7c475ce75f8acd6850e8e82dcbbbb96e7b389612cfb
082c280fc4babf5da29e26f1f163d5ce353c9eb61122a10634ca3224261089f5
e999b1d591d92216afe16a409f7735043885cd0218b4bb7289fb4547fdca93a1
30280807d3c5fe5abf6d6ce88af04beb42ee031be89efad865f05daadc0ec4f7
871c1a8d78e51a453a8c4b0ea780268639009c2ed6741fef8c598248ea9a2c5d
f3ab0401a05b0fc35f66bcd6c812087ac03be6cee22ff97df7152681279de3bc
be0a5c3574240a561f667c87a9fd065b788c9bf4e38dce8abc94f29acfef1933
6568527336b48570be215a8ed6c54642ff28aa5edebe0228873c55a65430ff76
8502e836e97ae8f2f6ac4dc5eee76beba71cb16389399a34535296a0175974a8
c44918f7c668fcb07a72c0771fa8c49ab7d238611d8c2eb17a2a6f7bdce45b45
2616dd51ec465225f83e886237b273488f4ec220d1837272df8a504273fdd95d
ad88d197a3a6e92519eef80d7c18b31df15b9530d96f9982c1f7f870a95272c1
325b394ec41c9e7fd2f13ce4adda1cc0cca0ad9b068e4d1c92e17d1ebbd4e365
f7f4a9c547a81470f3ea6386dae456937b302a2e6f9c4553a39ea280895ad232
57b00bf0b1c9c38746dd5438067ea5ba365b1a4509a29bf938f3c028cd2d0dbe
441c1e881135b7abd353f1b14f225001812697df59ae92b11a92eb68b41a6e62
637f097d4b079e266a81e6bc30a69d5741ebc518a6bab96ee0f79b9cdebb1d35
04f88ac6a46db5e7be43a3c290399f00e2f78da44d348c66d7d12ce239b30dc0
8f916c875e3e54cef7f5f1b966f606d8fa3e072d4a417b3cbd43bd3b644b2928
626f29f4d369502b63ff43c7be25193bf11216db04ddc2f39faf825a559576be
427bdb147daa9d9dc8a9839daba32fbfbc8d9fe41bae9d7b15d8106ae01e2cf0
d59c6f8c2cf498fe7518fa7b84f38c6f4be7cfcfd0e75908650f52a107ab7d7d
2bbb6361d5378232b80edb45dfc91750eadad74c9b42a19339d0227d83caaba1
72a43d3e5c64129284a50ecbc669f29a4d4caf634369d069118fd7a3fe12bfdf
e202f3376727ae5f1a9340d824dd8b76a517da44eacd882063c8243e121769d2
809ff04b1e23d34d9ce5f496c7f1926cf1b0930466df9076dcd5b82c97fe1b45
d1579437aa0820561298a96e22c94befa09929f74c067706594eb37375c136e1
a9491f80c7a0a0fd447f078898778428309ad92d89cae7f48727d8912deba5f0
d7b49674111a6033b67c19bb92129b7127d4ce80bce642b1880628df772a87ee
5b48f395b55aee658c50ab2c81ab7de66eaa52382fadcc60957581a4e09ee989
331ca245586217a7e6b0f746455c59552906d8a525f5ba5a07cc1df0c45b6ed0
95230142ee05ec75e12531f65374f78173615c913532a330659b9beaa653eaed
c0895b65d1a44304e09765cc9e070f61820249b6236194d952103f17799b68e0
8b447986aea15ecfb780f7717986537516a6ae928648b16a51895f33adc17310
f0813e7c715368bce559bab6c2292b42198f7f28a2ef66661f70aaad4e5ff132
07de7de9f1691d5d1235d1e77a71403c2ce61c8f8960c262ea5fdf4e93fe87e3
b4d7a400080f90a71a8805257c24318563575bedff7ecf2ee53a6ca8a24f8ac6
e26c70b028f87a1806e63784542067b3d9deb6c96122c644e4052624a9f5e2fd
c04d4c1d02d39b438097040027c0b7131dd5febaa0f09b767cce35c4e4447387
567dc3887cbe792114a9a0bbcd01c3aeeb167b5ea4e08f2e04eeb67783669fc5
16e91b1fd880efd3b1e9c1553a2900b5f1ef5f7b6ac7d3574456eb2fb6494511
cdf324639998efce019716334e92407673b4ce1d19dd9ff89d301a7954e95b4b
11e33d64682ee3a8e215ecbb482532e9f0dfa9859b014711a1789d97b8aa0809
a1338f2de4e58a9c3d727b6efb754806c2cb3698d01ffda6ff4baf277aa570d6
524ddf0898a02c508bc6684ae45ced430993828c8193cdf0091b2dfd7d811424
364490b8e607aa52cf695a4179703ca2ea9d0b595770729228f912fd9228360b
359e298a8845c00bb1fb2aca1abdfc1c570b044bcded5e35986c484f0c84cf37
6fcccd6374804a3d380b48a6b50506b0f52c51dfa6d49c83c9a2a8bc366137cf
3840355310b16356606f84b5da7783d2f6d6f46284f63f1ca4bd7672e114eb49
9a2d80bf132de70d992d326f70a272c64f6264b68aa7722e7a0e400eee21e634
8fc2524eed6e4df4ae7da2667dca0cc4d79eb78dc7db496a7f54564931434aba
a06e085a1d1ecd916fca69e8cfbda338b6f873c90f3440ff8c442646dbf337dc
9631f88f60d4efe91d35df767fa8a0e3c1db3d0053cb419e78453c595d6cb05f
a4205d1b3334183c34b5bc80a898ed9ab1cc015c65189fd0efebe5aea9c71196
ae040a0598a301c4aeeef4864e4789b1124dfe3f8e1aaf66a3a510705f6a21a0
cf0de7e2f9a2b89179cdd4b009b33935b87765f3bab41ab667e39b74c3774edb
443c772d06d80f7248bb6366d64b1a408044b55f61f641793436d23b35fd55db
3a8f7d0e99953a6e74338c1d20083b579e534e887994c2080731ef4f9c6cba7d
5ed6975b583022715232c005033ffb24995ccdffcb1f8655de53044fc3a46950
694b82f991cdacaef5879fbbbc1ba7de04cab076fdc76b7e75aeb177c61143f2
4ea610269642f4b1c765063418ebdb6e0fd220f6005ce8223f956d6755b7d145
e734a947ffebea2904bad11b0fe43beb9baaa38108e475ff79e8ac33bc9d7138
3feb6e6e7083fed7a31f178ba5b930f47f4c197fe5217f9ae5b64c3ee13ce1ea
5db10538f5e54aff7b81aaeee741713bd60bffc2c8d3f7361ceacfdcca39882c
6b0e2217bfe1990908c1c79c59939a2344174f73d069d0bf1279aa30f722691d
faedace1bdbe056026c8c7970def57b68fb8fc30e7a86d461a2210998c103c6b
835238192c02ceb0144f2e035fc89f160fb9f14f3a6772b5dcf908244629ef82
30fd14e830fec7280100b5eda94d83208463e7358028f83fe2605e7dedb85355
50384276f3b72f5895a6bd4170abd2bf25d6a6d8c165b2ac5620a3417927744c
2a9798576343f472e6edd840b3082a22cc0efef8f53787167273e0cf926ebee7
868803e55a266e761dc6ab8f7722efc301fa93ee781e081f4f58e6b6ebad140f
ab33411eca49b23928a425e4348cf705a1e0f4fdd320025f3c315cf0da1170fe
1a13ae53d45b712a3370f2e68051c1381311cd34da1096e7cc8d2c8e018bff87
45621d8452fd40edaadb9e5d4f89d92b51f8ed170dbea911ecd14ac32be75dce
dac5e26c212d389c94cd67126b98cdfab76600fdd7f36fb51813987daa38bab0
974d2ff8e114b529a87d187e5d6e782e8f4f5e01342a5d8c95131a799149e496
9549a3cd70a85c5e69894ea8b544c3df17e4f25d704b96e1ab28aa76babe5b74
e39b799c88ba47826dd52f55d7840a32481524a5067c247389bd392e80318ae6
cca790fbfd0d7c6ed9ed7523b80390e2a67d5e11d03cede53ca91580a4f90e83
17fe325ce9f63b99ba9874c84cf44fd1b87d593bea28b5b674a643753758f9fd
27aa8213e088f885272f0cdb0792cf315ba0b03ede938756d26a025ce9f682fe
2c6cfd369314eecb40063f24f365edbeee61d41370bbec9b08665e43857ec274
6516dc151c4fb114da529efafe44ba73a2675d1d19b1dd9e696c380379df7682
42de54a93033c757441b7fdfa404678109b0034c4b4777ff05f4badda38c5b88
d6c1ce1d281521ed0077f1bafd0426e63b522c3ef1eeeec9df67f3bae4bc48e5
15ba02efc3be471c0ddb42372553ebcb4863d6625245dbb6c0eebb15efdf7e37
27335a4e3ad99e4f70e57e218e749f30bf9c9a20f172c40f0532dd17d93eaff0
911ffbee62309b59cc549faf8048f56afbd6936cf686514b16bca3113d17671e
15731aa12b2ea9c46f887180c3885878a4b59f965f15e1af4cfa20f1d9fd84bf
7d21d8488356d638867f2ce794c2d1b836bd471bd561daf63e1b427ea3e3c371
1acd9f55e8fd1d53c279f5c93a2da8a8aa80857f1312d2e9e418ae4440a7c4ad
6216861f66844f4ff36f5ccf37e96f76706f8dcf7d2a8b6e76a8bdd30d66abe8
6846fa3b8d5e8c9b516457aec4036b402bffd6913d24cce8714449de2ba50848
6d3f3fcfc5f81e9f85c99e20cd7551f1f0b63a04b5d8d9734fbbcaf55b44897d
621b90bb0f3039ef877c04ea5d7c747cdbc9c25b98df15c1ae3946453ba2b8ea
733dddb19f5a1f3ddcbd8d0c6bfa2e4b914249156d26e0e0bf16a088a9eaa107
60fcbf45dd8e9d8bd7a837c3e4f840e1d747ec1bd31a82172152819011c53c1c
7f2ddeecfa67ae102bcd5b12532bf43d6e1b515b27918c89c32d4df49c3afb6e
3b719f37cf1cc9bd04c45129bde8b221f88e10cef81a75e890543f0a65afb387
7df9dffbbc8a0bf424628a26a48448e17f180c4dca4c25daab5f4b5fecba8ec6
24d80359767b2bb4dcc58163295031ff07ffe7b73bd22928f63c62c2e556c419
c3aa968ba13b7bb559197339835490f2c15d7617578787ceb25e01cf3c7e031b
607a2397a682db4b4ae408247d00136abd2e631c85653f45c833948b48aac4b9
ce421dfc528d49cfe98ad6f5bf332ea672288b85520b13efdd67bd326a10e118
6b9e7026afe8f70aa21888e25b6e28ceea8dbca35e168f23aeb233a18d291a34
05cf105b4bb6966acf3a2ea3395cf406dc6271a358c3cf51ee3f63949e738c69
3a2622ee7dd92ed867a3c279aa260499e75ce46503f33c62a85404c6715ee35b
861357f1e5ad91e1779f474165b3c61b3f5cadb6e38af59fec5eb9fb81773bf9
83e923c4a9d7aac4c64a4490d575030e5d1d2c788de808e626968374fddb4977
954430d7513b84f74d7c16755027fe46e5b9ec95cc3913dcc28f8963e90cb00e
ed7d1af4c8c05c52f4f799aa42dff81ce677d750aeed905ddce7ed886f1e7ca7
ad587245d7c74f4b9272860b36a76701b184705e7e0225ab64ccdf3aa20f102c
82ba6d86254808c33fd7205f340622360844e19a937645702c923faa2dbd1218
ee5a8eafe6e66913d3a895ba65dd825c7dc1b579b0b8f6d4bd47de45e5f922a8
25b20041f7b8a1a55281f0fc44b149f0e346e0f3e1e8fdf5634eed22537df9eb
40d8c8bd63d844ba24798ca2060786679aaaf1753f2350f12c3a984628362db7
f4df3e4ff03f14be6ca123bf8d4bf43817df72c92dcbd6234c0fee3b8bae81b8
5d7495a6acdff5033bbc102016c267ba56013a739c9ac62dfe9fdafe427d22a7
1fa02c29744c713dce48b9e52be12797084cc12dc805dda41fd60578a10a9f2f
0a51b5efa3feb126888d0430649e858dcb9c59a17872602334edce0a92336f99
250406dedea2e1b1d3f8031b2eca7ae2e96973b852c90b59878faf06286e8dba
f7b1ff75a5454235035d1d68158585cb5f08aa68b4c5971ff0499f0a3f491d41
8e2ed07b0cdfba8001740ae437674704ab6378f650d84b6b562bf346d5b3a338
e21fbe69d330a12d69703a6625fc77e4eeed50c3eed7db53553e1183258cb374
25c209961144262d9145735df60c1e3cee6c21ea9667d328aa6424dc1d0d4c31
bdfb91c001b3b9c30e6343bc3e6af5148ef219e48e9c49e2d9f8818b86e13ec1
ef1ba0e04c9a93b509972038925babe2964cc034d5947c9f8232fa9a40999fbc
b26adb0ad1a69e25c896c97307ff214ff08b76427f982d0eb9eedd8cf2126219
9758b2b07735c086e9a6dd43b4094d320379b3ef0c20bb9fcb691a6fb40b9c25
9612905890f8fbbe837b1edac168764ae7721a97d9e0ef67424d2fd6166951f8
00097747f5db43cd16c8f0a5e2041366bbfec05cf950876f7e4462c4231f83f4
3f14e61553b99ebfa7ad033f51ecf10ad964d71c18b90e360e208ee79f86b722
d6f062498e4294ec0991b318d18270374de5212418f54a5a1700a19b23a31dc7
b1c4bf77bb4c4fb26d219d87c7c0ac10a88f65bf5cbb122cc250035499ebd3be
7b839f311cb67349e68cacdefcaf9970b1e22a120ba90bdec02374a7fb96b232
08619b8a9cb59e86fffb65f1f0cb64579de0cc5b4a1d428c0c4f4b46ce3ea0c0
3a4c815b2d210e3ee18f1c5cef74ec07c3415f2dee72c7511cb479e78d3a0d96
aa7f15360ae7455210f4528a18d74863516b1c688ce1e539306e260fcc203e9b
5af0c06c3c04fade53ed1009672d75ddbf3358396868c2225e9460590590a37a
d9b361c5872562a91042a635d8531a62088d1341f9519903f98e339a196c4e2c
8e6ed843cd24fdbeb101f09bc4516a7cfc4554320a9318f10448fb9d10c45939
050a55114e979f4cd9473af6254311e3f50d619d9f304df427ad53ffddfad0c2
f849b4ea37eaf66005afa5269d435d1b3362553043e98eb631d8b4e820e9571f
88ac251d767e3e92666f3c0aad0f5a5901dd5f0a72a44ca5f957b40bc29541ef
5c28f10e36b8102e5038c96ca8a7cac9698d6f8fcbb82bc413d364e3bedde064
a77e2347719c840ee53489d836646e8fd4ac6019d00a045157bf223f32f0e68d
379965445dc50bb7b2c223f2aa12fffb8858197093319a10d3203038f4e16c04
c62294d51a206da6ce0ba74e5b204656bc172c22cc2404f882e91ad157bf4dcb
dd619aeb837e0bf53c4328e57e5be92943a9dc6f2c5664255d7872c976386c42
5235e3ee2671789c3c32b7d5e7fee0262a43cde4d3bb98d2aab133429ae002f3
b71c2b3688d9297cef802a2330b12b1e03989fc8b9d1dbc91a7bdaba94703525
4efd991952a2be03e1147e9b5ca770638a5778a7a141a705ae4ce474000033f9
e1ba21d948c4b016162cfff6bca97a7f8f81cc2c22f8011b0cb74e4c7c92ac80
c7c53ee9c0520d2c7a5dc97a78fe15cee47994362c65210336de52f9841b8d09
4be1aaa04e39d7f66d0c4d01eaf270e26eb38e09c8b303816c34ffb8cfda9f41
c03d95d95688a154c087930ede2dfa63ed8f228e3917dcc8722d1f8636305073
31d43ea9b53230b0d0797013e7604b528215f1a128cdbb26b0cc3fa43e3fe25c
b57c9bf8f8e290586e27924cd20025c54d6d8792b4fa52450684c8d0e131aa49
2ab0a4fec2d0f91f4b81d74153b5e2e9bcca216c1b49bcff347c57a727202068
0e040eb5d5e2479209d7e085760a7604544f0e1b541bbc73431ebd722806cc24
505dbef65e4ca3457d0587c4d4fccaf7f14c31b5cf62ae5f0c08661c9f5cb386
ec057c82dedeedd54c8f380320831f2d6d9094a5d0075a0c5c203be292231353
3aab44061f0c75483c8c90ddfe2de61eebaa5c23f3c1e14bead51e741963e5c9
393e8c906adce443244f5fd6ad7ab164ee9a217e609fcf90e5b8157d27623d62
a803207d9839752a19da399e82082ded26d15088bc3623923fe5d5bcdc2bec1c
639e190dcf9181e90ddd7a07c773e6a0511f1e268acf8d1a0d985abab4c95363
c88093bb9f111cd66e7b7d8cd8eca3407fdd45d01d566944abbfb99bbcd5602b
dc8e250247dd7a37f4389e94c1342b5e6ccd99ac4d62595b91b15462dbad20b6
5bf5fd83a8d8864cc9e75f018caba8d7feb8dbe58766dc0a06405ba89e39a6dc
a302161f88b641a7427aa926d438cff5b5e463309a6cd2abfea827a512d03128
ec1f323c8a92929a4a6f63c798fe0b73fe59ebcbb1e4614c2d3ea0ce81cf5cc8
8a6d82334df23cc978d9a97930cee6f567067f7c3877cd46b672d116805b1543
4a019e47fea549f9942732bd1537c24fdc2f1b0780f7c9b3ef7e846906abd277
82e21a4a8456cd0a880174efbec65e6eb267a94a9e0bb6bdb6c1c4998d7523ff
eca5ed22cd608abad4f8570d80ff8cb65bca81b09536ae22a0925ec620cfb875
70ac3a8e19abf258af93113ffc08aa07f0ef6d751440d874b3631f8a4bbd7721
9a796911b9dbc0b2e1fc5768d40cb8dbcd38ee9f970c35bbef52deec4582d18e
17a184203a891ef39fc8d56c29ed3b9010bf9092c8afc63f072c6ae93eeb9342
6647475cb38c7a634dae53b14995468e4e39464ad9829b48c97e78c580ac32ae
b2f5631d37bda7f69936d4cfe342279eddc7ce5262969c87e74ec6c4728c82bb
eff94703031bb14d7743d97690d7a2ff863904ebcd94d1a2fd7270343f38355b
ad318250da6b388c644a9f62bf59482d7a7161f5d3b65ad5b41005bcced2c16b
c60f81fb3b013544a32c81c2f455f34cdc12e195fb29631b613d0db8fe3b4957
a49b3e0382b3ca954a0c135e19cb83b7e29c7904b5852641bb08cee1bdba5a09
f1f03521a866d99a6b5e670351b73cfb453263adf92a8677de8067ff52ee5a63
e9ba822e8a11dcbbfa745caf433ef673fbd0e6c7d28f9d3260e2b2d75b888181
3662efea0548a4f4c2ea1031376182e12b36987f2afbdf205af3229b7e87b20b
6b334fe8615d13b3c640d25b5c822a2270111d964e4cff01f36d2df200f41872
29389c1482c576422c158fbf2405e3dc0d87d76237f6666e6e89c46b83289a9a
718686fe767577688e8524474e522ba5fe3f6222ae659a12855b295d408b7da0
042c02087ffab03b422ba9b4bac8c39d08605624026e355fe62328d4df8c5d9a
63e3118bdd69a142412d43f20d23d990ccb50ef669a76a9fc3924a64fbd9bb39
fda7015bd5b56d3402151be6c4240dd647138bce226f7ebb9b723b3b3e6b3b27
4da0d1245dfe453bd9ea5fcd36eed1c09e800b20eb206e3f1782d70c67a82d88
10451c2cdf052ef603a41d50b90dfa15a5e967c6ae7b505b1136fabc9898c004
5c1043754b8bd712703233855db0895697dbfcec02ef25092d073f37f8f86fd5
e647c80b0af65453bdd645f92209412ffdf14cb48b7daa02781bc981d89e075b
39c7e8251d2d384835691f495b3d5a43557f1dff58b935246c2c0b715b40d4b7
ed7386bcdf5be2beecdbb223dbf57c9a06864f399d4748c11b89760579f1e420
a4e8c67241a8348ef3bd04634b9e69d5dc5015f89b10f7580a008e5dd424ffed
60f14a39a759a0c05e4bc3b1e344feaf613be4e1007f5535a7850832760b84d7
3391a60001d472449b3e4920f46186d0fb8c8a1b96165e2f463e39e289274985
e4062504ce4ce8b24764d050c371e6545c3aaad645242e39876fa2d8c8fa931f
73d56e5206d47649cbeae14ef3187d43f6a3bf48bad527f6c01c8b1287edefa1
1016690d1f50d310d463f9636d6f8514a60337fce30e6d72ddd2f381bf2bbecd
d416f7ad7338fd928296aeb8c58c33e08737974840b690f1e8d6c46928fddf4d
65c07eb4cde5f6b5dedab807a67ee995fdef7e7fcb285ea981e0877da1b8bc42
fc201fe269c44a48be11a5ee8a365b50760c477e51005db9fde781c080f8ab14
44181e4a0bc80741530370fc987e7eed25cd1b2de3bd5c53e0cffd67f24f9e2a
82fc866abac0d9e690a0075cb623a625ec6154da7bc0c36431f20fe0a8df2fe0
50cdb6c3d767b5f640833e7ff56016665ab4d42f28853f5bc2726de5a7372ed4
8666c51df290f796fb129d441bff05f661a123dad14e5b8bd3839f0de22e44ad
77cc296603290f731b23a695b45f5b930a8d6d3c6783466226fa0a20ff0ac7c9
f63eea109a17da0ddf5e1356e23c37528776ebda66b0a5ab37b7aaac291eae01
aadf7c555a4fb77722c0a527b5a711188e28944494ac58f58c6dab6df90971f9
9ed636c4eba0dbcad659aafc776c3efff28a7d28da972a8bc5d8eeef3b7bdcbb
8de66ae1d2fa6ec3eb33c37e632118bbc8dda3bfa80da3b7a2770908fc4575ff
de9b1db2fa31637a13f8b97e73a55f7a975fc836eead6dad2453aa31d00447d5
6350145197d430f8e6af1246a7d605e7df7a5ff4fb58e8241469481bf48b8436
31acf36fd14a7de24d3a90d681246d5bd65d31dcb7795f2c9f6dd3f04ad330ab
37636fb9da5f27693b7613cb6268d36cbe13f628cbbd5257f2644b9be91b69f7
2403f914beb707992fcc653b96fe7b23eacc1f46c325bf02427894a28ea28e74
482a2585f57f4cca2a49fc473b73b2cc47ebe014e78f9dd868dd4c63923f660a
0d465c1053e7a6dd9138ddead25cdfe5f3a211d6f06e2c19453b405173a814a7
5017f34fb8504208864db35ea2406c19d989e65e5417105ddd09fe62ddce224a
daffb81a19999b0459da464d3d10492b62c12d085f4381fb91b597f218b7fc1e
c0418a9292c6d2fb2cca2786b1b9a5cbd5edef092b5a3afa05d09e6e59a70f02
1346ef208122d8c8f6bd2da33609aee5917f0c92fbc0941dff2cb3df9feef235
4efa7155231d3403ce8211ca2786915280811d9dc9275456e491331899265c69
8b3698810effb9e6e6fc2f5b1daa88e0740f8195cc8de64711be512f43128d28
58f130156cc2bf774075a2b0bd3a0fec0729494003d58fdde93a2fddb529803c
76834d20a2f0654a56d1cb58ec344aeac5bb976c694fbb328a3cf9c355a5ccfa
3de62645b792d17c86e64a14465c5d29686a03037abb8eddebf2c55c94e4d984
713982f983d942b390674695a46485ee6eac26cf864348762b018ce478fcf5e1
c434b8a26187f6707c6963a933c1e7e5addb9dca4d5e9f5ea43a40c43d068b87
f3d8bd0324f2d10f52d92da826ad20917b039076bafd236881654a5abbae112c
7fcb2bd027c96d69fa1224a8108d1b7caa4d4b03f813d461157a7b5956be1296
d929e3e3e5249cded2fe79950e619b47b99cfcf0ce26e6c5135ef0d5a05662d9
24d57e0be99056aea4094b3894b644585ecc7a6f016df96ce3fc1ae75635ec2a
2761bf9402fa697a76e326afb8d7ae5f7d375d260ce60143a38913a4e085381f
ae19ced0d207c80cfd0da23b999fa947408f31c00ed5046f48187ffa4a9dc8ee
1e410719a9e1fa29f15ab776e8013158bef0615e6960273646af89da0fa935c1
8a7a1b67df2b0fd72b42cf02c5e916b0761e89550303ff45560bcb350b7e9ac8
0422c68d1e34211212999f5a99800c7b103afdd7c8c5e32ce86d906031418297
415ecd921c44e3d65759bfbecdc688167896374b5cd91a64cf08e3c224b9f22b
06d26bd1bb52679fb75e2489027a7cf868ce7fec588def5493f6570d2340a2d7
61a6272402e9d29d02546929d02ec741a4f54c94a25ac1dc87b8f2087e4d9275
32d6afc9597fa33166e593112001404b15e1691f002afaa8a4fe0fd803adaab4
ba64ffbefea2acdb3059dd040b2305997fe7a6fd78db6e47e385af2cfa834af9
3cff1bd3773d6b6d2f303ef4950825d6c3768532ba44a111af37f8b632cca18e
7325171953b66ec7ad79eff3b69219cb31201f5bdd8da70a8bac1df11ca37519
5658f07151fdecba4c1e14c375de60affcee4998a0a377587f54271bedd8b226
ab3c9b3638b4e1f4b633120aef8e4a2989e2925cc84c7281674e7089b71bcd8f
da8d1a26cc2181ba716d80be532578d6798584c364f9220f1725670c78007454
0ece6487869cecd8e66f8adfefcf23cfc5fb8f1ce23a5e1e7159493a78742f05
660bb55526f65f17eb255e84cf0a5c0b6b98f1d54d7fd00c1f8236c60c596f6c
28c7adc584b38671839b5847e37a7017f4f7857e31c3108cac29640fe9cc2ddc
9165273404769a556bd04c3bf5f9d76627603543947cf885de71f44c65760076
a889b5b2d3511219936476ffd38e2d5e86fdeafaa17dd36436ea3dd6914e812f
b1d5d0333a7ebbcf6e908d334ec8dd335357546e241e6f3064251323b2fd746e
54d67d5e2d5b425cbdf89105b0c19ab4cea931a557f2d7d488009fa8685a1c78
57f58bf6f62a8c71eae1b8944d78df9897dde68b628df2706d7aadbeb2d4a4b7
561810d4f7e1988262f7460684575b38f13c4680c3dc254c7ad5bf5bf200950d
857556ee412c33b42820f740ad368a8392ad0605f16692e2c06b78ed542729e6
d399a052823686f83e8c3f0e0befd2e7405c52e1d083c13e9283b50b4a834249
6214238f65e52491566c183d5ff36711e6a99eb49f5518efdae0743467dcc32e
4d4c92ff8de5f8c649152d48106228117f5c160fc8bd2c69b1dd365337722742
2c59cd0589bf70287b487799df5e698698f4ff13bf4618c63c2b0e000e57aad3
dae45bdae9bbb459fd55584839dd185f48a75687a8192f3a63d2280648e9322b
02b9625144e67c6080f0eebb7c741968736caaafc151c4a9a1c1395f31b61ca9
004eb39a789a9ffc247a249991106df6e1c52e40e4533f0fb6087a59080bf55b
f197ae35804870900129d9a941004273ca0065c42ecec8cb517609d01405ef3d
b500f09d410e230f680e12e5cd607cc0d1edc63e1845fd1bf3d9ba07e1b7a5c2
eb0accd178e1c0178ef55eceba43a30599a0477fa4f8ca397b69ae64f6b6310d
9fdb875a00545daa9f0dfc0ff9fa355a9ad4b08d08ca7bece29a5d3a325c1463
8f38dfe12ccd5c942a5aca6414a7b04c698c9adae4d8d43732e41dc0f04da349
8bdf6b9f3a58296957d88f217d5c46f45c7c3a433ff9c355a5ccfa3de6553bae
ddc411e40b9f5e95cd6e9f62dae79353c5e1b874f6677bc593d2a2a07b1c3b20
9596abb07ee8f71212674a53b3a604f5af5449dbcef830433acdcf845e44d879
fc619fe992d5f6ea9b0cddfc24fe223e7a357cb0d5771cbd12bf50851f400b49
9e55a3efcdae3b6d6cfb72ca9e96f9355234ea284bf70abc81d29bfc928dac80
5eaca5ca15e3b8303bd3710a1eba32e3c13ef7641c7637afb5cef5dc59902e6b
aff4c7ce9fae89977d96691d306dfb9a54bd8b9cb8484ae4b50f7e2ae5ade3a2
9076c21e036a78c3d78918c529b3eafbcffa7c842dc917f162fe68179bb45b4a
75ccb684f7c4b5a6d5db3cc876537e4b643e447dc64bb76d9e74a3d378517880
9896262b00893cd7474bb605994df0ee028f4a703b4ac00ec95dc3ccb3342835
418a5a158b4e6072a657a7e3d2728c0f64242be6fbcdf27d5093139fd7bc95d9
89320f2ac958c06cec8fdcf56596bbb45b18abf2c6b5b891236063441ba7331d
613c0f1de49f50a839bceeee1f2ea065fa879f50918c6a3280bd7bbf1e366e27
4d5e9e0a6bda5a8768f72b811252f40704a1190b5887bc5c586b1704ff1f7d4b
a8ea68ab335a506771a681f80dde3b8aa04ee1bd6cf881217b829942d0282c3b
e262314736557d8a43e8686f3280d4c9aeb9fd78262834ea5569a233d459abf4
5e50d17d666d3e9caefe4f8d74710aaf2497a405d73fca9e107a922d0a9eea59
6de7f71ef541a0a5b141f786dfdd1861f81651f540f82505b3fe83aa32067db1
13812fa761a7f3b28342bf1ef531746b033232fbd06a12a0b80a9e23571fc1d2
184a850fcceb331ff7076d3a03e8ed50991837925efc91e1ca33491b36cb1aff
2c1c58338fccc4a9f0e298e781afb718990cb9eb141c6ebbebc79e437b52a17b
0b4a8cfd0da2f9401ce9370a1ef04986793a67fea0fc96d8eb325cf73ae51c14
c5030d9a8340b8b33986ccc325f93903c0928af3fb3766c8428fa690cda94c97
939fd8df41abc0652e0ca8e82e048b133aba600278e3a227c4eaf8b557856ff9
e6d2fb6671ba75b779e90f2bee7e349ec394ac76e16c73509b5e99d44fa10636
ac63f8d546d5eba3c6bca7b738d4338134d2b79dd8436932d6ee22e2183a5c07
a72f4742edfc2ed11145fcf8a9bb54313894709b28d214fffcc4b917e0c389b2
590ef307e3ebbf0e42570de893328dcfe81e7edebf72b88ff1a557e2f038ef1f
8c25c68dd7c037226f7dbab36cf73ee00b92ba6efdbf9a9e0f4a7a91853394b2
a02077209350a2b61d4771f338e739c5382c07e18386e398b2946ade4c3f0a44
ff6f333d550ea7db004654aad32542571febf3c31271cc5d167976c6407c7570
115bfb3f838701b58b320496c39c59553b33fdd7fcf135bf4d849b130fa58739
6a983a93344bfd37afdb01b18232af9f65d5e3f5131bdc40063d7916a2b67ade
09e2fed7968e96c6ffdb6b41b0a7bb6b8e93c2ac4f1b466f16e219321c9ad83a
8027101b3d007ed0e43594efafac9dc0e4764477f6328ef0936444bbb4fd20e4
2ae35cc3090ea9ea8ffc5a85236a4597f661568964f6a839650470f79eac69ab
e1794cc369c4183f66715746b9abff4c151ab6f06be72ae63f41f5f806b3fa05
eee4f22990464b7e4950f5654b36b7e3158a7b4ecd4fc7d5600c831ea7521df8
e61c5f5a3942eeb82e06477461ddbbbc9551ed095d78309f0dc9465f88e70fdd
7cf599e493318efbb07aef45a81f2d36111838a736b422891c562c933005dc2f
99a44557c978e4d9da41d3e2c3820e4a40fe28a8c268cb593960058f0fec18bd
c23102a74807835b1244daf8bb9a7553961977ea5a7d0e8a59f58b868c9daf82
5c4cca4d84f4f37484b6bf8c1da54c5e089d3cb47377a99eba19e3e81a13d2f0
59c22952aad0fa4f3a4d82478a06a1913b8f2dd141e776fdd43d76fc9d28fb93
d198f84310f271baaae7e44752a4f8fd1ab089d3555f9d7083999bdedafca730
50598a2b85541f071716d335c793b55b007392a1f757469aae1436de65d00f07
66e196c47010731b77aa457876cb8994b97e20991af4e7b47bdf752f59c7e6c1
27f3043e2b06dead3d6fd024aeb2413d1dc2ac897aec8c2a97c848d09b102982
06f2dc08f0f0ea755c68a0aa8062521d34f16beaf87d359db9cc79faabc3d256
3cae31022f3d4ae7dbda58194866a7a29dd34f17a4d9b7f7987e01a6db74bf0c
5e0c53bd07af310a2756f22c1cb9bf2a0e1a57aba6753d037887c4b65a661f70
b2d10fad50c5ffd365d3778295d880f498ff4950257e5ce855161796989611b3
e793c99f4e12e947b51643fa80b129c6664b283c100b5694d2d8bf60a3213157
a17b72080842b4ee055d0860a801d930d63e5354f599854b5a142f11f6b78717
8897d66f3f6a3ff20029ed1870d538076c1d4fa4ece214b0075ada5cbd5f4087
8975b21a79d8a67be6d73f8777ba26c18ae3b8613387b25f43beda7a2c9f6692
667015b80847d0759ccf2ef45aeb00a02d63f630dc6823d5f61e91b410fcbba3
677470b440063d43bf734b16ab45a42b698037ca3cc5b460b563467d9b6f9453
d31b47822f6fc7fa1604e77d00ab1f172bf8f4cd9bde3635703a4166293494b4
37f032570a165604cf5b8b153da54077095196e55252dfccd90a8ebc09bb7f45
ada3560fd014a844b2d9bae654a533733ca8ae359bd7537c60b52997e10ef2e6
f9a2210dc484431af83a956af66f06646d918d2832ecd425ff9cd8e9268557c9
efc84868728aff4d8174e6f85135c11a6968b13f7d8346334dba97a1a3f0990c
d1d2bac3aa8a5c294e4d0d67e893a609189383a24cd3bf86c22720d320d59a6b
5e543b799bb5d45fa9826b832a809026d104293f363200780278d973c9c63492
8576754d19d28c0e4058684cd1a71a919e5814346bd40fc15eeca1591e0a709f
f083b86841f04cdf8aadaedd95f26fb807bd09886a011443d410466fc6bbc56f
95468646ababaf39887f25e485776892c89a6cb7bf5d20b6c4e8a0bd65c5de90
7a988a3e2e39aa721dd03ca69608d59cc1e8a1d2ea706b0426622e4dc6c94013
3f979e5d141f06fd2051a54d84fdd4c7170a44f81ac786d258f98893bb4559c6
ac59332f599dbf644367e28fcf01da3e8f33b14f8f83052d9c0af2061465b059
5a923f4567274cefb60341ba793ec77e5d8c7f96bb88b63591d8cbeda48642dd
c631a115cba7694f64506c787271a8c9cee064d9af1ab39cd121f7a1ff1c812c
903aed1a0d13791c051ae21ba92b5db47e72e7d6001fb31535a385202edd815a
40bcb9937ef719f7cd20e30fda23fd356476d22ed6c206f15cb2a78fdb45389b
6cfa3cc388cddb82886b8eedbd9447a48844921a51d4cb19921c3d42e58c0173
5189ba388aa32ac9b75b5ab43e428b20957bf253a117efbdc6d9dc0931099af7
eb10790dbb14844be86d80c5e1ae17d0fa0f73703f180effa98dc52c5a8a9740
869fa97d0bce9fb59b36a960315cc1d7264f5fe730a397c6ee121021a3c82cf4
bcd40e844b78f99ac28c27d9c4f4c90cfe0062736be1bd39d45592020daf0b9d
91efd616232a6aed299c9a01388e0460b46ec288d4e1cc5b08769ac88002094c
9b680abb4d3e0823b0832d0d60d08dd7e1a27531803b26f74966f6f4fa2f4117
2048782c3e3ea7744dc52e7113d3e290d309881514fb40773b1ec9bd7932f7b7
42c3a3dd44ad5207b89125d7a11aed1a249863a6e537c887b00161ffae6817d6
269d93d4752375c48ad0a598d2486f1a173bc55562bc889eee0afac3a4b12fd5
cc2a57080eb6e3880e1e2930fbbf7e6629c428a1025966b47ddba3d48874e8b4
ac01d33158f31760d82a88ee60be24a62fb5ad9f24254e2198d968255d79e4df
206d37d52891300c669855a59e666dbb4676c4ec6fa988d6378ce4d00892c711
3fec62c89dac139cc2b77a954f6fe953064c52a9345df309d8baabbeb1cb7910
5aecc2f99b451c1b028a81d62927e2c932805d38cc8ab7aff70fc84f944d3fe5
12694fe2dc46e548177bb78e4a51ad574c30f29145be59a24faadf91d34e9750
1b1bba9cd1563e7e339e9d8795a3cf887053f1ee7062fe98bbc2257e8a6a964c
d8542df1129217515f38beee762ca45f9ac85c4497afc6b7200c540287d85c40
7dd6cac0ae86a4d324d1b3fd2f249d3613772ab3362840a78ee8633b3f89b46e
b6749c008a46eca09c3ac738011772215d2e61e59e2b35b0d9bd012378c2e8f9
7a58b7febc683901bf086f32282a9e539f2816acd16d5b5a0d7c26372e1b7ecd
d64464cbc518da0ab436329afd5f6e12d0ada7cb9bd84cd2b6eeadc4b966e2d1
33dbfa0ebef84b45019f47c668325dd037f85dbca5fbcc54ef9aa710d998f3cc
df1e64c0809bcf0386de718201cb3c474fb5ce9cb8054ad3394bb6626f7eef64
f60626142f6939666fcefbc920932d389770acb70180dae25a1e06fd6be4f45a
07e382d0c055447a99dd7e0952f24e89401a1c15a277d02dfb6d31f41ebcb754
2f4f608640ef289ce4147dc1cab77ce02b1823a87bd46f0c00cefe0191d68573
6af019569e89ab388d3d770af3f61546fe350adac3337713bbc091318cf92de1
7b61ccda6e28c314cd2d6a27e758e48dd6038fb55c6841dcede725a1a30d5fda
87a37ce7461be94135e94dc72c976fc326a32194046979c351a53fb544d1ea5c
dc87382f3e020e019e03ab3ae26eaa4e64256cd32aa1eac38539d04ae425e1cd
e4548d0c352b680b4922b6b407856f1ebf6f3ea6ae68eb941d914d50642dbf74
fa2a8df6b682e0d53f3b21ffa7225b105e359f49488183dc94cdaf1d7aa849d8
88849fbd3bf680dd02198a557e768668eb2fbbfa23cf3af7bcd76940e1329b04
bea1e1da8d71850e11eed811616410e2f7db0557aad2d531e1d3af851329a38e
96b192d5ab246fb72788f99e07d22f9bf6c35bd15036db0fe66364654ca2c34d
4aa14542eab8d979311725ae045f9af9790b26f13d0e447b3f58a8b817eaf26e
f9ac8938054c130583237c616a5f3be196c9252a0621cd35183ffa748b0c4e23
869a58dc345282066b213b441823389934fea2e40aba453edbb936ef4695b6ca
5734f49bf4c21f0c66287df7cd723cd1d0e6d1ef02514a89a0360d6630f41781
e06d5d12f793d64c257de894f58a074c06e4e18a5991b46af372e7c1065a9132
f88a83fdb08a4a54b50d4218785e15b6a86d90b7ed621302558b510c0c205100
44c2ff47efed6ff373b46f1b1188f96da0b38b0a9430e571d4dad21eae98dcd2
8d36affac5ee640128374d182d451b3aed3f2e2080a1531a2723ef40b496ae2d
f0ba56582870fe7ddd397548d59042d981da2c5b15c2d3cf8ac934d91679067f
e601409c220b8a6647414d0b73afe5e96fd5106f9e5c9883132953083730f75e
2947bb0a0ad58d73cf0b0ff80481d2a152262e1b024df4ee74d22e9933f1f222
f389b8367ec1d5200c70f7f48079fc559a6610e483fd55df95a8621c4e42774a
0e978ad53ca8466cdfc66e1eae6ea5df41dce8369b40c6aebe585f4e9b3b23eb
19139f371c9565ee85a2d8eec6e0c48ac857ee2660ce13eb5cb81616649953e5
53e23fc2fcfbf98a83cf202cf6ec7f3816e9d70a4f86391e51327e047315f653
906b49384b9c069fbdb27c84521aaaee74e53055897cdbd167a7b5d33b8feb24
aff3e22b5d9da4966788ced11943e209ddd32f3085aa28f2b631e56693a390a1
74b5cc902bd67a2c3959dcd84a08f8ff2dcfbfff5f75e2ef01c9fb619117fd97
f8682a84f0e718362659df2d082fa8d9428010a76268380f525c4a3292cdaa9c
475554ed161cd2c43355af9350c3fbc654993a3f8901391308af9bbb1fce0813
7592a1a4b271cd98d094d77ba038314ea68a4e64d68b0165bb593b873b24fe3b
8a59f22ea3714023825140e0658fb050e8c4e32d85e92cc0d2eeeeaff00c353c
c00483ad71627438a50b4dade6b0aa1caac73ce846581b858910486820ad2190
33a9a108a502fa6ea60b8c85d1ec5c816a325dc802f213ba6bfcbfc65306d07c
3202f0d42fdbb7b62fc6eeace31837cc024965695c529e274ea638ea2a24847a
db9eabb31283bb13944309c17e3615256c7b95d8f99f86feddc2c7a566e4dc87
bb060d88c661f589080fae039f4fa4b5b611caa6c65455dcd4bff9ff89b63f60
5dbbdb21c07404e02688b7f92a8373ee1a6a891bf6c315b793a9fe81079c1ef2
804238c72039924ea63039020363282f24f969f6a1aa10ef2d9e7cbcaf53c9cc
1f0925637190af45c7aa731e7dac0f091dd22e5ddab9a8dea87a0269d5343ef9
f8297d133a037679c6834beb8741396754f2dc5a89a1621c13722289eded1880
c9859ce2a9739d003bb874f747abe3ab28d79930680ca53ceba6db84aa3eb869
0830eff19972d32f37eb6eab0723478f881127a84536374025a782a7cb24f90e
892bf7b8b9a44a237779c4cdbe5325d3e67cd11ff1a58b9d69ce962a659486ba
6649708daa2f7a0cb144f6b11161501464b7c7b20659f6e5ab21231aceb813fb
b30f9d6c6afd24d638ece5dac5259af8eeaa46abfc1d6f29383188b4ef4d8e08
fc13a8cb07f61f55727619cde4f86944b879e23dda590ecc57f312c9d6d6fbc6
8c875291b617f8040f4494323cb93e84f2100092b5a0527dbf02fe55175a1b8f
7b8b130de1ee8d39589b7489f2d087e6f865cb536b595c552534475f1bae5981
807cfd89532d91a6316154dedc1b8fb3f8fa0270651107c5ab24ad8310303d98
39b801b2f0158aca98833b79846369993bb4a3a715afb6eba5a2237652b17d60
2dbc21281bbecab7df65812e9ba59b78f6e3f6248d5cbebfedde90149d823635
0ad0a8cda735b701534eb682dd2261883e19355d6e31d980265351f1895cd212
a8a006a6953dff2ff7ba40d5263b6547b6cd5a5317bd77c08364987424033bf5
bd9dd20c0ba69bb3d25df2ef5fc06092981c2f57ac3d38b93976eced658544a7
942cb27ef9cc649835863b2e14a4390fc3e5bc2c0be8fe2a67f1ecb3d4734636
6b29b60d7261a743583915fba3c8f832c4c41bc1dff13178f2d61832fdd13853
db2135b4d92b1628b80f3baca26481a48a9e0d0847e4c53427e5066816b474f1
052fdbf71f550738c13f1c3ab27d1fa12a11194c145d90f61e4ef2a1fae0d25e
6974f191d3e1106c406ce4fba266c952faa505954f6ddf00068422eb0cd4baa5
34b06c1805468d7fc0b88a491b48a393c55726d4941ea7d869b239a69565a892
82e020721bc7ed828c88681584e0c49452fa8f2bb88914491074cf0ffcb5fdb3
628b292a0f2fbb488e96767aeeb4b5b738ddd65d50e143aeb6e37fd6110e0dda
5e2120fbd267c00b1b90626ede7eb1fd37636e7f3b01ffc6f7216b396bc1a5a6
04db15b065c9a030d08127a538f69779a5bd73a6d6f55d2da851afb9da60bbba
d782ffc339762792fbf66b2d139b11e76a28e8b483aa5d858ad779e905456574
8c2f59a94010ed153e90a961b45a225cd9a131315d2691f6fcce8b321078deea
6582997e824705703029a8aecbfce61d427d476b559145f5ee40364c49ae13ee
bd940e7104a70a5501f5c0a632ca1902b75598d3ae606319d0ff4f0bdae32c70
b1e4d5065157499c15fae5349fd68e77acedeb296cda2c912562546b9d1a966b
e7959a20045af835d9e7348b5b33614005f05fa26f45c3de3397e5ef9cf02036
37b89a5085099eedcc025785eb5fc2130988a19570a1661772be2497d4c8ebdb
fde6619b3af0fb42f4d4c3516db21d44941a62f62f087ae3731735beab50f7c5
27856884c3c4c01b83a40af32613638be95390c4c66f71672651df6bb4c4a0bc
a9533884c477f6ae268cf7c21fb12678c54e4b3fd8ef032d3e3c2d3651bd905c
562e643f42d81fbbdf819a28c81c9874100a65e70466226d90662ade31127e95
c7f5052cf24df0a04d14d1d8a5d5d9b89d212272860db459e07731b3a6537c6a
83c352d2d1f09389fd4da36da0cc25a57f402976cf3a473ff27b6554faf254c2
ec8e52fe59d346baf8b1fa0afb13289b6ab06c8d25afaf4f218ba88565cd3acd
f1fbe77f5209e6d5dc361c18d9152ea6a5267116769c987b89dece4fabec70de
3adbac8e5e7cffa82f4b7e2ac41696dfe89443c64d8fc50c5699117d094ce852
c2d7dc91ca9f1e764bd117ea7cdcf98ecb87648e82c64d93f58daee619f531c3
615ce9df8abd7a63b2957eaa3cdb10e48c379a96334f8510a1f04a7c433f3756
81c565d30f7c7d9e05f4d6f249d203b286c45eaf851190ac74822c5a6caf4f04
94520c329c09cc8e0f5ac803219b76c8be672d5eeb878eee0c1116ca9942b133
0734efd931c8e5d54bf9f22d194475e14eb923a453c9b7f4ebba83c80b4d36e0
617ba6de745ac02953a63a9d73bc2e07c52a6fb3182e001c021916ca4993647f
8be57505636b75e2013b781c2ee65591694009f1aecc0195349d48d780db2da2
dc8c60410235e5c3c7b4542873c609f921a62953760759d81b718500fa45b09c
504f9cff973ce3e32a35b426c6f3fb2cbc366d980e133fee86f961dff6a21602
d8e14529e91e10105afe9f88c9f65052db9b32cc2d88c7bbd1248d4448706d3a
defe4dd13ed573d1e8d31bfaa9fdec9fb1dd2ecba1ec8cffb0601523d86b8187
e1a1526c20166314bb65cfc266d01bbd562c79f00c390ff40c42e424976ba541
ee3860910e852c72b4c8de0cbc50679ea9434f4f21940480f148365bcb9f8889
9c6e39c309a84f63a0b01896d4165395618198fc9ce773a874074a2667120e7f
3bd73ba915a657c161c04ed2ad2a25d81c286f5ef0e0fdf91f6c0a1da5da4d13
56a4df2f14c4cc408165ad0fe1c60373909ce04828b2c333569b78f35c6594cf
11becc7df1fe0d9980cbfc9b083e6ff52fdc26cd74e64b22196586f4b21768d5
2fa9107d541384b53737b872b0d7b0f931eb1525e118a45764bb1945fa8b91cd
944fbf3b63be737baad604e8e93d08521e00f8cc93052e4692f1de842505c39f
054d49d7bc49a836c724d887c0905d218fec71b710fb0a5fd2dd736354c6b43a
4bf4cca989e6deb360b45e8a353b712c2745da57934d5629923523686d471c60
6e09b539c13f1c803dd11cdc41a41e8427a513e4808dd4f787114870b3bf8ece
f68fcdc6820e82b4d537534cb2a7c81f4e6878bd0cf435cf5845fb8402aeb47a
8be7f8658575e7fbcce387587b6563eac02d806aae5a83c185627d9e8857edea
2fa37ab90a1aa2cacdacdcca8a23a2be13b1016b2716ff29c2a660d49d4d2400
dbd73edf65aa2981dd46641d15f2b5bfd40768309c570d5b5bad657915ae022f
7fd0805df33437b8ed5d961a0c65b33a6ac36a9ddbfe78169d4a6d2cfaa5672d
9354964e4c756a20b65f1550064e435dc5b0b8a53732a97ae91c337bbfba8e1e
87d2ce72b4577c8474a0ef36cd33a3dde20e650eba05a242065b8d1fe3c702df
ce2f7ed86080bf5e0efd27e162ebc4471770197c13ab838945cc2df10daee14d
901fe063ba909462ade61c130d7911c73be3d448f36455ed30c21bbc2527d0f2
5bacfb066c0c45300752906845190fb3e76db5fa537fde62a1e7dea1af35898c
3edf2d9e6bc359439e38d363fbc37f89001e3dec0a7b6a4a7b112cd687e7c5e8
4b0fa42d52b534b2d16ee3ac26b4dc3fec6272d1867a390ff9fe074d449b6cfc
93129a45a6624d91800c7e638c32c057c99673e7b57c5fa81e60e1b7a0604424
458e1bdf2f7f77d1fd1275512e3f6d5411cfc20cb8571ef52c16adc6073a9542
da94754c931e74704ac639fb69b227ec66d4446231bb566792321da24cc3510d
89b59bff3d647ef55be28555e93318d9eb888e32abfb2b0276ad9483ae8bd4b6
5f2a3876c094f8e22056b9a26dbda1b09b0ce8cdfa085c8803151de9187d5c63
406d87c00f3ce085b9bec794b99b2365504e7274aff81c5032db910e31c8f2f7
c647a598171a4e8b19d05b5a77f1073ca0d5f58ebef2be11c6f390af9c835f90
9b726589c9e75687dd9ba33beb8a42b5bb63076a5cd2cbfbbd43498934b67f6d
35556aaf2341c4d3121ace6338238cf67a76ad99ce7be30040484133b44d08f6
e0d6acf93cbb3d7b3b860e620efafbb0c644fbf90228a339e9a5e99632238e99
b1644b65c08859787d58988cb9b6b792aa43981c2eba7136fe17c9fa4a671864
d39bc4ce208357e4e178a4c0ece034f01955199d38dad2b9e9f3c47ce7c652a1
29476742bb9219514553a1bf9aba5af84a3530746f5d24795f7d79a1927f1266
4e9e31e2d86be65a64463db3077fbb4d8ba24f577ad99d58220c09d15d7478ae
7360e3f048ce8bc4e7c171f86d2055c61cb4e32c8e28e52d3e8a87109f5b9c43
b3948f9e62356c4f55797f5c349ee40bdd6e6d98406e361af69bca61870057a6
63c08cd95a45c1d49e5ded4ca719b675f8fecbbf6cae5bf666352afee85c48da
e728a4672cfd6a669015d51c96e74ef3d42233fa3e5271134aac9354d289e7e6
4d5d7be4433a62bbe54900fea32c6bb7863dde80c914a4d50e11db676af24cbc
6161712ed2e4ae8f0d7407c83889a3e52486a3d7b1e4ff2676a80039d4217eae
febc054ff700f06cbc565353cb08267b50276528e5da421aca6d458c8d381645
3d524f1b7d143791c3939bb177dedd8000a364a4fcf6d27912cd57f8b1aae2e1
2f012bd0049675d4b47fab974f28d57b151bb0cf1290aff5fa25e4219aa3e1d0
e6007552aa41b3446b5624135d748405749cc1db00a6575685b8f78856607a3d
db56c0232cec6d31608342c785fcf455898832088ae4e608d6fb63c57ef4b2c1
055ba60b0945c580292adeb56bded15de6311695fc74c35f311ebf99702b54d8
77a677ac6df01ce3fdc54c527c543d3bfd19265fc579c4fb688ba9f1b7233e29
ae0355e5a94f2a25fffb4a8fbd74f6798c863a2f318b4ce0c826f738614f5726
2a2dccf3d5b00fc8140b153f360dfcadc36873b124a035781afb7ed41fd3d546
79598d5030a8661603465eb2ac7a615c3537ff96556dde086f16040935e04a1d
32fca9256ce570eaa1ba42b53032bea880447949fe73fe1633a552346c930fea
020e94d8f22f115b195d0d8ebab7a07cda81497b824bd9961b660836709e0c9a
a7e61662b0684e925e4dd74fc97b675b79674aa3f6d16e96a1c86fdd2c6cc712
1b00906b4d70c2b27390ad840962c2aea555822be94c6a46dd5a09d0ef16d346
32e6e2d9563d10762b2606ed532d652b7949ab11efe383cd0afd2a92804998e0
7559a8140c01ae4b4fb4eae2c147464cc71403bbfb42cf1bfb2e009327d29ed8
65feaf33fe93c081568949c382806f465e178142d956b250a827bfa2c39f462f
5fa069853ba2e2d18ef99bb53878c78ae61d3ca268e5123ce3706d0aa11c336e
f5d8cc7036c9ff0e73c16bf7fba5d692847cf0016535be27b2119efef09c8890
ea845adc88dae8bc41a11a861d31e86ffc64ea135aa53c1823a4e1d4aa8ba507
9e5df4efce318b6d3f486b385377ebe65bc916f13609cea424f9c42e8921f757
50a6c10c6c13ac5f8a5b629fdb16ce10200ea8f825280a115dd641cdf1763f47
acad9790c90657ef835ee1d02f3d2e241b7d6749c9e4371216931887778de4b4
60d0ce386aa03cad70c7e8a133ac4ed3ccf31d91dd0986caa5962c6312679d01
359dff0b5a357fee803d61922367be5e97abb50f4048aaf6c76b6c65410a57eb
77bbdaaa596833dabaedf537786063320d766fc6e6826be6fed511babaaee583
7b0643306d7cdd12f02e183d46350bb7c25b94b7d4166666c556d2f3adaed6af
fb36837cd861192b7a36e91b295cd832a67094197972348f44d0ce3870a0891e
0ec2fdd4413f67060f412a3cd31a509028d6361353a5e50058fe4ea46a6732c9
98900dec7fd4b20e6d6fec5a30fe1f4d743d0deca667b5fe871a7c68143acc21
ed2512990fd94b7ae6fe738dd326258f6cd6060f243df995a3af97687a78eed4
271f3dc4f79ed18dea9a84694cb969ef7221be2485c8ce9101e9b1dc59af6222
02a8ff7225ea661d01d5370b535c33ea4202b6f1a3ec6525a56e1ea71611491f
47bd21ca86ce6d8c924f28c8d5e5f66b9222827382dad72df22ea440726154c4
19aa0517d06423294bcbc7c6f64940c999d9fe1c6f514c73cdf05acc4105bd2a
764b69f7d17204e9291a1d13be111261c53b5aa1fc255a44acfbd6a3e27d20ba
0c2d9fc7cb0fd2bf3a9cbfbb53eb0b03c6ab2cd636bac90076e486ce9ee10b8f
e69b03cf6aff7b5a3fec75294bdf32f198d65680f6779a9c6ef6b8edd936ce66
e9cca6bbdcf4cd0808f33175d40c3f2ab76a02d0051ab512e32adbc595014767
1ff6c57a77b8303e50e7bf024c8af22a2c7d5adcb9ba42ee6565ffff73212f22
5f69eec749413749bd663976da9e39c7f2120fc475427a64d578e332cc5d2ec2
84307c30b92e8ec05500c8635a069294902bf4f2969dad5068bac49cac00a307
0e5203b294f7a42c2db51b4bf3e80bbecb780b0adac425db4e167f893ef4d257
8aca20afe2773ef6fdc6e07c20ed0b0b4620ff0d5dc28edb59f39ed3e976c510
e339c4a0fc9f56524df0aaebc9d985fb6781da97e651df018635096cac944d58
ab8634a3b02cf1376b1c5fad86ab57c5ce85435ace10216a9eb3bd3deb8ee5b9
acd93838e9c5b26f6f02f5541ecf8e94ff33839b93e16b740beba88c19410250
301614938b4a69ba7b9cd47d79d9f58649225ec073b8c86503ef606afcbcc79c
287c9315cad03844d0a465ace3e83ed75182fa6de0166ed492659d8e872e5f32
c3043b055e44b91ee3bee18777b28169dadc7375fd25ea67288c8a7919fb6b90
e5a805a38a40f1ebd4bebbf6e26b5d5ef18e381970be0753c35de24b6253f9d2
4a77ed4532f5eb2f464e946babea6cfb2450543655fdd5ba1d46894538dcaf49
d824b3dcf5484ca5098cdcd138997398a60b711568f26ad6eb964de44038fc32
07c85ae853ccf6406906bd00b9af16f3eb2246d3ae95f6944ff39b64453b5a99
5592ab75507ebdd4a7f5ed567eef5aa2697e973e95b512549da9190dafa3f3bd
abb118f5a81cda2103aac289be4ad5dcfa566bc51d8ba2d5b19b7bb31a484cd1
8a84c5dce3c7e50429d637c22b29d8f750b60e65e2fe3a7316d458557c1e87c9
c6f8105537aa3b5bc789cd7c370b741477d21af453ea2781d5ce29d20f2176c9
9418ea8db195cdc57bc982e91ef665411741fda4f9fbfe4bb3966ac00b76e517
c7c0988339bb8848e616a134f95078a0a5c1a295bee68499d1428ada2d995633
39d92748fe4016a88f9d534b35c766c9f970903c0b466bf4867cf914c570ee9d
f4ebc45e058759a851b8aa472d14c305f6410fc4d05acc1b11b431fa438490e1
816475c1a6316c7635226b49399563b3dda5efa5b38d1a32efde767ba4a0fb3f
3d93da113a8e3e32a33b5fabf64917251122ec6dc8165f26af07005b2da6aacb
b7c8e299cd7cac6e3251bb812a600ffb1a028d36ce351b3216a7d23a195a9afe
e3461a40159201f1a89b8ad3da7fccf08906310fd84eab1ae0a76589d69e13be
ee3ba496b5c3f34757363c4f5623f975ec58c115901ddd9cf4b8e135f1d996e9
1a438123b50d453656a44088b62fa37f575da53a2a42bc659540bc759ca7f24b
d7290430dbd53edd3c602f2decc5d5c2e305e8c9ce56626fe5591ae3f6fc9b50
62fb69b77e75d7e0808f984b7a062383d2514b0d682ee525b39fbad9b84663fc
3b74f5ff1adee4775b8060b227c1df3c3b2fba8bd36cb216a5717ae6d6629a06
c099300377b741877ef1db03047a531dffbf8dc6aca4ce0097b9e623717eef1f
b40465ba5b56d6d618aa3e837cb2791736223243621b3c0f36e8cf3c5fa3b815
36815d1a277411b9026a9eb2b1d1591aacbfc92b49babeb49269eecdc24efcbc
7b0871150cb009a229b3d1665d05a796f84dea63d9c2e23af3a999f11f53aede
953dedb336ff92e7839bc7023f8f4b868b5171e03857ac0ad870c8dc42c6ebdb
cc7d51d1484f93106fbc79a677e97db44190ca4478f1c6e3ed2dd57eb8098798
f3f163280a6ed0a87e57581fd663917674328cd2d1c6f720ba8c01f8ab79d4fa
7c08f1417a9a9eedfdbc6d7c84bb796f3be629e476565a34885dfb7c4164e821
529be9d79022b6e1268cf6517a0fc2864e4b718f4018744984a2376a822dff4c
0344668df1caf06b38c229dabc5a8f51ba120b53ff9291b99340e46786c809f1
912c2d910700252081efab3d5e1d96d8933b6709ec5198b27fe6c511209d84e3
cb1aeaf13f364a8dc62bc37803dd71c663f7cceaee35e5b39f87bc7affc1dc97
16f93f931ccb3164490787b1432be66246f8e8d2d01232ad3ecb7962b9ff256b
886ca9f86af2956dd082491d015635e02d867a199c481d51f8751c6c9a3efc2c
7de4970992cb00d45e1d71522ae651b177c2a5afa2ea22a1c083eec3fa104296
b5b9b74464f293e394fe8ec2d4bb82b7a27b7b4878eb9ca6d8fda476b76c872f
5f030a803c7b1f1c0483a96c6fbd43a97302e17b5e0e28eacca87be1015d91f7
0ebb16c26c51a8d7efcf50cdaa4d7c3449b1134c30d181932ccd93ec16b44993
329b44a6c3638b23521eefdd79e7200228425465b0c3df07868818cdf53d9945
a740ccd2413b8d53b52f3f6b435f960173b0b4cff2e52cea7bf54e5002d254ed
c7ed8f69fbe4fa8de61973454ca0a0c9ab2c2a9882f346d5ae456b074f367547
54e4995e568d48065f9421f039c6b36d2b4baee80bec636374611123995da79d
54a8e318f034d9245cc5f2fcb4096743366a1b6ea840461f45a74129ef2be0d1
ab028681ab144f68f02aa84cd5bf824f06d7ebe88d55ce185e9defa29b55592f
3896f917281ff282a252bc9e725ffafe2db8374c7c34820944991671ebc16393
cdf712bb663640cfe1e700bed129c5b3b37cdc5b725396e471534e75acc23657
12cb452bb410347fa3de8f13d2ae52de9e4a0291cff3f45bd1387e0b0717b0ab
c7998d15558b3c80adc7f8d1dc3d3565a506050f3bb187f49ee8019da0b49917
124e925bc05e633da732acaf407477b47dd9329dcaae0c3af728c577b5402521
887d20bfefa2028e1ee7ebe6f5092cbf0493e4256dfb59d62b9f26ebbf5e3b96
5be553da8a9e3ffa7ce4de7e428d8e02795ff5d7fe6dc2ccfde9c708233201cc
b433e0fabcbc498b649af575066ec7a198ebd6d6bde12a1a5fba108bb34d316c
03ff8b0368fef25689c95d3f1a1dbfdc891cedd92dbe22a037a2f3054897eeb4
ddf01d1bda48d1eba76f5441354c342963da0b9b3f86b0738a8c58f4e798fdcd
04882c1a3f18075e31b33c78529f4ee2fa66f09ff4c3975df960c6ba32ae0532
3719cbbb5702844f3a84478c7e234341a02d44cc60e4390f335629b692832ffc
ea98f9ccfcad11ea232a22998660ffd04320d94dc276af68dda0050406307287
5e1af46fcc2859f6e91190b15e68e7aa56cf02593dc4e76631c3dcb7fc9ddd64
6c7ab907941ec4348ff95fe5dba5d6233696b3ed777eaab40c4eaea7766f4856
3679565c66ca57b9bf714be5504191337b4c9ac40f87b0255e22d24e4adce456
3502c29ac30988ded18faafc96bd0ff8a268b0bf5830a6ff11cddda5929659fc
ed4cee41b07eee3229d1241f8b0a3b9b1757e1a6cd2b53b29d245c0409321e53
7f2c66df8356115857782d74dccbd69190852001b7bd6612511ca1c458c0459b
1b1afc2dea548cd9984cb34af196b08d5147ab49aba60c0014077442e5b39170
2c6c265b25a215fc6bb84931dc00156975908b76cf1fb54996cd798ea06d1840
937c7e1caf0f5c595bc038d3689e7d4af6c15a3943bd3d5fa645bdc0fe0c7fa4
a100b71d2b60cfe9ffc65d2503372bd5d7713cbbb0dd9481c19e5b24f987ab34
861beac180f61c7e8ee68846db98c9c88aad992d8875d7b44b5de8e8c780722f
4849ebaf90244b14d141264fceb58ea3ecffd9ca893f44a1d5104c6c9c4e63b1
ced9e015b9f415690d526fff0be7ed7a93535c5f823ac753dc074d9cacd4a3fd
c03879f8216a892924cc696b1d256f0d7f8f24850db602ddb9591126aa067343
d61163475489a8ce2a4643c5e3db6a9608c1c506cfa2439991d807b8a3d89a5e
711969217daca3bbee9289e474a0de12e6ccff0984f10aa965e6a38725f99f0d
881053165dfe7f8a1bab8b24b9bfb7a382298cb7e9b625b78e53b731de5e5938
24f1d53d970622a380d913c0db2b4f156033aa2c3a4423c5ba3be0261808c000
a42bda95a6768ada32b929234e4c3a9d0c212f0b197615695f84c9a12cf77a02
d446971dc5ffd7cbe68775d948fd9c9aca5d93438b7b2f77e83cca3d3c3cf885
badbc9166d3eeff0473edbef598a9ddc5e5d50637fe9cec73e695d38e413fae5
7f11aa2c1453c9c56d3a92fc33ad60e0f79e3da79b49c8cc77c40e4c82f2e9e9
3f7eed0a0b488dcec44f8846f8a58a970dd88c12114ea58e1cdd105c656c4c34
3d01f7bb5a35e18d730c3967c5b0e711009f14574d947ef626b4ed333baa9537
ad0ad7d4bb5daed5af9903990bef203a529e310e5fee72aa629cfbaede5bde1b
7a53c7cbf91a177fd3a40fff8a1eb215aa5ac9ecba228b853d55d54e5205d0d3
2d68813164ff61cb3ed67387de44ee9d93ab2a082b50aa39050f11cb7c894914
3850b334dc7db907ad6b5e7f3f89bacad3771604f0f1eb2843a1a5df85dc6d23
bdfbaf5665edf79e652006a822f6b9dc0692af54a51860be74678b1c57f16240
8c18a8efb515987fc1ef7522224dcd42450dd4a195c43117885d66850aa1f701
54af834ee208ff22b3d49667cd64b18aab0c528ddb3363c1fc375f1f6c133f1a
5e5ca1e83aa6cde513b11d0d80ce36bf7562a35643266996e4502fdc85ba05d3
c1a7e39f62276ff85deaa199c3e357757f547d77467886ef66af72968e363221
25b228b05df472afdf38d64dd4b846df73cbbf2ce1030d8ae964b330e8d7a3ce
52eb0aa4f164d44d8683a6270e952f984dfc78a720775be462cbd41bc8d56ccb
383daf0007b58e6fde73ad44e701c23750fb4a2afc174905b2554d8d95aff499
713856cbb4cbd369455ba97b123ecde74a928afc03e2e11235b0c7373293bcbe
6193a86693f5db6c6aa905dd539c19a5e953222e1c68007068f5d6b7e232fec9
4ba203a44ec7318ef6a0db9d255585b7589f0d3e08e420f8ef7b612637a8ef0a
2692f52d9b094d32e8c197d8c3aa8c671529a4012453bf23e7ae1def8600f056
e715dbebf72bb7eeaee282c926f4ae871c495282c6dc98f191d03380337c8881
07e6113ff0fbf360a225f354f0ed4ddf9c97d30fe5d4dd03dd929ee799714ce7
6182e399f53969cf4c8cc2df7332a9de56dbd19a8704661d842997f06f3589ba
f63999f94b4c00cf23d1b540818fe68ffe739b1773d8ac1746e51d8606e06400
4eaed25d729b5168e8f4bef2c59eb953fcb9956d3f3e733591a3978700510553
8f9c1721aa7cf7552188c2ee521116b1cdf4852ef70b0d2066a2b2e5dc194fe4
c14503d57f7d7802014079f915f3af34d0fa4b3fd4a0be01952933c8e939eb95
dba63306dfa0f2ecefa6f8a868e9b513f5ae04d8fa9715e04daef3225ff5dc69
b6e455f4c8e8670f7b4c27f62f0c9c3083945d538b46edd1c20cca764e370178
fd0e0753664691abf1be81c197fc26aef772e4cd6e3b3c08b092e84b80946788
1a00df4bc03890f5464d453a683d59eef81734570e635e4c2ddfa34b41a6736c
b740641e676556182d98b7e50c56f8bd57020f515d73867fe1a24e098a656833
38039c76da859391f701e404a0218adb77abb649687e685821f0f32691a82b07
15ab96d0bdb3baab325089911ea11f6112b69c1e877cd8f2b8dcfe55108f0fd2
12efbd4f7954149e64e1aa1bd0d0b709f13d032d1ffd5c68bd4fb14fbd7721d7
a9f9355767875dd2dbf55d85ff6f0378d60e81b3e37ff6bcb6031668ffaa0ecc
cf414fe67e9261eb0f791a092f7c97dc98f9fdfd212a363bf7473fc107ee5fac
0790704f6fb87246143e0a53472593157180617772180173d0b0ea6949dcc49a
3f8ffc3f033bd306ef4b6a9001d34d05e7a8a200016afaaf6a69b5a10e6fc316
5d9341b62bbc709ab8424b84d25965834f27750afae63468b791cba8dfcc32b4
158cec29e8112e70bd237c1a180d078070c6feaabb7d35f5167447e54ce4c6f6
0f55d9f97ce8f1a157af6b49567a16bbe23233e68bfadba699b69b22d70b94b3
bb551113de57d25e2cc22a3096eae40bea7e353034231ec33554fba3920e3f80
3611e4dc2d2bb7580869d0bf7cd3a0b24ba6a2cee9210164bf0e14e76642cea9
bed0b279b01007826296ff79b3fdc11c24beac7857667139a80f0243b16b153d
a927925575ae6315fba5e54d5f9958004e31a4acdb315b78d08188accf9a0099
e8e706553f926d14afbd8c39cd805eff90b3993c96405990aa9d189fd66acbd3
123e94f66d3d1a5f4061b0ae1a032ed9eb5010307a368c1c6104767bab95bc46
86ddea0248d2eb0a886115e22304e81deafce6e1dfca09a8a8c93e7a44b6baee
d222d3cdcc5a8b1661b9a874c1bd871a4cce7f9666d0ae94735fa3535a44920d
6740d5e6dd0372733bebbc603d89cac8450df40be169d9bb90e3480f3659f28f
5b2de56f7c596b3d3c2b80dd82c1f01196c301855d6217b99bfffa93b9d40e43
1a22f7a9ab31462d3f14b5dfb0795ca0675cceddb8e7eb010b1521406689b491
9d264bc1f084564e6a564a76437c6bd4317fcfedb47653f191d6c6b649270aaf
d083771031afca63850e7a552dc8cde90b1c6556d49c09bcd3284956ff555e57
77365a2ab6d7e265a1a4f29de8203a2a15756ae595a48e11b958c6ebc478ecec
29e0ba3a7cdba448c7abc3bb6e21d21bedc9e49e730a5872082e9b2ce588956d
b35eacbc2f3f41485b0b0403029b7a5405ed1a40d6d0a7a802c0bc4a0b1fa6c3
32450dc5b1cc97378a2edba812918155e84e78267ec5e73f2c0bbdc96604294b
3f840dd3d7159999fe24b2c6f297adff2074ae10a9087e2222af7bfa938a2058
4dce0e6185a493b9c177b8238af099bd737ee30fc40381833e6a08176f3cb0a0
504270ae6e7701a47a721d70ca5099a2c55e9558320285e4584e64d639560d37
50993e0757a1419eeeee61f8186f8493034da3997e6000354bf0e07d948afd88
ed9fb679b76b39b0b4b444bca762ffe47103bf812b63a2b7c26e56ae4b8c73c5
58d0f7d6971cde60c257de6688d38bcda59d35ccda430df05fb25f2a3c75d377
d88b1f22eb46731b7b36f8d9ecfdacd3f11f9fbbc1b4587141c4b3b9b0d0a9d9
bac41cf6c67a995491d52d695aa14aedbbe48d49126e55b2ae43d0545d031e05
1d52602401c68d41fd21e4faef1cc86a15dcd17c1975b2d841556017a8f39f26
b6c0c9cf80e7b5d9488f9d4b08564c8ff25c52c16c3d11ee55e18dd3462f9436
ce1e4bb7eb090929b4816aea6150fea2659389445800576aac75dd801524bc6b
3f8852f9a999c05c7b29820afd544071e9432ef5cdea3610acb574cca130ddff
aca7b6ac5a6d86758edcc686be2a61b6f5f2a3405faf7a9b06edc40df07b74b7
409e2a172430b1d504b041a5c7b0a4107bf60581e1fd5e43a5732ad62a2301d0
07c3a3489e8ed7c963cb5ea2bb5f4befeddce251c8fa914c424764311d16c7d4
1e855f4b2fc2d4237ab0457b3a1752971bccd0e321dea65684d0e44a73023a19
4a9823950a620ea838eb5d98f9959992f48d5ad23be313438385a6ffb29362ea
a69f16b642066ffabd919bbccf49db54550299476a7143d3d89a6c1a18e94db0
656a51dc9d77dbb58568b3365ccdd85d0e98097342559bace953b33cde10d826
8a3839a28d7c218bb4b0a31d105c5d5464c2fa0ae31f69994057717892369191
3b67d16584ae555dfe1aa32d4b40d144cdb9584cc5777d1e30429f09e0fc25ad
c87af39d21328dca6c31be361d2382b2c20435e255c6f3014f052e8f4d305a06
7b3866bfed5eb9b9434909f117bb602c1b743496c9ea13dfe4558b443c347a96
6665d7e5c06125a8aa4e906f7cc0bc04118a26e9129fc23a63fb0eaa010deb73
76fcce5d4a3d4ae626517d2d77550c50f7a45b4ade74018b746f5aed001cef52
fec5e73534a2de1256ada58aa26e5bc0d9d848c8d0a7f42dbd7ecab47d6be1df
71b7632ba5fdfc28fad09fab97fcdf3ba729973d39a5cffdcec0d1173be7e7d0
85a59a4504617981421dc9600b2b65fc122d34ee2fab5640893e34ed50f53037
102330d718f7c2bb7b9f50d18643e04af5c860215f7211271fb2075e0152d85e
c440a933a87d20a0677ea2717baec45e8578a077805615c7d492db713259949f
0392cd91480059059dff830b24e2e6e5c715efb4aced3dc637f88cf3ce57ca45
2e96edc986edce66a831ee92e6e284c72b1307ce4ac479e57014d5114a9054f9
cf9fba1ae66b0f83e173f31378fa74e8b087490bca05f77da7842d7167d8964d
c59089eb222f1a33963622a9a17c3542d9940baf4d1b8d68cd4d51f16acf31f2
91570def9261b67da9222acbeba21a6f3fa905e081be29befe3152b326902afb
bc6a57c110bdfda8a5c4ce8e272eabdca671be16f4eb7953d1df46e250acc0b0
674deed452de3b04f258e081110c5ba20a934fa9180abc09806e257abb22041b
836ab3b73f5ea8dd1dd8cd56216d1bfee9a8ba78d83e2cf0935586c0cd7340cc
5980e5ca4989174cd46fdadb86398bbcca75c9580310422f692812bbd8a91602
4f28a49d35ca1c80d5c6c7b879f14d037ce98c0be18eefbca8acccec2fb01923
69154709b772b7a13d2a9737652a1bea28da5be3815867a952fbdbdb8c45c347
afb7d7de74620d97b52a6bb27d2b25fc3d0515129eb92be62f7efcadaeba61ef
88d82df3daf2d1b0a117493075b501ad84ff1650bd7196a1e481c393de324dcc
c50ed36647ad32f2c8c1820aee9190126c1e622d1f686c71855f8a50062abab5
54fbc041de1cb0f6163df274e0b148cc24ff66aa16590447b5018474e5bc20eb
77a870366b3ba577a6f484ef97c606cf71f15470905be27bac6fa4375629f80c
b8aef8957b856abe4f885c47acd94a7a32b2d81cb5ccea65a52328820404f5bb
512ae315f257f6196effbbb484ae7c877ca278a1f086e771092915d375667811
f874c618cc6a117310287382fecd5fad1bf3656f9d41c6b798cc1f6e6f7f789c
8eab2b1634cc255f999a72ea0e3e201a4335558e1dfc26a8b02a8a1fa55cfbca
d1bc49f81b8f7837a29d7af533d9fdca050cbc2809ad4cb54699131364977a03
775148adc0957ecf2d7317ee6b582592baea2f25d926a1738df65b37a519ab95
a99bab4dc07e56df6e4ebfa0a8e4d8f90693f8de9d1cba77b3f11dbf7b1eb907
a381fa256e2e943d68e773459c7ae4e7ff969b43d5693b5b612fbbcccc4c5411
49a3dd6f01ba7bb3d11a331cfe3734ba2b9e1f8a353be0f7a6a0567e1f0acccc
0e5f27eb18fd60d809ddea01e8d6d1e8c5f59b9554910206ec9c56e7c483478a
1e4a4cbe7bfc97c7c96f119726e45d23b337e547667281022269e8b66dbf241b
d74d3cb856da025944794c90cd5196bc8f3a57869c9f9647a0faf133263e96d1
78ee01be0f6b67db214b2e9c3f10aefa22e7e0dc1ff6d463a2cf77d66218fc1d
9a0f754c3681591d66f8c455a85e3e7d396fa1cecc04b218a892da24ea8dedf6
2084965bb34be5e897a6a6c7818f0cc42be52edd5cd7c45cbcfaf89c94569343
de0c4bfff1f6120684fed8652a071b6587f37aa970b5be3e8574918b97cba372
81ec8e16a93fd111d796dc5833b76f3f3254a1c126fe28e103ce61a0eb89ef8c
9202f06e04949602431a5cf57a940c059876b3724e461b7cba19a398427e0d07
8a88c2eb0f9d12b608d777cc1558522e5f0e96362ee59cf1ee03b1ba9b864bc1
d186152f24e7e28b92e3a799b23c2416afc0820ecb57a0fc36f44b73aaf6ea3e
3b952de790af21f21a273a5c0f86ea671ed2ab467469a0ebba738b6257ad34ff
499b8e21a77a5685dc1de5f21f8ae20d85c6c921f1ca434b67e876e9d3bc0136
dce9e8d97c769e07e7535ede8bcc169bd38b5f3881f94fc7526ffa91b7870255
f62e423e2ea2946faa96353152b56ae03010a42f15cea95dd27c89a4614605d3
636f44785350a920f63f0cbc5566aa9fdfdaf29c109dc83169b3a64ffb0a7c58
084a88d385ade5c3273c02f809e3dac76dbad71e76462d909348c609dc0bc7f5
0cbe05994df34bd38feb8b5876cf5a50989a44792c36f0a989084acd73cb700b
64103a290df126dadf1449a45c713a71353969d41bdda892871644f895fef631
15b63299dc61de72c7201d8392dd3f10b819b792ef91f1bcddf9f04bf3b94c25
ce81c6edced5ac26c9cc0e304683656c9ef456b1b666bedb3fac1c597bff1d9e
932d05de433889db501bfa1abf7158b428b24a0d49b467b3a38f916601c9d36e
22374c7c96dbb04a5de8ee1013ec7d8515a9eba05c41def2b8af7bae6691abad
51b58b489b3b20f3276878bd520e6e70a9005ee6cd9651199e71269a9dc9e1b4
cd24d01d37448b25ea34d6e0ec921c4c1904b7c4deb93a5c6a0c736a501ce7b3
5e386a065c1f5df136c8bb89103febc7b9b3de8f74e0504fbd7ff702c5c7250b
881e83499ecb6d80b3f9d9d17aed8414a6e723603c8762a3409f6766be726bc2
79f70435a56a0e096209231f5b0c134bdb7aa6cb3db9895720cd02b9dc0476ca
90c7a9ed37ff0b380348f66d85037e9abc95499cf6a3006a40c3874fbbc4efbc
93b261cde619c9f86a4203dcb642e59c59709dfcad0d15235264fc7923227094
ca105019b1bd20e4bfe5f3ea8edbc33a7bcb6ebc523533228ff682e8a1150aca
fbee38b10517877329be50b45bd87df864b02c6c8cb5125b01e79c64048ddd3b
7bc817f3998adc78819febb2d81c7cbfd179908cd36b7e5169218b449cfd1a1d
d0979eee4aa2c2f664b9532775113f6d528c0e7c1ddf7f92738b1ec1c903710f
9543712c0133b208a9bbafaca0a5ec4a087a1e5de118fda50b4b79a658e73a04
ea556ee2b9891278e414984da3dc6d739ce951cf058e2bd79d671877c4d5680c
2a757744cfe0cf31bf587043af1acf031fe82e85f1199c1e606232edd4501949
dc70516a3c30b356f35c2b4e14fdc62abc3cef6b7703937a1c5561694b2e2022
457603ec1210cfe44401a8ef8e43ab2c2244428c4f8138828f2a7646a8482b49
00a7fca8dbfc30cf7dacbcb003d06c36644d61e901114cc5348ebf805af2f70f
84398891f59cc49846e23463d89ba78dcadb3efe0f1f00f4b45c40bc0d2f3cd7
63c231487557e68ebbb01cc9f96f246e9c2634cecff4a381dd27814e07cf1eef
75a68378e92e56ff486dafb1bba124ccb1f2e9dfdf43c5fabc0336d8f237f9c8
c0a656c7691c674c0031ec4531b6e7adffa69d634c9af413fe014cb6464b4ba8
649cf7bf0b6ef90126d312c361fbf4618957798879d967496ae34ecdd53985be
1bcea07ff99f19a7bf3dad5bc31e1941161e60b98a7f78316b95db862315bd25
c46474a7e5cbe24c54f54fa23b299901cc2488480d258db25713dc97ab0e4b5a
d66bc1131efb7296f2edbf6d27734cd45603cf5c1e0915dee25996df850dfa31
13e23c722aef49ad2be71db794c86926c13ad66f17d8b8de2a4b66d9191afc44
2b4e95d2cd04cf23374f172a183ba3d6adf8b2f919f14b4ab794f53531aa0da7
fa0d5ea6f4c30e24db53640d51248cf1001e7330286c1668a7250d3a0d34f84a
7acecf297caec2198b56eb1b9263ba531658d06a3d9902c3c96034cf79f4038a
2859c9e47088ed6025d59a69abe6c86ffb467b4d8b44028fcd099729e37bf8f9
5584763055731e7d7ac8b76765955c5c73c00bdbd426950f69ac97f55f9f9f2f
bc217e166c442e627ee893965fbc7200396403b1f52693f6079886eac12d3553
a02944981ead829e3bef62ce5d9274e039e305c48b23c604a86e6d816b649faa
ecdff4777472f32094796aae48960b0f3d74e40fd1e4e72bb37e54322c126341
039c3b3fc15a137fbee162877a2b1a27dbc2cb91b9b8cecad345cb00d519c051
f46df1f96224e95646e02d4af7739638c5a261e36e46be731e3425d8f62d25ce
5a7f1e72abf75237bc7109d0f6503b684c728a888ce8a67d622a298fc09d08a0
39b7508db67d1fad998be17d49438696e9c402bf6ca7e5ad7ca7a1450e04d17c
2b0fa2feb0b9d5af5dfa5367faab7b3e2f104dffb5da896d00b890764b83543f
70d04bf1fb56fdf0a5873edc4124e42b1546dcd43f4896a2b313617c5675e84c
1fb558d1b944084e431c29a8b1b1ea67f22bca2ea0e503c422f13d3d18c16dc2
2f8215c71346762875d4864e20c107f50d9b42963aa2185db553e60af6c6a6a9
886ed38b48052b0cce1e5b3f2f5d96975849eef01399553443d2722f1fbe8266
aab7b8c1d69ee6926c1f916c62dce7a293589e10141173e30b5790f18763f1d8
e1168cac05882de798bf361b1ec201c9f5fc970e5bba72515ecf3ddfcb14a6d6
2f38cec6004fca8ec75d0b2ae1fc8bc94d4bac124c97b770759987d4322ae181
55c7a1d7c0d3005385eeae58b8a07c556dd439b676b496682931327031d68f43
daa631dff340b8531a8f1cddc1f7061dafd88109b78958d52acbf458dd3ee2a1
fce78a3e5a8ad9a5314464a53699701ce17dfba2e176e9ac0f34e1692224926b
61782f6ffad4c6278a32b1bdb73cf6b8b2f5646dbea6de31b2078cad5a87dcd9
04ea4e60601f7547c1544b863ba383ee95ceb4a42ce89e815740e0abf8d3087c
637a29e0645de6f2edd027cdd56e8b558dbd26e9582186bd5b4792500926f8b2
5d0dd38c7af1ce0e2d99942b0099757140f9dfbd514c38e3d15e502dbbc34c0f
61e3262e48f80bc3b6f93ac5aaab909cac3b776094c2a101b0ef5c891e28ce69
9ecbbb70fa9a71bab19a9a4ae49f9316b7dbdfd63b43db9aa064df4092cee544
7507275807a430358ff0df98f2fbaf716785742ba18888994a8e2b54aa0f6fdc
07feee88c4c98951495943bd42b395b92b545cce05be3db4a0ffc43ca064ac4a
9baad9d402c97fb79b8f70184e011f78d8e96538f4ef66d45950a80d186f3b96
56ce530f019a1269ecd2249f6514a6b77e486198ed8e4c28760a6482b1d3bd8e
8104a8c964154d8d23fb33f0028960d92c5bd3dfa6bb390f6dc708a9f4fec7a3
7327a97723acf7b62070cd7dffb6aaa77505a05fb292c54baaf428f9b81e5ded
8c92353e46081e4fc0edf39c53ad0e64a29e34aed73d18e25d0fb73134ca03fb
382c1f2d703e241986e9bcb36f4a9a45bf183e5898364d95369febc1c8867d7a
77b76f92c3668ace60e180b685698d403b86e27cb9725405955fecf496aaf327
e4f5e75ff5123402b23b0e8c5f7cf7ccf1364fbc8d9bcb94f0f3f018a2902f61
ce2e4992a5dd21e2432a6f412e77b13cc968355f9298c0fda7bef338086560a6
7c774e05aa3e6d1f0a3b61bc1ea89cbe39423ab0c3bccf10e091a76563ff8b09
c509f9f69409b7bbe8ccfb574ea7a80cb1ff7502388b4969f91f319e0c609bb5
c3ab477330dc67d7d900f2cae9890caf14312ba25c8722f73ab0d9b0c884aafb
b548a565b92af0ff6940d36d40442671cd9513f93d19107cca2462d5432696d0
97f56f580721b0cc583aee69922f3d0209989ed6e6bf55e70e293c0c3e89f0a7
caa0d50b519ffe06b5e86068deb84ae448d5041d20e03955b5bbf0d78ae45bbe
2e548c64687229f1033fe264d1eddd5437e2519aaf8d1f7f2b8acff36cd650a6
3cac39cb7c215c7b34e29b58fbf5d8493cc1bbc9e4dc92d8df9cbc28cb3884a9
b5031e13c12ea77e63e74504fb6609b446ea85900cd8351853e71bbaad97ef8c
14c8691ea4a1b39c94b7ffd97605796e7125ea5a6ef7cb631ed1d7805155f6dc
d64e6773ab236b4c9eeabdae300229d4710af35c88f3a61fb3934dfcf4756533
bf1a27c68f82bba998f2abd86aab3b741fa09837a3f32cfb5f0a35732c7df553
1277c984e1c32777107cfbe7962971b6bc352ef17179d94a3accaa0730f3d360
ccc9e41a9d019759443990c8ad253b2726872bb53a3e6217b3ecc57242068b70
1116465f90b58a16e703ad75c3da86b2ca6f6e2f88ce00a60175843321e6e6d1
8e1fb76455d729634c196a8048ba1d7e399b7e5529e312610cd6e38e11de0500
47bf6d8e7fc899e90a40f639ab3ed83e4f8614245ae7929da6a8872a15b3b6d8
c0769d8468fa9911fda9fe875c7f4532d2cd641d8024b1b43de1d9fc0cf0dd55
0337769be1a0da6afa42b12c8b3d21ea450df447196ed23071ecc65385cf7111
d025b3d4d396fc2ff5f814fc7b249b88c012765d66402a3cf926e3769522c650
62336560b22fd0865350c75541ddece29da646b137d467aea2b14e0c8a9d004d
c914e1e36247f010d5acdfc06a5a86477392e6b5df8bfa0201bd160c16edd42a
1611d8569db8d294d64cd76f9132970e961a57cbc9478fa37507f44197d73490
3a6aa1706d0e0b4115e3c6bddbd4cf4be087fe8cdde3a8d764b299fcaefbf13a
2f480e9385aa9db1d4358cecb9958ef760f6dd14459f2a93bd2d49e354b7a672
8e6bd45f17e2c6d86512f24234f1e8df4e5ebfab6c094504041adaaea0139028
5b4223e9cefe6e67c7a055c9b56402202fc3964989ae3d435ab73b071c022aa4
6c2226809e1c38f8436f42d3ac3a262c94cd71383b50dca30ca2d275ec463405
e7de48bbfcf53a152083382b8426ce804c958b9aae95a13f2575658991c91988
e3af9fc6d24986700798775ed0edfe717510f248d3fd68a5e229c9862f7e5850
1527886041988bcb5fb8e76b80a5292e982f25d51a97a594c4bc30a0cf3739e5
6381ee320c7a2908da6732b29184ba5312b24e0935fdea476a5aeaa75df407fa
4ebf181d5a15a5ad7d0561ebf0a4e55c220a9beccae6fd830c0fda764583cc5f
f59dd84c9c2486afd7039eb22a657ebcb48c5ca9053f9c6671cc6cb448897f01
a33036fc2438c9afec583261506e1ce938894644af8d14b5af8b020f0061502c
685d2919d5babfcbb1658002a2b5b6f5fe8636f4addbf97dc7c28eb93dde4f93
831cb9aec8d2d3f6173f16d4c31ae3871e4bfad4b652c0f5e11af2270c983cf7
47e3d8ef4619c2d76db67ffac11988cf3e7f6a2976305532a2a2174656a926fc
1e9dbc981c19937c1fbfd11871ab3b96599182227ba0f840f29bf8533d33e4c0
052e60cc0a266a97857f7c6bdb412ea779b4094a5ca237960957dd0fa2a37c66
97db26e03b892bbf9ea9b6e39d131f8ceeaced68e12b7f2ce9ac64dfc61009a6
a4959924651f7158dc9c25f3e603629c979aac5c308fb57a460f4229ec663d88
51e6947cc40678dfc56e8472f021d3264c06a3b76cf1f77f4b4d0b1886039d3f
e166d739bafe7cbfd72331c343fdd7395cb8cda3a85c735bb35a286090b6055b
62808bd999289763d9e433d22573fa3f069f40aa45df44cd9e17e843e9841828
d18b10f2325aafb6c872e26486f9c059476a50a4a3654ea8ae9e7f6fb7496a4a
887054819293cfe78eb82ce32bdaf4c291faee9b01a04ca557cb09e614d0568b
4fd1eb0cadaa3014aeb0325eb68606f07885a5234e54e76ef6d9769b80e640c1
8b21a8f0d80f7029228c704ac5d747edc455493f33e844a3b450f36141417a52
0677fad5d7812b65640113f8daf1c45aac012f587719d26e84e8f98cdbc9e3cd
a9e148ec08d03ebbc9e875b21ae8a3456a032d39fb1e726effa0b5039871cd72
6faf210bc4927de53fffa0d987c2232bf27dcecda650bb7d7ebadaa7036079c8
0f067aa09ade73b0f22f92ca57ab4a1873ed901c7eb934fef1524824604092ff
6f255608a2ad18636b7f69d842764c672d3392b35b7e12ddc355aaec9023f8c2
730ddc4e7b49e754e7550c70a21b5bdd7acd0bde562b34faa2ecbbded4956cc6
45747aaf479ddf5269180428781e96f3589ab47bf5d772b0a6a01eda4dbf27f1
49cc8d9fc57e95245f9da122f1e3b6a31ca21d7bce9e846b3484bc32310902ff
acbc3b5e1191ce9a65878695678a3ea186d03fa6cd13b1743f327ab65a848d30
1dd0abd9aac5b38e9f727365120dd5024f3da20c5cd4f6105490e3e45df85ac1
300a041e5a5ea1d8d882bbb1afd4cd7156f19a48f43ac58cbab60674b4daaf45
7528e5c201d32ca7e06ff8719b79b94d6c571a99221e5ef00c8bcd7d82ce818a
e2c4f8b426438788ba84c02e7c9395b00f9780b7770facde25094236e106c405
14913fcd39c2d2b0d084931cb5b4ebdda3d91d8a25ecdef23154779b30a277c9
509902c229d32557328bfb32b20561dc38e88fa16a9ebcd74377ee96dae69167
87481289a5d322e9c34dce09c833d7e479200afdd9998669b16a30264e72ca70
f34e79f2c544ab96c323dadda4c9900737121d68aef6978feac182fe1214b1e5
e2f3321680eb5f0e6fa1f2bee600b07db3d9546954fb6137443956de879d58ad
cb7d8a870f12a58c5475ab59790565de421f59b39e643ab54f38a94336cc0f6e
c5c54c6fe6f61d6c6121aad3b9b1d66350061d1679e795c252dc429647c83d29
c684a07d0b41c72a62be91564cccf41edc5b079b2f06e6a9236addad84d0fc4a
b5d3403b42c9dbe80d119de3855e1b17498a9c2e827f4a8b85736371e39bc01f
7d7d14421219d136d182783f5d6f8aadad83109f5bbe65ce504735367ca9b26c
40e10288fbdea5d46532331eeaabcdd5ea937f1537796f43c7a3ff713ea2c86a
2f325acf06ef921222eaaa928a4dbc95a9a720bdcab273737e7753294e42a8ab
3a27daddd130ef705f2702702f39c84de8e5de998f507bc54a69de88621f1968
bc135bd14c3ce226befa65d8e117c7dad682ce6dd839c443e5368b1dfcbb8cb1
7357d126b021a47bc4b3eb4967183f4a5ad66bb4ced909993a13f365604432ab
42bbda92d93d132d4194a64717c65d6bdfad697e3e2c1b2962382d43cfdb20c5
ed6a61484b3322140ae300953acaff45077505701f7c161f108b902734803f93
b6e89a2cb7640388248e2c263135f0dcf9d6945f8b648c89ffa5d39911bf196d
24dd049e561cf823988605fb0b3b2b960be8448dafe7cb335dfd8870f2fed3a5
ad8784ec4c5835f2aaac6a5f7fdf8f15e1bb5f28296f6af7f8fece5ba290030b
885e30d245ed99db82c0d669ac135390af59032d6b44b34cf43df8b7e3434bec
eb8794acf138dbe43d028c6bfb9b27f2690324f201352eaa4f3ae5601cfd4802
81180cb05c6bb3a90948f261ae1e31edd7b34dd8f1dd2451eff15bc7b881e47e
98183454b03554d5aaf541f087bf56ad3087681521bf3c7b84c8047d3c23b02f
b80a55f6a26ea426feb545f7075a84d903e1ee6386e8bf13deef625de5ebef1e
5d693c8fde15c264113394b035931bfd00308cb7fe9b84d4b18df639a65f454a
9136598ad3f5fe4a71e2d7ee28ac54ab4735957cd1aeaf5b91247426a706415b
1f974db500682c188fecb8ec291a81647d09c3326e05a671e6f406140d94a86c
edccd20538009a2867c3fd9d8b8a703de2f9e2dda4a495f6c8adf2d8da8e06ef
9f21cc11807dfa35a9732f9fe8bdb8f2406552ab3d67621d23bf5f425b4dbdca
79a2796dd74cab6305c5a8459e54f5cd6d8ed1768676386d19e483f257b9fcee
1c8d9a0ad09997343314ad22fd713a49b8a7da1e2302ab57360ac15abf917f4a
930dece02b588a0564271e207746e4e7c330889a3f272e85120c7d1348379a95
80f40a595b1a0f939aa46b7eb17d07981136851acc677e78e8715639357668fe
fb162d0bf9e7b58eb0a0a8e3040de3cc5d6ba9236cfe5a921dc2342167ec9d9b
1a6ed1b7da0da8474ffed17740221cfeb7de9aad199e58bbddce88ba826a14aa
9b11abcc6f897453ddca5565a096a79125a192cfebe71ed37b5810f13fa07e1b
286bca46d8f8c1e76c7ea100c2dcc5501653c7cf156582152bb6aa34ae041580
6292f09c7c3ccf7581a1d1828a99866f5148de42ab060932d1366c2bd8104053
f5d3744c9d825171812dbce8ef600b937ec0430f4a6129d69fefbefbd94aeaa5
12673d7ab07f05528c529d4552ed20f00e8bfe54721cd959ccbca7f5c1850168
5d5ab669a3aabbf6d431a429696274df399f3cf3760a4ac9453d407b211b2bd1
b8416c1c23b3e68343ec28cc5e520fc2dacff7d41274aeb8f77beb6174560e35
fcbf7fc20042393bf52b5278402ac5cd9f2e1ecd5a9dba963b9acf08d78cc5ec
33bf5c620b6f5520cb781a48f247158899cb347b977cd62c54cad66c7e6cfade
433e5be9ff8af602bdccec15daaa4316c8289921d19f809c20e68fca2b71db82
df2bf6f0ab9d90b9830d399d838547fecad841b07fc429abdf02d544b41d5e31
7655829d65c3c4eceb08dedbc1b2db023ad5e524ac9cfba00bbfc2bb2df14d60
954d55560fa6e2ff31786acf9277e5e2018ea2f839873e3a4cff611d83a1cf8b
7ea655c6bd34c7904818fd857cfcf4a9b39e5fd5cc551655dd75602a432a077e
653d3096ebdfe93e0c7c7fdb0e52ac0145a5857d5bc46daccc9d9bc048cd6e56
1ac2f5cac16d537abcf20dfe3c6aba54f400573da27a781dcbcee2bae55db664
18f8f003aba535662a2c891070ae99839b4e0fcdf5ff9b7201ff83bbc9a5d39e
bb61de440b63d08fe86bbb51e251a65501f8ace59ec1907c1268c639012974f0
d8c09babdaad610d2c065174687fa1d783dad6567f1bc9056b4e7f90796ae034
529735bb2c26f89166b67385c0b3ea8befaf0d3c5d2021c5e95f332659f57c20
7b74c058fe712f9f6f7bba2afa24d922819590e2fdaa7b44e2b7a401fad4d28f
05ab33026864f395ec60b658d42784533883b1cf6d5c32baf24262d835be78f1
eb7b30ab043abc420085fc13622e1eea3438d9736c4ea328912fe1f587d4252d
bac29ae8f47e156b7183c1b79034bbda31e08619fc106ff4ab743bf8b9d8bfcf
a8ab6b810c1e1f862201010a3e3da3ebb9457071f5bfc811869fdfa0ea873a0f
fc47d86b663bb3309bce467a81d6f757f62a195f7c6962a729794666635c8703
db1e2d71ae67d5dcbae3cbf5b948eaae1e7e7ee478a0e4584ed70a019449b463
9f812dea0ca7e191c9698c48defd784c76b4692337749365039b13055ad9114b
1a456477c879893ec3bdd4d34184ae5f3e7bbd8b8ac1abeb315540673a792b21
6473e225bf2b925d77276d0741cc50fb9ffdd34459642d337c044cb4d58c123d
652dffe4dc9fb7df87bef364cd7d62d8fe7046cdcf4b387798754c409fea2867
c3c3e9603e79d53c02465ed8b57b752f76477dbbd1c8945522901478aacf4871
c3dc4aa90b10c08351d608a02d168a5978fba8eebb48ec93653a8e178d44daba
e42ec334aea08544a01dd57943e050e87fdf8200d2cc475c02dff5ab8f90e58b
d3efce06d49ea472df3f149b629059dd9e839536fe014503ebd1e6acc1c35637
bee2f01138614af7521c7a9b0ad46187f168e387a095d795c067660f71b43521
7132c561f442b12f84e183d1f5dc887b3e38f904dfa73777e7bd7d3ab6e91fe8
fed58db8f42e4b65856d633c75e5c9f27fa20a7c352f30d2ceda868f61fc0548
f2ef8ebcaa18fcbb4439ad597a2f2d89ecba33181e770d5e8b89d95ac0c9b137
a8cabedafbb3a2071a0484c54dc24ba3b00314555dd3268690308c839cb0f0e0
410116d5bdea76dc4e37d89e5262df1a2d5214bdc3287a67e7657cf5b5f66892
0e5b549eb0929093bc809afe32a60894d7d5dbcc546b0664cdea07442c75ad9d
8bcd96bc17af39925125ccad1a305829e9054454ae7db205fdb3c459836ae540
ce773785b55b103bbb400a6472c14b125d85426138c06d67c5944cd0cb27b224
e1dba5ac9631e37b2a3ef25adfb0271f729f6c4b9402771e0dac6068e12a0df7
7dfe8ff7eabc22211bdc35062527e1ef26da58a905331a5062c66abafb46feb5
05aa06ae1a4d7e88143666dc8a7add3f049fdede30f7785f968dbb0ea2de8bf0
a08da2f0c9473885963b710f96fed7751c4ce01b4f954e486325488951d4bf83
01662f9bb15f48623f8b61c7bf7dd47fc35e73669f03286ac8b1d714dfb24415
7083b0340c5535e329f6a114070d01df96675ca34d6a0280ae26e81a64ce055a
8261cf5db776cfae5cf1fdc3f8bee14efdd08b832ac93ed7094b7e57a02b86c1
574ed345d13e9f9ee91bc99fff620fba401de9c6265a1ab696a581e15a3e5d56
dc74fc5c276b7d1136c94e1816aa95c0f3207ee25769b17948d9a4aa60f7968b
e033c5e124780bc4f532ec96f64e542a0851a75664b073dd16034e76adc06931
5f5f2492fb97859d59cf2f6fa3a6053caf4013c4f47469b9bb1636e7017a0c3f
5d90c8d94894613b51895a11f37e9f31fd98a37bbf4cc1e01e5420556160dcc1
25bd769c66ebe645bfdd4eaa9c53d414ab9757c860861eac507a612a9309e5fe
e75c85868fc360a41846a8fc79b60a175606bce995b584b5d645f9223b7b3a3c
e4e249600921f783ec8f13abf5aaa3b5008389c46da5721dc304f56d7ed99f40
264d7c378d96a6d1a960cc2e412a85fc3a2c47c6f70edf2cedede7cf39d8512b
9679bb345fbc28716239bd95246b59d21a9d8378897e9c7607af76ce59e66ca9
d07928d73ca7744dc5cddfbdac11468497cfc89dfa1a24b120753229598af48d
0d2e6332a73189656cd1f584496e6f3b1552b04cdd6e51a8388dcaee3dcf8c88
072651ee9cd44e3cf938172570ebb994bb821bc04e1a7723e16ea720bd8cceb8
af7ddbb6ac14a58e144ebc37819ce9f2e78c0b8e5f68ed48a5529e47b2d9a7d5
b381bbbf863a318d719ac4e678cc1092f721b5ca28430dd1b597004390ca2a34
a54e73d1d8a556c14c4f04eddb0adfa43ce6ec57c8fd98ef7abc2cefbe991786
a58ff65ee2bcc0ac940a0f07e367c032f0a306375948b54546c7379053ce25a5
707d9c54aaeabe39fadadd74e1710f617d751f7751d396c5d054d28f93314ea1
0e6803b627f3e1a1b2a2c4f14a8e9f9da3a032f7bfd872f86470b2f50e7247f8
97ac03b1a4fc9ae5fe6dd088171746706b39432cacff853edf1b2fe87208a44b
c1304b88b88f70b2c7596cf5ea33d9d11317da7dd2b8497aac6100ca08551dd2
a57ce4efd1522a62f292caf6e273878073c82f48a6c9f10c42be92c439cf49aa
8d4302a85a9f6f313ffeeef59e54aac64b7dbaa8265613f46e62d9a93b3e5740
9eed68eea907fc84a6ad170b8e1ca16b62e46b1067f28eb6b35756c71443e0e7
f0ea70df14dba384368a68baef33b7b2105085d179980d6622db47c1dd4f027c
0fb25e8b1ed04be113f6961107c235eec39afa4db672fccd5c11737b22ca2a50
af750d838462311c1111d31211fc41b76a96197ed5f62505bd627efc44c98cbb
12d82f9969735d8f4f36e097ef1cc66629b901cd7d530d74521decc86ccc0707
07e8a5208fd26e0fca9dc03e4b5b5bb6ee8d899d439457f139f7d2eaa4dc6676
a411a40412cf8808cd1e9ece8df377dd30af8dcb28e8635c0596c9f68728a877
671ff5872c81635145e98e055dbf7eff602a4cd98986e49a26c3754b8a264b99
b53dde3ac4448ba76a550df75661094484ae263c8bc0613152890d17b1a09774
a16fec15c779257a9038a6b81ce29162050e1fcdfb9aa0c88dad199a74470938
21b4585d40e628823c42a45bad0c67427ae3dea6367b69d76820550cf3b9820b
043f6c646cf42d20f1241d0ce8196b2c652e9e10278545cd289bcf37c813fe83
1a437c6e4991750aa4c7970e200b86ec8e26657deaf058e633011946e8569c27
fa7da592b78df95c94607eb1779393c5c2b4de50c425384431505a93cef43c13
e042c12a57eeaa2c48d002369aceedda1b7866c865c8f043bc4e50d824d2e312
04bd8c1a4d01a688bb2133a4ef37b858fe1de27fdf8e5382e4a3f323e016e151
8b1da5df2706b1d76b72db59fd5753e23b41306070650d6990cc2e7ba3014d2f
fde0d8e36ddc3de6917bf3ef03c84f8eafdbfa7dce1c3007bb57c3860f65e991
13cbca57852b97ed21a7a55ea9f0803d13accab45fac1e4009be81d3da217dae
45b421d6a76150d7997de83c0aec3d7c09d5e0cacc5678f2834e21c22aeb8ab7
aa20ed2112bf7d3583cb287740f2e6f828f9d07385eafed61527d19055cdb129
e1e5f1326edb56d107250346184e4580fe83bf44e2343cac0f366e04dc9aba72
0261224f33ea17f779ca8532400edad7ac6a5868a0251b030360b1ec93aad4dd
04caa52edd01246e2e275f007849b317394f9ef09064dfa18fd3c0b32ab4073c
855cd08ffd407d937292f54b0857c281b0d7dc4ded8070d8616e92faac8fdecc
710a60c61f88319733d888795c8b45ed895fd29863943b6632336aa49c2fe365
752b6463cefbc1fcec8e8ace944ba4d35a4dbd87a519eb5ce02348e106c3eb9f
a57577ba58e7d357e55259f182f72c426eb9e12daa1cee7210f75cd7519fa1bf
80b5b2f3bb7eef8348e39c270336f4c96a4e458f71699be5eb94f26d0c7ae5a1
b815789c4c038cdc8285c6394b442ae05dd2322f5aa943badddad4449606020b
f0f63647645d0c1cefe0a0e4964c2f2c9df5754c5d671182d8e15774752e06e4
f9bc5cb91fc5d31fe4fe2f9bfcb53b920c22ce8d0c244851c9433f2d8f9d00be
47a03a8d4246ed7a9a9bf27235f7537d071a6208b6eaca1b2b6c7c43e4354a23
b97ba4ecaa7d93fbb73a4bda88c406a9a1fbd9861252f03b2385c3517c321411
2f968c018db6e3e3de8246c05da9f4dfa2b5d0f2cd2fc3ccc8005486b9c1d1be
e1e8c14ffa77b18c024a98dc5a38b6b0e057d80111e7d2296414e87276c13da0
e550c489381ec3da91eb4dcca6affcec210ecee0c8911fd338b9dc590d17896c
9f96dd780503bb15e0638db81ce3f26b1857c3705fed78959c83ba079e49fd0e
08b3a60d8641ff061398f779498633441313aaad5d54e4b3fdef4e0ce6013b20
f17c6b7b1bcf14289c52cd55b0c456a69960f32fc06077819f32bade75c69b23
bc97b0dc78532580f9fcb1a7d13b2dca90785a7e958b579d397a5df187d4b0b5
f76282b4b00e94c92cc3653a047ea5a003a72f2fa4053e3e9098f82944131394
8c471657767911fafb254abb22cb50693c8c644096a922069e2e2498d230bd19
4d2cfb8086eff1761fa2b13a0ea61d6ee0923c7c4d31897eebeb1d73982e21d9
66f303ae3ec4dc108f8ff4da3bb381be3f4050d101d37042b11b314535a1c370
1b3dd8305868241efaeb394ea2d3efbfbbf3cadaad79416bcd47f8c19cec479d
19bc73567bfe6d55713ad85a42337200752777a1af0bdfe46bd53534c4fee290
c23aa58863bc3d0f1afc9622c886244c3da73f429f8a130510505b554f1688b7
709b0282c94e36468a0af39d04fe42825369197784a82d13011cb3d193bc40d5
5ca5393d2b32c57abbed3a0bdcf6f5dc0bf26c97aa46daa4269d6e2e104a78a5
c96a39b4b121958a0e643244f646480b348563e417335e5703c368081c178054
293bd69b1d6de52d140ac2dba03cf45c83dcadcddc046526b004373d8aa9d2fc
bd3b354ea1136b0161a7de8ba06051f3cc22b028a2f76b6780f74c5509c49268
0ac5e71a1578e344dfa147d4136d57a579bbae76a482d742b44ca381a96ee349
613690f1a6f8bb1e8af1df6b2555681485122512162894c2e85b2b9a3bf5628c
2a7d925e263edcf66eda45022c012d0ae52faced11324543ce842b5c6349a832
d91999d162fdeea3451b87671f83bfe321726a6244d6f948088c0392d140d0d5
5931ea72d9f6ef32bfbddc2fc6ed0c50decba208beefbe2da0f1b6d918f55ecd
be29f3017d358dcc4a3daad96fbd6edce1a0144dcd3bac07d163935f8580040b
426a9434da7a234354e273f62500f88237cba7771d4ca0f50a506f07b7eced7a
6704154a7eb342bf879fc79f7bc6c488a155fadb56df553ac0b5546cdf3a167c
528b6ea5f65f0e02c863a738ffba4c4a0fb67a788609bcea6e504547c6e66cd4
45ac2c76dcde706679ef2d509f7f45624afc984e0a3f2bb2e1d1364b038d214c
11260ea09afb681a4fdea1df8aa130bfe08ecb071e7fdc72c90dc1b114d81935
92e0b7a609fe06b2c805730195748a02b1879ee6e350c7837c109063628d1f76
e39e0b4fd212b2da075c82e052a9f66be9446c306a956f412a39fa903e430eba
d831d86c7f66765f92137a8cf309cefde84580726dd922a4defc48604b992684
a793c7a25a21438dc7eed60e0a0c82007888457ff63cbab3184d5009ebc30428
b5c1359c3fd71ae27bccf2be4eeabd9ad709ac34bc6afeb60c3afecc39358bdc
5b56f65b2768111635621d7797882dd1db755d26bb2e0f03b1a8d80f902c3960
396dfc9aeca6ff7d42e7f6b4f83d1f217a68f3affb54e7e9aa1d0524e1bab538
3843852ddf61b2ad706af3d609a8d1f841f9e9d12b1b0f92f3b47b0dd0bc5f04
866528c0f63db3feab76645cde31f0283fb8c711fc34504697fa0cfb8cd7b150
172cfc93d64ab6a1da1bc96d501d49ac87cc6a04db1635c167a1fdb58eb9e3cb
94c90d2f9046890700513232d5bf1678651012e2f817114f46a1019e72220d7e
d216d12712bb8ae73140aeeb48a3339dc1d5a6efac1e8f50c92b58e32725424e
ba86033bb1bba686695cedd75be66d54275c8ddecbdcbbdda44f595e6b686af6
f6117bcef51df5e3c98e90171952a23f445c2bca9b8626400905fdce9e0464b1
c1d241ae619844513e9cc3a58a6f978089d209bd775438d7b87108a342c76b62
8c3a6a28b9d0c42e696f3d5908cb2c70d8d3ead811ef4dd19023faf86ee053c3
014ff20983774efe8e26646abda4954ead06c80c7670
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
cleartomark
| {
"pile_set_name": "Github"
} |
<?php
/*
* This file is part of PHPUnit.
*
* (c) Sebastian Bergmann <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
use SebastianBergmann\Environment\Console;
/**
* Prints the result of a TextUI TestRunner run.
*
* @since Class available since Release 2.0.0
*/
class PHPUnit_TextUI_ResultPrinter extends PHPUnit_Util_Printer implements PHPUnit_Framework_TestListener
{
    // Progress event identifiers. Not referenced in this chunk; presumably
    // intended for subclasses — TODO confirm against the rest of the class.
    const EVENT_TEST_START = 0;
    const EVENT_TEST_END = 1;
    const EVENT_TESTSUITE_START = 2;
    const EVENT_TESTSUITE_END = 3;
    // Accepted values for the constructor's $colors argument.
    const COLOR_NEVER = 'never';
    const COLOR_AUTO = 'auto';
    const COLOR_ALWAYS = 'always';
    const COLOR_DEFAULT = self::COLOR_NEVER;
    /**
     * Map of symbolic style names to ANSI SGR attribute codes
     * ('fg-*' = foreground color, 'bg-*' = background color).
     *
     * @var array
     */
    private static $ansiCodes = [
        'bold' => 1,
        'fg-black' => 30,
        'fg-red' => 31,
        'fg-green' => 32,
        'fg-yellow' => 33,
        'fg-blue' => 34,
        'fg-magenta' => 35,
        'fg-cyan' => 36,
        'fg-white' => 37,
        'bg-black' => 40,
        'bg-red' => 41,
        'bg-green' => 42,
        'bg-yellow' => 43,
        'bg-blue' => 44,
        'bg-magenta' => 45,
        'bg-cyan' => 46,
        'bg-white' => 47
    ];
    /**
     * Current column of the progress output line (presumably advanced while
     * printing progress markers — maintained outside this chunk, TODO confirm).
     *
     * @var int
     */
    protected $column = 0;
    /**
     * Maximum column before the progress line wraps — assumed; set outside
     * this chunk, TODO confirm.
     *
     * @var int
     */
    protected $maxColumn;
    /**
     * Whether the most recently reported test did not pass. NOTE(review):
     * maintained by listener callbacks not visible in this chunk.
     *
     * @var bool
     */
    protected $lastTestFailed = false;
    /**
     * Number of assertions performed during the run (accumulated outside
     * this chunk).
     *
     * @var int
     */
    protected $numAssertions = 0;
    /**
     * Total number of tests; -1 until known.
     *
     * @var int
     */
    protected $numTests = -1;
    /**
     * Number of tests executed so far.
     *
     * @var int
     */
    protected $numTestsRun = 0;
    /**
     * Character width of $numTests, presumably used to pad a progress
     * counter — TODO confirm.
     *
     * @var int
     */
    protected $numTestsWidth;
    /**
     * Whether output is colorized; decided in the constructor from the
     * $colors option and console capabilities.
     *
     * @var bool
     */
    protected $colors = false;
    /**
     * Whether debug output is enabled (constructor option).
     *
     * @var bool
     */
    protected $debug = false;
    /**
     * Whether verbose lists (risky/incomplete/skipped) are printed
     * (constructor option; consumed by printResult()).
     *
     * @var bool
     */
    protected $verbose = false;
    /**
     * Output width in columns, clamped to the console width in the
     * constructor.
     *
     * @var int
     */
    private $numberOfColumns;
    /**
     * Whether defect lists are printed in reverse order (see printDefects()).
     *
     * @var bool
     */
    private $reverse = false;
    /**
     * Whether at least one defect list has been printed already; used by
     * printDefects() to emit a "--" divider between sections.
     *
     * @var bool
     */
    private $defectListPrinted = false;
* Constructor.
*
* @param mixed $out
* @param bool $verbose
* @param string $colors
* @param bool $debug
* @param int|string $numberOfColumns
* @param bool $reverse
*
* @throws PHPUnit_Framework_Exception
*
* @since Method available since Release 3.0.0
*/
public function __construct($out = null, $verbose = false, $colors = self::COLOR_DEFAULT, $debug = false, $numberOfColumns = 80, $reverse = false)
{
parent::__construct($out);
if (!is_bool($verbose)) {
throw PHPUnit_Util_InvalidArgumentHelper::factory(2, 'boolean');
}
$availableColors = [self::COLOR_NEVER, self::COLOR_AUTO, self::COLOR_ALWAYS];
if (!in_array($colors, $availableColors)) {
throw PHPUnit_Util_InvalidArgumentHelper::factory(
3,
vsprintf('value from "%s", "%s" or "%s"', $availableColors)
);
}
if (!is_bool($debug)) {
throw PHPUnit_Util_InvalidArgumentHelper::factory(4, 'boolean');
}
if (!is_int($numberOfColumns) && $numberOfColumns != 'max') {
throw PHPUnit_Util_InvalidArgumentHelper::factory(5, 'integer or "max"');
}
if (!is_bool($reverse)) {
throw PHPUnit_Util_InvalidArgumentHelper::factory(6, 'boolean');
}
$console = new Console;
$maxNumberOfColumns = $console->getNumberOfColumns();
if ($numberOfColumns == 'max' || $numberOfColumns > $maxNumberOfColumns) {
$numberOfColumns = $maxNumberOfColumns;
}
$this->numberOfColumns = $numberOfColumns;
$this->verbose = $verbose;
$this->debug = $debug;
$this->reverse = $reverse;
if ($colors === self::COLOR_AUTO && $console->hasColorSupport()) {
$this->colors = true;
} else {
$this->colors = (self::COLOR_ALWAYS === $colors);
}
}
    /**
     * Prints the complete report for a finished test run: header, defect
     * lists, and the footer summary.
     *
     * Errors, warnings and failures are always listed; risky, incomplete
     * and skipped tests are only listed when verbose mode is enabled.
     *
     * @param PHPUnit_Framework_TestResult $result
     */
    public function printResult(PHPUnit_Framework_TestResult $result)
    {
        $this->printHeader();
        $this->printErrors($result);
        $this->printWarnings($result);
        $this->printFailures($result);
        if ($this->verbose) {
            $this->printRisky($result);
            $this->printIncompletes($result);
            $this->printSkipped($result);
        }
        $this->printFooter($result);
    }
/**
* @param array $defects
* @param string $type
*/
protected function printDefects(array $defects, $type)
{
$count = count($defects);
if ($count == 0) {
return;
}
if ($this->defectListPrinted) {
$this->write("\n--\n\n");
}
$this->write(
sprintf(
"There %s %d %s%s:\n",
($count == 1) ? 'was' : 'were',
$count,
$type,
($count == 1) ? '' : 's'
)
);
$i = 1;
if ($this->reverse) {
$defects = array_reverse($defects);
}
foreach ($defects as $defect) {
$this->printDefect($defect, $i++);
}
$this->defectListPrinted = true;
}
    /**
     * Prints a single defect: a numbered header followed by its trace.
     *
     * @param PHPUnit_Framework_TestFailure $defect
     * @param int                           $count
     */
    protected function printDefect(PHPUnit_Framework_TestFailure $defect, $count)
    {
        $this->printDefectHeader($defect, $count);
        $this->printDefectTrace($defect);
    }

    /**
     * Prints the numbered "N) TestName" header line for a defect.
     *
     * @param PHPUnit_Framework_TestFailure $defect
     * @param int                           $count
     */
    protected function printDefectHeader(PHPUnit_Framework_TestFailure $defect, $count)
    {
        $this->write(
            sprintf(
                "\n%d) %s\n",
                $count,
                $defect->getTestName()
            )
        );
    }

    /**
     * Prints the exception of a defect, following the chain of "previous"
     * exceptions and labelling each one with "Caused by".
     *
     * @param PHPUnit_Framework_TestFailure $defect
     */
    protected function printDefectTrace(PHPUnit_Framework_TestFailure $defect)
    {
        $e = $defect->thrownException();
        $this->write((string) $e);

        while ($e = $e->getPrevious()) {
            $this->write("\nCaused by\n" . $e);
        }
    }
    /**
     * Prints the list of errors.
     *
     * @param PHPUnit_Framework_TestResult $result
     */
    protected function printErrors(PHPUnit_Framework_TestResult $result)
    {
        $this->printDefects($result->errors(), 'error');
    }

    /**
     * Prints the list of failures.
     *
     * @param PHPUnit_Framework_TestResult $result
     */
    protected function printFailures(PHPUnit_Framework_TestResult $result)
    {
        $this->printDefects($result->failures(), 'failure');
    }

    /**
     * Prints the list of warnings.
     *
     * @param PHPUnit_Framework_TestResult $result
     */
    protected function printWarnings(PHPUnit_Framework_TestResult $result)
    {
        $this->printDefects($result->warnings(), 'warning');
    }

    /**
     * Prints the list of incomplete tests.
     *
     * @param PHPUnit_Framework_TestResult $result
     */
    protected function printIncompletes(PHPUnit_Framework_TestResult $result)
    {
        $this->printDefects($result->notImplemented(), 'incomplete test');
    }

    /**
     * Prints the list of risky tests.
     *
     * @param PHPUnit_Framework_TestResult $result
     *
     * @since Method available since Release 4.0.0
     */
    protected function printRisky(PHPUnit_Framework_TestResult $result)
    {
        $this->printDefects($result->risky(), 'risky test');
    }

    /**
     * Prints the list of skipped tests.
     *
     * @param PHPUnit_Framework_TestResult $result
     *
     * @since Method available since Release 3.0.0
     */
    protected function printSkipped(PHPUnit_Framework_TestResult $result)
    {
        $this->printDefects($result->skipped(), 'skipped test');
    }
    /**
     * Prints the resource-usage line (from PHP_Timer) that precedes the
     * defect lists.
     */
    protected function printHeader()
    {
        $this->write("\n\n" . PHP_Timer::resourceUsage() . "\n\n");
    }
    /**
     * Prints the summary footer: a green "OK" line for fully clean runs, a
     * yellow line for runs that passed but contained incomplete, skipped, or
     * risky tests, or a red headline plus per-category counts for failed runs.
     *
     * @param PHPUnit_Framework_TestResult $result
     */
    protected function printFooter(PHPUnit_Framework_TestResult $result)
    {
        // Nothing ran at all: highlight that and bail out.
        if (count($result) === 0) {
            $this->writeWithColor(
                'fg-black, bg-yellow',
                'No tests executed!'
            );

            return;
        }

        // Fully clean run: everything passed and nothing was risky,
        // incomplete, or skipped.
        if ($result->wasSuccessful() &&
            $result->allHarmless() &&
            $result->allCompletelyImplemented() &&
            $result->noneSkipped()) {
            $this->writeWithColor(
                'fg-black, bg-green',
                sprintf(
                    'OK (%d test%s, %d assertion%s)',
                    count($result),
                    (count($result) == 1) ? '' : 's',
                    $this->numAssertions,
                    ($this->numAssertions == 1) ? '' : 's'
                )
            );
        } else {
            if ($result->wasSuccessful()) {
                $color = 'fg-black, bg-yellow';

                if ($this->verbose) {
                    $this->write("\n");
                }

                $this->writeWithColor(
                    $color,
                    'OK, but incomplete, skipped, or risky tests!'
                );
            } else {
                $this->write("\n");

                if ($result->errorCount()) {
                    $color = 'fg-white, bg-red';

                    $this->writeWithColor(
                        $color,
                        'ERRORS!'
                    );
                } elseif ($result->failureCount()) {
                    $color = 'fg-white, bg-red';

                    $this->writeWithColor(
                        $color,
                        'FAILURES!'
                    );
                } elseif ($result->warningCount()) {
                    $color = 'fg-black, bg-yellow';

                    $this->writeWithColor(
                        $color,
                        'WARNINGS!'
                    );
                }
                // NOTE(review): $color stays unset if none of the three
                // branches above matched; presumably wasSuccessful() === false
                // guarantees at least one error/failure/warning - confirm
                // against PHPUnit_Framework_TestResult::wasSuccessful().
            }

            // The same $color is reused for every count fragment below.
            $this->writeCountString(count($result), 'Tests', $color, true);
            $this->writeCountString($this->numAssertions, 'Assertions', $color, true);
            $this->writeCountString($result->errorCount(), 'Errors', $color);
            $this->writeCountString($result->failureCount(), 'Failures', $color);
            $this->writeCountString($result->warningCount(), 'Warnings', $color);
            $this->writeCountString($result->skippedCount(), 'Skipped', $color);
            $this->writeCountString($result->notImplementedCount(), 'Incomplete', $color);
            $this->writeCountString($result->riskyCount(), 'Risky', $color);
            $this->writeWithColor($color, '.', true);
        }
    }
    /**
     * Prompts the user to press <RETURN> before the runner continues.
     */
    public function printWaitPrompt()
    {
        $this->write("\n<RETURN> to continue\n");
    }
    /**
     * An error occurred. Writes a red "E" progress marker and flags the test
     * as failed so endTest() does not also print a ".".
     *
     * @param PHPUnit_Framework_Test $test
     * @param Exception              $e
     * @param float                  $time
     */
    public function addError(PHPUnit_Framework_Test $test, Exception $e, $time)
    {
        $this->writeProgressWithColor('fg-red, bold', 'E');
        $this->lastTestFailed = true;
    }

    /**
     * A failure occurred. Writes an "F" marker on a red background and flags
     * the test as failed.
     *
     * @param PHPUnit_Framework_Test                 $test
     * @param PHPUnit_Framework_AssertionFailedError $e
     * @param float                                  $time
     */
    public function addFailure(PHPUnit_Framework_Test $test, PHPUnit_Framework_AssertionFailedError $e, $time)
    {
        $this->writeProgressWithColor('bg-red, fg-white', 'F');
        $this->lastTestFailed = true;
    }

    /**
     * A warning occurred. Writes a yellow "W" marker and flags the test as
     * failed.
     *
     * @param PHPUnit_Framework_Test    $test
     * @param PHPUnit_Framework_Warning $e
     * @param float                     $time
     *
     * @since Method available since Release 5.1.0
     */
    public function addWarning(PHPUnit_Framework_Test $test, PHPUnit_Framework_Warning $e, $time)
    {
        $this->writeProgressWithColor('fg-yellow, bold', 'W');
        $this->lastTestFailed = true;
    }

    /**
     * Incomplete test. Writes a yellow "I" marker and flags the test as
     * failed.
     *
     * @param PHPUnit_Framework_Test $test
     * @param Exception              $e
     * @param float                  $time
     */
    public function addIncompleteTest(PHPUnit_Framework_Test $test, Exception $e, $time)
    {
        $this->writeProgressWithColor('fg-yellow, bold', 'I');
        $this->lastTestFailed = true;
    }

    /**
     * Risky test. Writes a yellow "R" marker and flags the test as failed.
     *
     * @param PHPUnit_Framework_Test $test
     * @param Exception              $e
     * @param float                  $time
     *
     * @since Method available since Release 4.0.0
     */
    public function addRiskyTest(PHPUnit_Framework_Test $test, Exception $e, $time)
    {
        $this->writeProgressWithColor('fg-yellow, bold', 'R');
        $this->lastTestFailed = true;
    }

    /**
     * Skipped test. Writes a cyan "S" marker and flags the test as failed.
     *
     * @param PHPUnit_Framework_Test $test
     * @param Exception              $e
     * @param float                  $time
     *
     * @since Method available since Release 3.0.0
     */
    public function addSkippedTest(PHPUnit_Framework_Test $test, Exception $e, $time)
    {
        $this->writeProgressWithColor('fg-cyan, bold', 'S');
        $this->lastTestFailed = true;
    }
    /**
     * A testsuite started. On the first (outermost) suite, records the total
     * test count and derives the progress-line geometry from it.
     *
     * @param PHPUnit_Framework_TestSuite $suite
     *
     * @since Method available since Release 2.2.0
     */
    public function startTestSuite(PHPUnit_Framework_TestSuite $suite)
    {
        if ($this->numTests == -1) {
            $this->numTests = count($suite);
            $this->numTestsWidth = strlen((string) $this->numTests);
            // Reserve room at the right edge for the " NN / MM (PPP%)" counter.
            $this->maxColumn = $this->numberOfColumns - strlen(' / (XXX%)') - (2 * $this->numTestsWidth);
        }
    }
    /**
     * A testsuite ended. Intentionally a no-op: this printer produces no
     * per-suite output.
     *
     * @param PHPUnit_Framework_TestSuite $suite
     *
     * @since Method available since Release 2.2.0
     */
    public function endTestSuite(PHPUnit_Framework_TestSuite $suite)
    {
    }
/**
* A test started.
*
* @param PHPUnit_Framework_Test $test
*/
public function startTest(PHPUnit_Framework_Test $test)
{
if ($this->debug) {
$this->write(
sprintf(
"\nStarting test '%s'.\n",
PHPUnit_Util_Test::describe($test)
)
);
}
}
    /**
     * A test ended. Prints the "." progress marker for passing tests, tallies
     * the test's assertion count, and echoes any output the test produced.
     *
     * @param PHPUnit_Framework_Test $test
     * @param float                  $time
     */
    public function endTest(PHPUnit_Framework_Test $test, $time)
    {
        if (!$this->lastTestFailed) {
            $this->writeProgress('.');
        }

        if ($test instanceof PHPUnit_Framework_TestCase) {
            $this->numAssertions += $test->getNumAssertions();
        } elseif ($test instanceof PHPUnit_Extensions_PhptTestCase) {
            // PHPT tests expose no assertion counter; count the test itself.
            $this->numAssertions++;
        }

        $this->lastTestFailed = false;

        if ($test instanceof PHPUnit_Framework_TestCase) {
            // Forward the test's captured output unless the test itself
            // asserted on that output.
            if (!$test->hasExpectationOnOutput()) {
                $this->write($test->getActualOutput());
            }
        }
    }
    /**
     * Writes one progress character and, whenever the line fills up or the
     * run completes, appends the right-aligned " NN / MM (PP%)" counter.
     *
     * @param string $progress single progress character (possibly colorized)
     */
    protected function writeProgress($progress)
    {
        $this->write($progress);
        $this->column++;
        $this->numTestsRun++;

        if ($this->column == $this->maxColumn
            || $this->numTestsRun == $this->numTests
        ) {
            // On the final (possibly partial) line, pad with spaces so the
            // counter lines up with the previous lines.
            if ($this->numTestsRun == $this->numTests) {
                $this->write(str_repeat(' ', $this->maxColumn - $this->column));
            }

            $this->write(
                sprintf(
                    ' %' . $this->numTestsWidth . 'd / %' .
                    $this->numTestsWidth . 'd (%3s%%)',
                    $this->numTestsRun,
                    $this->numTests,
                    floor(($this->numTestsRun / $this->numTests) * 100)
                )
            );

            if ($this->column == $this->maxColumn) {
                $this->writeNewLine();
            }
        }
    }
    /**
     * Resets the progress column and moves output to the next line.
     */
    protected function writeNewLine()
    {
        $this->column = 0;
        $this->write("\n");
    }
/**
* Formats a buffer with a specified ANSI color sequence if colors are
* enabled.
*
* @param string $color
* @param string $buffer
*
* @return string
*
* @since Method available since Release 4.0.0
*/
protected function formatWithColor($color, $buffer)
{
if (!$this->colors) {
return $buffer;
}
$codes = array_map('trim', explode(',', $color));
$lines = explode("\n", $buffer);
$padding = max(array_map('strlen', $lines));
$styles = [];
foreach ($codes as $code) {
$styles[] = self::$ansiCodes[$code];
}
$style = sprintf("\x1b[%sm", implode(';', $styles));
$styledLines = [];
foreach ($lines as $line) {
$styledLines[] = $style . str_pad($line, $padding) . "\x1b[0m";
}
return implode("\n", $styledLines);
}
    /**
     * Writes a buffer out with a color sequence if colors are enabled.
     *
     * @param string $color
     * @param string $buffer
     * @param bool   $lf append a trailing newline
     *
     * @since Method available since Release 4.0.0
     */
    protected function writeWithColor($color, $buffer, $lf = true)
    {
        $this->write($this->formatWithColor($color, $buffer));

        if ($lf) {
            $this->write("\n");
        }
    }
    /**
     * Writes a progress character with a color sequence if colors are
     * enabled; the character still counts toward the progress column.
     *
     * @param string $color
     * @param string $buffer
     *
     * @since Method available since Release 4.0.0
     */
    protected function writeProgressWithColor($color, $buffer)
    {
        $buffer = $this->formatWithColor($color, $buffer);
        $this->writeProgress($buffer);
    }
    /**
     * Writes one "Name: N" fragment of the footer count line. The static
     * $first flag persists across calls so every fragment after the first is
     * prefixed with ", ".
     *
     * @param int    $count
     * @param string $name
     * @param string $color
     * @param bool   $always write the fragment even when $count is zero
     *
     * @since Method available since Release 4.6.5
     */
    private function writeCountString($count, $name, $color, $always = false)
    {
        static $first = true;

        if ($always || $count > 0) {
            $this->writeWithColor(
                $color,
                sprintf(
                    '%s%s: %d',
                    !$first ? ', ' : '',
                    $name,
                    $count
                ),
                false
            );

            $first = false;
        }
    }
}
| {
"pile_set_name": "Github"
} |
<!-- Page layout component: renders routed content (via the default slot)
     inside a full-bleed container and mounts the app-wide snackbar. -->
<template>
<v-content>
<v-container fluid class="pa-0">
<slot/>
</v-container>
<app-snackbar/>
</v-content>
</template>
<script>
// Global notification snackbar displayed on top of every page.
import AppSnackbar from '@/components/AppSnackbar'
export default {
  components: {
    AppSnackbar
  }
}
</script>
| {
"pile_set_name": "Github"
} |
//===--- TokenAnalyzer.cpp - Analyze Token Streams --------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements an abstract TokenAnalyzer and associated helper
/// classes. TokenAnalyzer can be extended to generate replacements based on
/// an annotated and pre-processed token stream.
///
//===----------------------------------------------------------------------===//
#include "TokenAnalyzer.h"
#include "AffectedRangeManager.h"
#include "Encoding.h"
#include "FormatToken.h"
#include "FormatTokenLexer.h"
#include "TokenAnnotator.h"
#include "UnwrappedLineParser.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/DiagnosticOptions.h"
#include "clang/Basic/FileManager.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Format/Format.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#define DEBUG_TYPE "format-formatter"
namespace clang {
namespace format {
// Builds an in-memory source file for \p Code and records the requested
// byte-offset ranges as CharSourceRanges anchored at the start of that file.
Environment::Environment(StringRef Code, StringRef FileName,
                         ArrayRef<tooling::Range> Ranges,
                         unsigned FirstStartColumn, unsigned NextStartColumn,
                         unsigned LastStartColumn)
    : VirtualSM(new SourceManagerForFile(FileName, Code)), SM(VirtualSM->get()),
      ID(VirtualSM->get().getMainFileID()), FirstStartColumn(FirstStartColumn),
      NextStartColumn(NextStartColumn), LastStartColumn(LastStartColumn) {
  SourceLocation StartOfFile = SM.getLocForStartOfFile(ID);
  for (const tooling::Range &Range : Ranges) {
    // Convert (offset, length) into a half-open character range.
    SourceLocation Start = StartOfFile.getLocWithOffset(Range.getOffset());
    SourceLocation End = Start.getLocWithOffset(Range.getLength());
    CharRanges.push_back(CharSourceRange::getCharRange(Start, End));
  }
}
// Sets up the shared analysis state: remembers the style and environment,
// creates the affected-range manager over the requested ranges, seeds the
// first (empty) run of unwrapped lines, and detects the buffer's encoding.
TokenAnalyzer::TokenAnalyzer(const Environment &Env, const FormatStyle &Style)
    : Style(Style), Env(Env),
      AffectedRangeMgr(Env.getSourceManager(), Env.getCharRanges()),
      UnwrappedLines(1),
      Encoding(encoding::detectEncoding(
          Env.getSourceManager().getBufferData(Env.getFileID()))) {
  LLVM_DEBUG(
      llvm::dbgs() << "File encoding: "
                   << (Encoding == encoding::Encoding_UTF8 ? "UTF8" : "unknown")
                   << "\n");
  LLVM_DEBUG(llvm::dbgs() << "Language: " << getLanguageName(Style.Language)
                          << "\n");
}
// Runs the full pipeline: lexes the input, parses it into runs of unwrapped
// lines (delivered back through consumeUnwrappedLine()/finishRun()), annotates
// each run, and asks the subclass (via analyze()) for replacements. Returns
// the merged replacements and the accumulated penalty; on a merge conflict it
// returns an empty replacement set and zero penalty.
std::pair<tooling::Replacements, unsigned> TokenAnalyzer::process() {
  tooling::Replacements Result;
  FormatTokenLexer Tokens(Env.getSourceManager(), Env.getFileID(),
                          Env.getFirstStartColumn(), Style, Encoding);

  UnwrappedLineParser Parser(Style, Tokens.getKeywords(),
                             Env.getFirstStartColumn(), Tokens.lex(), *this);
  Parser.parse();
  // finishRun() always leaves a trailing empty run; the loop below skips it
  // by iterating to size() - 1.
  assert(UnwrappedLines.rbegin()->empty());
  unsigned Penalty = 0;
  for (unsigned Run = 0, RunE = UnwrappedLines.size(); Run + 1 != RunE; ++Run) {
    LLVM_DEBUG(llvm::dbgs() << "Run " << Run << "...\n");
    SmallVector<AnnotatedLine *, 16> AnnotatedLines;

    TokenAnnotator Annotator(Style, Tokens.getKeywords());
    for (unsigned i = 0, e = UnwrappedLines[Run].size(); i != e; ++i) {
      AnnotatedLines.push_back(new AnnotatedLine(UnwrappedLines[Run][i]));
      Annotator.annotate(*AnnotatedLines.back());
    }

    // Subclass hook: compute this run's replacements and penalty.
    std::pair<tooling::Replacements, unsigned> RunResult =
        analyze(Annotator, AnnotatedLines, Tokens);

    LLVM_DEBUG({
      llvm::dbgs() << "Replacements for run " << Run << ":\n";
      for (tooling::Replacements::const_iterator I = RunResult.first.begin(),
                                                 E = RunResult.first.end();
           I != E; ++I) {
        llvm::dbgs() << I->toString() << "\n";
      }
    });
    // The AnnotatedLines were heap-allocated above; release them before
    // merging this run's replacements.
    for (unsigned i = 0, e = AnnotatedLines.size(); i != e; ++i) {
      delete AnnotatedLines[i];
    }

    Penalty += RunResult.second;
    for (const auto &R : RunResult.first) {
      auto Err = Result.add(R);
      // FIXME: better error handling here. For now, simply return an empty
      // Replacements to indicate failure.
      if (Err) {
        llvm::errs() << llvm::toString(std::move(Err)) << "\n";
        return {tooling::Replacements(), 0};
      }
    }
  }
  return {Result, Penalty};
}
// Parser callback: records one parsed line in the current run.
void TokenAnalyzer::consumeUnwrappedLine(const UnwrappedLine &TheLine) {
  assert(!UnwrappedLines.empty());
  UnwrappedLines.back().push_back(TheLine);
}
// Parser callback: seals the current run and opens a fresh, empty run for
// lines that follow. emplace_back() default-constructs the new SmallVector
// in place instead of copying a temporary.
void TokenAnalyzer::finishRun() {
  UnwrappedLines.emplace_back();
}
} // end namespace format
} // end namespace clang
| {
"pile_set_name": "Github"
} |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.broker.processor;
import io.netty.channel.ChannelHandlerContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alibaba.rocketmq.broker.BrokerController;
import com.alibaba.rocketmq.common.constant.LoggerName;
import com.alibaba.rocketmq.remoting.netty.NettyRequestProcessor;
import com.alibaba.rocketmq.remoting.protocol.RemotingCommand;
/**
 * Placeholder processor for forward requests: it accepts the request and
 * produces no reply.
 *
 * @author shijia.wxr
 */
public class ForwardRequestProcessor implements NettyRequestProcessor {
    private static final Logger log = LoggerFactory.getLogger(LoggerName.BrokerLoggerName);

    // Owning broker; kept for use by the (currently empty) handler.
    private final BrokerController brokerController;

    public ForwardRequestProcessor(final BrokerController brokerController) {
        this.brokerController = brokerController;
    }

    /**
     * Handles a forward request. Always returns null, i.e. no response
     * command is produced. NOTE(review): confirm the remoting layer treats a
     * null return as "send no reply".
     */
    @Override
    public RemotingCommand processRequest(ChannelHandlerContext ctx, RemotingCommand request) {
        return null;
    }
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2014 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "mongoc-array-private.h"
#include "mongoc-error.h"
#include "mongoc-server-description-private.h"
#include "mongoc-topology-description-apm-private.h"
#include "mongoc-trace-private.h"
#include "mongoc-util-private.h"
#include "mongoc-read-prefs-private.h"
#include "mongoc-set-private.h"
#include "mongoc-client-private.h"
#include "mongoc-thread-private.h"
/* Reports whether a server description refers to a node that can hold data:
 * a mongos, a standalone server, or a replica-set primary/secondary.
 * Arbiters, ghosts, and unknown/transitional types are not data nodes. */
static bool
_is_data_node (mongoc_server_description_t *sd)
{
   return sd->type == MONGOC_SERVER_MONGOS ||
          sd->type == MONGOC_SERVER_STANDALONE ||
          sd->type == MONGOC_SERVER_RS_PRIMARY ||
          sd->type == MONGOC_SERVER_RS_SECONDARY;
}
/* Destructor callback for entries of the topology's server set; the ctx
 * argument is unused. */
static void
_mongoc_topology_server_dtor (void *server_, void *ctx_)
{
   mongoc_server_description_destroy ((mongoc_server_description_t *) server_);
}
/*
 *--------------------------------------------------------------------------
 *
 * mongoc_topology_description_init --
 *
 *       Initialize the given topology description to the Unknown topology
 *       type with an empty server set and a fresh topology id.
 *
 *       Returns:
 *             None.
 *
 *       Side effects:
 *             None.
 *
 *--------------------------------------------------------------------------
 */
void
mongoc_topology_description_init (mongoc_topology_description_t *description,
                                  int64_t heartbeat_msec)
{
   ENTRY;

   BSON_ASSERT (description);

   memset (description, 0, sizeof (*description));

   bson_oid_init (&description->topology_id, NULL);
   description->opened = false;
   description->type = MONGOC_TOPOLOGY_UNKNOWN;
   description->heartbeat_msec = heartbeat_msec;
   /* Server descriptions are owned by the set and destroyed with it. */
   description->servers =
      mongoc_set_new (8, _mongoc_topology_server_dtor, NULL);
   description->set_name = NULL;
   /* MONGOC_NO_SET_VERSION marks "no replica-set version seen yet". */
   description->max_set_version = MONGOC_NO_SET_VERSION;
   description->stale = true;
   description->rand_seed = (unsigned int) bson_get_monotonic_time ();
   bson_init (&description->cluster_time);
   description->session_timeout_minutes = MONGOC_NO_SESSIONS;

   EXIT;
}
/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_copy_to --
 *
 *       Deep-copy @src to an uninitialized topology description @dst.
 *       @dst must not already point to any allocated resources. Clean
 *       up with mongoc_topology_description_destroy.
 *
 *       WARNING: @dst's rand_seed is not initialized.
 *
 *       Returns:
 *             None.
 *
 *       Side effects:
 *             None.
 *
 *--------------------------------------------------------------------------
 */
void
_mongoc_topology_description_copy_to (const mongoc_topology_description_t *src,
                                      mongoc_topology_description_t *dst)
{
   size_t nitems;
   size_t i;
   mongoc_server_description_t *sd;
   uint32_t id;

   ENTRY;

   BSON_ASSERT (src);
   BSON_ASSERT (dst);

   bson_oid_copy (&src->topology_id, &dst->topology_id);
   dst->opened = src->opened;
   dst->type = src->type;
   dst->heartbeat_msec = src->heartbeat_msec;

   /* Deep-copy every server description, preserving each server's id. */
   nitems = bson_next_power_of_two (src->servers->items_len);
   dst->servers = mongoc_set_new (nitems, _mongoc_topology_server_dtor, NULL);
   for (i = 0; i < src->servers->items_len; i++) {
      sd = mongoc_set_get_item_and_id (src->servers, (int) i, &id);
      mongoc_set_add (
         dst->servers, id, mongoc_server_description_new_copy (sd));
   }

   dst->set_name = bson_strdup (src->set_name);
   /* NOTE(review): max_election_id is not copied here - confirm whether
    * that is intentional. */
   dst->max_set_version = src->max_set_version;
   memcpy (&dst->compatibility_error,
           &src->compatibility_error,
           sizeof (bson_error_t));
   dst->max_server_id = src->max_server_id;
   dst->stale = src->stale;
   memcpy (&dst->apm_callbacks,
           &src->apm_callbacks,
           sizeof (mongoc_apm_callbacks_t));
   dst->apm_context = src->apm_context;
   bson_copy_to (&src->cluster_time, &dst->cluster_time);
   dst->session_timeout_minutes = src->session_timeout_minutes;

   EXIT;
}
/*
 *--------------------------------------------------------------------------
 *
 * mongoc_topology_description_destroy --
 *
 *       Destroy allocated resources within @description. The struct itself
 *       is not freed.
 *
 *       Returns:
 *             None.
 *
 *       Side effects:
 *             None.
 *
 *--------------------------------------------------------------------------
 */
void
mongoc_topology_description_destroy (mongoc_topology_description_t *description)
{
   ENTRY;

   BSON_ASSERT (description);

   /* Destroying the set also destroys every server description it owns. */
   if (description->servers) {
      mongoc_set_destroy (description->servers);
   }

   if (description->set_name) {
      bson_free (description->set_name);
   }

   bson_destroy (&description->cluster_time);

   EXIT;
}
/* mongoc_set_for_each callback: store the first primary (or standalone)
 * server in *ctx, then return false to stop iterating. */
static bool
_mongoc_topology_description_has_primary_cb (void *item, void *ctx /* OUT */)
{
   mongoc_server_description_t *server = (mongoc_server_description_t *) item;
   mongoc_server_description_t **primary = (mongoc_server_description_t **) ctx;

   /* TODO should this include MONGOS? */
   if (server->type == MONGOC_SERVER_RS_PRIMARY ||
       server->type == MONGOC_SERVER_STANDALONE) {
      *primary = (mongoc_server_description_t *) item;
      return false;
   }
   return true;
}

/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_has_primary --
 *
 *       If topology has a primary (or standalone) server, return it.
 *
 *       Returns:
 *             A pointer to the primary, or NULL.
 *
 *       Side effects:
 *             None
 *
 *--------------------------------------------------------------------------
 */
static mongoc_server_description_t *
_mongoc_topology_description_has_primary (
   mongoc_topology_description_t *description)
{
   mongoc_server_description_t *primary = NULL;

   mongoc_set_for_each (description->servers,
                        _mongoc_topology_description_has_primary_cb,
                        &primary);

   return primary;
}
/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_later_election --
 *
 *       Check if we've seen a more recent election in the replica set
 *       than this server has.
 *
 *       Returns:
 *             True if the topology description's max replica set version
 *             plus election id is later than the server description's.
 *
 *       Side effects:
 *             None
 *
 *--------------------------------------------------------------------------
 */
static bool
_mongoc_topology_description_later_election (mongoc_topology_description_t *td,
                                             mongoc_server_description_t *sd)
{
   /* initially max_set_version is -1 and max_election_id is zeroed; the set
    * version is compared first and the election id breaks ties. */
   return td->max_set_version > sd->set_version ||
          (td->max_set_version == sd->set_version &&
           bson_oid_compare (&td->max_election_id, &sd->election_id) > 0);
}

/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_set_max_set_version --
 *
 *       Remember that we've seen a new replica set version. Unconditionally
 *       sets td->max_set_version to sd->set_version.
 *
 *--------------------------------------------------------------------------
 */
static void
_mongoc_topology_description_set_max_set_version (
   mongoc_topology_description_t *td, mongoc_server_description_t *sd)
{
   td->max_set_version = sd->set_version;
}

/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_set_max_election_id --
 *
 *       Remember that we've seen a new election id. Unconditionally sets
 *       td->max_election_id to sd->election_id.
 *
 *--------------------------------------------------------------------------
 */
static void
_mongoc_topology_description_set_max_election_id (
   mongoc_topology_description_t *td, mongoc_server_description_t *sd)
{
   bson_oid_copy (&sd->election_id, &td->max_election_id);
}
/* Decides whether a server of the given type may serve an operation with the
 * given read mode under the given topology type. */
static bool
_mongoc_topology_description_server_is_candidate (
   mongoc_server_description_type_t desc_type,
   mongoc_read_mode_t read_mode,
   mongoc_topology_description_type_t topology_type)
{
   switch ((int) topology_type) {
   case MONGOC_TOPOLOGY_SINGLE:
      /* Single topology: only a standalone server qualifies. */
      switch ((int) desc_type) {
      case MONGOC_SERVER_STANDALONE:
         return true;
      default:
         return false;
      }

   case MONGOC_TOPOLOGY_RS_NO_PRIMARY:
   case MONGOC_TOPOLOGY_RS_WITH_PRIMARY:
      /* Replica sets: eligibility depends on the read mode. */
      switch ((int) read_mode) {
      case MONGOC_READ_PRIMARY:
         switch ((int) desc_type) {
         case MONGOC_SERVER_RS_PRIMARY:
            return true;
         default:
            return false;
         }
      case MONGOC_READ_SECONDARY:
         switch ((int) desc_type) {
         case MONGOC_SERVER_RS_SECONDARY:
            return true;
         default:
            return false;
         }
      default:
         /* primaryPreferred / secondaryPreferred / nearest: either data
          * member of the replica set qualifies. */
         switch ((int) desc_type) {
         case MONGOC_SERVER_RS_PRIMARY:
         case MONGOC_SERVER_RS_SECONDARY:
            return true;
         default:
            return false;
         }
      }

   case MONGOC_TOPOLOGY_SHARDED:
      /* Sharded topology: only mongos routers qualify. */
      switch ((int) desc_type) {
      case MONGOC_SERVER_MONGOS:
         return true;
      default:
         return false;
      }
   default:
      return false;
   }
}
/* Accumulator passed to the mongoc_set_for_each callbacks below while
 * collecting servers eligible for selection. */
typedef struct _mongoc_suitable_data_t {
   mongoc_read_mode_t read_mode;
   mongoc_topology_description_type_t topology_type;
   mongoc_server_description_t *primary;     /* OUT */
   mongoc_server_description_t **candidates; /* OUT */
   size_t candidates_len;                    /* OUT */
   bool has_secondary;                       /* OUT */
} mongoc_suitable_data_t;
/* mongoc_set_for_each callback: collect replica-set members that are
 * candidates for the requested read mode; stops early once a primary is
 * found and the mode wants one. */
static bool
_mongoc_replica_set_read_suitable_cb (void *item, void *ctx)
{
   mongoc_server_description_t *server = (mongoc_server_description_t *) item;
   mongoc_suitable_data_t *data = (mongoc_suitable_data_t *) ctx;

   /* primary's used in staleness calculation, even with mode SECONDARY */
   if (server->type == MONGOC_SERVER_RS_PRIMARY) {
      data->primary = server;
   }

   if (_mongoc_topology_description_server_is_candidate (
          server->type, data->read_mode, data->topology_type)) {
      if (server->type == MONGOC_SERVER_RS_PRIMARY) {
         if (data->read_mode == MONGOC_READ_PRIMARY ||
             data->read_mode == MONGOC_READ_PRIMARY_PREFERRED) {
            /* we want a primary and we have one, done! */
            return false;
         }
      }

      if (server->type == MONGOC_SERVER_RS_SECONDARY) {
         data->has_secondary = true;
      }

      /* add to our candidates */
      data->candidates[data->candidates_len++] = server;
   } else {
      TRACE ("Rejected [%s] [%s] for mode [%s]",
             mongoc_server_description_type (server),
             server->host.host_and_port,
             _mongoc_read_mode_as_str (data->read_mode));
   }
   return true;
}
/* Re-run suitable-server selection with the read mode forced to SECONDARY.
 * Used by the secondaryPreferred path: try secondaries first, then fall back
 * to the primary. (The previous comment here was copy-pasted from the
 * mongos helper below.) */
static void
_mongoc_try_mode_secondary (mongoc_array_t *set, /* OUT */
                            mongoc_topology_description_t *topology,
                            const mongoc_read_prefs_t *read_pref,
                            size_t local_threshold_ms)
{
   mongoc_read_prefs_t *secondary;

   /* Copy the caller's prefs (tags, max staleness) but override the mode. */
   secondary = mongoc_read_prefs_copy (read_pref);
   mongoc_read_prefs_set_mode (secondary, MONGOC_READ_SECONDARY);

   mongoc_topology_description_suitable_servers (
      set, MONGOC_SS_READ, topology, secondary, local_threshold_ms);

   mongoc_read_prefs_destroy (secondary);
}
/* if any mongos are candidates, add them to the candidates array */
static bool
_mongoc_find_suitable_mongos_cb (void *item, void *ctx)
{
   mongoc_server_description_t *server = (mongoc_server_description_t *) item;
   mongoc_suitable_data_t *data = (mongoc_suitable_data_t *) ctx;

   if (_mongoc_topology_description_server_is_candidate (
          server->type, data->read_mode, data->topology_type)) {
      data->candidates[data->candidates_len++] = server;
   }
   /* always continue iterating; every mongos is collected */
   return true;
}
/*
 *-------------------------------------------------------------------------
 *
 * mongoc_topology_description_lowest_max_wire_version --
 *
 *       The topology's max wire version.
 *
 *       NOTE: this method should only be called while holding the mutex on
 *       the owning topology object.
 *
 *       Returns:
 *             The minimum of all known servers' max wire versions, or
 *             INT32_MAX if there are no known servers.
 *
 *       Side effects:
 *             None.
 *
 *-------------------------------------------------------------------------
 */
int32_t
mongoc_topology_description_lowest_max_wire_version (
   const mongoc_topology_description_t *td)
{
   int i;
   int32_t ret = INT32_MAX;
   mongoc_server_description_t *sd;

   for (i = 0; (size_t) i < td->servers->items_len; i++) {
      sd = (mongoc_server_description_t *) mongoc_set_get_item (td->servers, i);

      /* servers of unknown type don't constrain the wire version */
      if (sd->type != MONGOC_SERVER_UNKNOWN && sd->max_wire_version < ret) {
         ret = sd->max_wire_version;
      }
   }

   return ret;
}
/*
 *-------------------------------------------------------------------------
 *
 * mongoc_topology_description_all_sds_have_write_date --
 *
 *       Whether the primary and all secondaries' server descriptions have
 *       a positive last_write_date_ms. Other server types are ignored.
 *
 *       Side effects:
 *             None.
 *
 *-------------------------------------------------------------------------
 */
bool
mongoc_topology_description_all_sds_have_write_date (
   const mongoc_topology_description_t *td)
{
   int i;
   mongoc_server_description_t *sd;

   for (i = 0; (size_t) i < td->servers->items_len; i++) {
      sd = (mongoc_server_description_t *) mongoc_set_get_item (td->servers, i);

      if (sd->last_write_date_ms <= 0 &&
          (sd->type == MONGOC_SERVER_RS_PRIMARY ||
           sd->type == MONGOC_SERVER_RS_SECONDARY)) {
         return false;
      }
   }

   return true;
}
/*
 *-------------------------------------------------------------------------
 *
 * _mongoc_topology_description_validate_max_staleness --
 *
 *       If the provided "maxStalenessSeconds" component of the read
 *       preference is not valid for this topology, fill out @error and
 *       return false. Only replica-set topologies are validated; all
 *       other topology types pass unconditionally.
 *
 *       Side effects:
 *             None.
 *
 *-------------------------------------------------------------------------
 */
bool
_mongoc_topology_description_validate_max_staleness (
   const mongoc_topology_description_t *td,
   int64_t max_staleness_seconds,
   bson_error_t *error)
{
   mongoc_topology_description_type_t td_type;

   /* Server Selection Spec: A driver MUST raise an error if the TopologyType
    * is ReplicaSetWithPrimary or ReplicaSetNoPrimary and either of these
    * conditions is false:
    *
    * maxStalenessSeconds * 1000 >= heartbeatFrequencyMS + idleWritePeriodMS
    * maxStalenessSeconds >= smallestMaxStalenessSeconds
    */

   td_type = td->type;

   if (td_type != MONGOC_TOPOLOGY_RS_WITH_PRIMARY &&
       td_type != MONGOC_TOPOLOGY_RS_NO_PRIMARY) {
      return true;
   }

   if (max_staleness_seconds * 1000 <
       td->heartbeat_msec + MONGOC_IDLE_WRITE_PERIOD_MS) {
      bson_set_error (error,
                      MONGOC_ERROR_COMMAND,
                      MONGOC_ERROR_COMMAND_INVALID_ARG,
                      "maxStalenessSeconds is set to %" PRId64
                      ", it must be at least heartbeatFrequencyMS (%" PRId64
                      ") + server's idle write period (%d seconds)",
                      max_staleness_seconds,
                      td->heartbeat_msec,
                      MONGOC_IDLE_WRITE_PERIOD_MS / 1000);
      return false;
   }

   if (max_staleness_seconds < MONGOC_SMALLEST_MAX_STALENESS_SECONDS) {
      bson_set_error (error,
                      MONGOC_ERROR_COMMAND,
                      MONGOC_ERROR_COMMAND_INVALID_ARG,
                      "maxStalenessSeconds is set to %" PRId64
                      ", it must be at least %d seconds",
                      max_staleness_seconds,
                      MONGOC_SMALLEST_MAX_STALENESS_SECONDS);
      return false;
   }

   return true;
}
/*
*-------------------------------------------------------------------------
*
* mongoc_topology_description_suitable_servers --
*
* Fill out an array of servers matching the read preference and
* localThresholdMS.
*
* NOTE: this method should only be called while holding the mutex on
* the owning topology object.
*
* Side effects:
* None.
*
*-------------------------------------------------------------------------
*/
void
mongoc_topology_description_suitable_servers (
   mongoc_array_t *set, /* OUT */
   mongoc_ss_optype_t optype,
   mongoc_topology_description_t *topology,
   const mongoc_read_prefs_t *read_pref,
   size_t local_threshold_ms)
{
   mongoc_suitable_data_t data;
   mongoc_server_description_t **candidates;
   mongoc_server_description_t *server;
   int64_t nearest = -1;
   int i;
   mongoc_read_mode_t read_mode = mongoc_read_prefs_get_mode (read_pref);

   /* scratch array large enough to hold a pointer to every known server;
    * entries are NULLed out as candidates are rejected */
   candidates = (mongoc_server_description_t **) bson_malloc0 (
      sizeof (*candidates) * topology->servers->items_len);

   data.read_mode = read_mode;
   data.topology_type = topology->type;
   data.primary = NULL;
   data.candidates = candidates;
   data.candidates_len = 0;
   data.has_secondary = false;

   /* Single server --
    * Either it is suitable or it isn't */
   if (topology->type == MONGOC_TOPOLOGY_SINGLE) {
      server = (mongoc_server_description_t *) mongoc_set_get_item (
         topology->servers, 0);

      if (_mongoc_topology_description_server_is_candidate (
             server->type, read_mode, topology->type)) {
         _mongoc_array_append_val (set, server);
      } else {
         TRACE (
            "Rejected [%s] [%s] for read mode [%s] with topology type Single",
            mongoc_server_description_type (server),
            server->host.host_and_port,
            _mongoc_read_mode_as_str (read_mode));
      }
      goto DONE;
   }

   /* Replica sets --
    * Find suitable servers based on read mode */
   if (topology->type == MONGOC_TOPOLOGY_RS_NO_PRIMARY ||
       topology->type == MONGOC_TOPOLOGY_RS_WITH_PRIMARY) {
      if (optype == MONGOC_SS_READ) {
         /* collect candidates, the primary, and has_secondary into @data */
         mongoc_set_for_each (
            topology->servers, _mongoc_replica_set_read_suitable_cb, &data);

         /* PRIMARY: only the primary is ever suitable */
         if (read_mode == MONGOC_READ_PRIMARY) {
            if (data.primary) {
               _mongoc_array_append_val (set, data.primary);
            }

            goto DONE;
         }

         /* PRIMARY_PREFERRED: take the primary if there is one, otherwise
          * fall through and treat candidates like NEAREST below */
         if (read_mode == MONGOC_READ_PRIMARY_PREFERRED && data.primary) {
            _mongoc_array_append_val (set, data.primary);
            goto DONE;
         }

         if (read_mode == MONGOC_READ_SECONDARY_PREFERRED) {
            /* try read_mode SECONDARY */
            _mongoc_try_mode_secondary (
               set, topology, read_pref, local_threshold_ms);

            /* otherwise fall back to primary */
            if (!set->len && data.primary) {
               _mongoc_array_append_val (set, data.primary);
            }

            goto DONE;
         }

         if (read_mode == MONGOC_READ_SECONDARY) {
            /* reject every candidate that is not a secondary */
            for (i = 0; i < data.candidates_len; i++) {
               if (candidates[i] &&
                   candidates[i]->type != MONGOC_SERVER_RS_SECONDARY) {
                  TRACE ("Rejected [%s] [%s] for mode [%s] with RS topology",
                         mongoc_server_description_type (candidates[i]),
                         candidates[i]->host.host_and_port,
                         _mongoc_read_mode_as_str (read_mode));

                  candidates[i] = NULL;
               }
            }
         }

         /* mode is SECONDARY or NEAREST, filter by staleness and tags */
         mongoc_server_description_filter_stale (data.candidates,
                                                 data.candidates_len,
                                                 data.primary,
                                                 topology->heartbeat_msec,
                                                 read_pref);

         mongoc_server_description_filter_tags (
            data.candidates, data.candidates_len, read_pref);
      } else if (topology->type == MONGOC_TOPOLOGY_RS_WITH_PRIMARY) {
         /* includes optype == MONGOC_SS_WRITE as the exclusion of the above if
          */
         mongoc_set_for_each (topology->servers,
                              _mongoc_topology_description_has_primary_cb,
                              &data.primary);
         if (data.primary) {
            _mongoc_array_append_val (set, data.primary);
            goto DONE;
         }
      }
   }

   /* Sharded clusters --
    * All candidates in the latency window are suitable */
   if (topology->type == MONGOC_TOPOLOGY_SHARDED) {
      mongoc_set_for_each (
         topology->servers, _mongoc_find_suitable_mongos_cb, &data);
   }

   /* Ways to get here:
    * - secondary read
    * - secondary preferred read
    * - primary_preferred and no primary read
    * - sharded anything
    * Find the nearest, then select within the window */
   for (i = 0; i < data.candidates_len; i++) {
      if (candidates[i] &&
          (nearest == -1 || nearest > candidates[i]->round_trip_time_msec)) {
         nearest = candidates[i]->round_trip_time_msec;
      }
   }

   /* keep every surviving candidate within localThresholdMS of the nearest */
   for (i = 0; i < data.candidates_len; i++) {
      if (candidates[i] && (candidates[i]->round_trip_time_msec <=
                            nearest + local_threshold_ms)) {
         _mongoc_array_append_val (set, candidates[i]);
      }
   }

DONE:

   bson_free (candidates);

   return;
}
/*
*--------------------------------------------------------------------------
*
* mongoc_topology_description_has_data_node --
*
* Internal method: are any servers not Arbiter, Ghost, or Unknown?
*
*--------------------------------------------------------------------------
*/
/* Report whether the topology contains at least one data-bearing member,
 * i.e. any server that is not an arbiter, ghost, or unknown. */
bool
mongoc_topology_description_has_data_node (mongoc_topology_description_t *td)
{
   size_t idx;

   for (idx = 0; idx < td->servers->items_len; idx++) {
      mongoc_server_description_t *sd =
         (mongoc_server_description_t *) mongoc_set_get_item (td->servers,
                                                              (int) idx);

      if (_is_data_node (sd)) {
         return true;
      }
   }

   return false;
}
/*
*-------------------------------------------------------------------------
*
* mongoc_topology_description_select --
*
* Return a server description of a node that is appropriate for
* the given read preference and operation type.
*
* NOTE: this method simply attempts to select a server from the
* current topology, it does not retry or trigger topology checks.
*
* NOTE: this method should only be called while holding the mutex on
* the owning topology object.
*
* Returns:
* Selected server description, or NULL upon failure.
*
* Side effects:
* None.
*
*-------------------------------------------------------------------------
*/
mongoc_server_description_t *
mongoc_topology_description_select (mongoc_topology_description_t *topology,
                                    mongoc_ss_optype_t optype,
                                    const mongoc_read_prefs_t *read_pref,
                                    int64_t local_threshold_ms)
{
   mongoc_array_t suitable_servers;
   mongoc_server_description_t *sd = NULL;
   int rand_n;

   ENTRY;

   /* Fast path for single-server topologies: the one server is selectable
    * iff it has responded to ismaster at least once. */
   if (topology->type == MONGOC_TOPOLOGY_SINGLE) {
      sd = (mongoc_server_description_t *) mongoc_set_get_item (
         topology->servers, 0);

      if (sd->has_is_master) {
         RETURN (sd);
      } else {
         TRACE ("Topology type single, [%s] is down", sd->host.host_and_port);
         RETURN (NULL);
      }
   }

   _mongoc_array_init (&suitable_servers,
                       sizeof (mongoc_server_description_t *));

   /* gather all servers matching the read preference within the latency
    * window */
   mongoc_topology_description_suitable_servers (
      &suitable_servers, optype, topology, read_pref, local_threshold_ms);
   if (suitable_servers.len != 0) {
      /* pick one suitable server at random, per the Server Selection spec */
      rand_n = _mongoc_rand_simple (&topology->rand_seed);
      sd = _mongoc_array_index (&suitable_servers,
                                mongoc_server_description_t *,
                                rand_n % suitable_servers.len);
   }

   _mongoc_array_destroy (&suitable_servers);

   if (sd) {
      TRACE ("Topology type [%s], selected [%s] [%s]",
             mongoc_topology_description_type (topology),
             mongoc_server_description_type (sd),
             sd->host.host_and_port);
   }

   RETURN (sd);
}
/*
*--------------------------------------------------------------------------
*
* mongoc_topology_description_server_by_id --
*
* Get the server description for @id, if that server is present
* in @description. Otherwise, return NULL and fill out optional
* @error.
*
* NOTE: In most cases, caller should create a duplicate of the
* returned server description. Caller should hold the mutex on the
* owning topology object while calling this method and while using
* the returned reference.
*
* Returns:
* A mongoc_server_description_t *, or NULL.
*
* Side effects:
* Fills out optional @error if server not found.
*
*--------------------------------------------------------------------------
*/
mongoc_server_description_t *
mongoc_topology_description_server_by_id (
   mongoc_topology_description_t *description, uint32_t id, bson_error_t *error)
{
   mongoc_server_description_t *sd;

   BSON_ASSERT (description);

   sd = (mongoc_server_description_t *) mongoc_set_get (description->servers,
                                                        id);
   if (sd) {
      return sd;
   }

   /* server id not present in this topology: report to the caller */
   bson_set_error (error,
                   MONGOC_ERROR_STREAM,
                   MONGOC_ERROR_STREAM_NOT_ESTABLISHED,
                   "Could not find description for node %u",
                   id);

   return NULL;
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_remove_server --
*
* If present, remove this server from this topology description.
*
* Returns:
* None.
*
* Side effects:
* Removes the server description from topology and destroys it.
*
*--------------------------------------------------------------------------
*/
static void
_mongoc_topology_description_remove_server (
   mongoc_topology_description_t *td,
   mongoc_server_description_t *sd)
{
   BSON_ASSERT (td);
   BSON_ASSERT (sd);

   /* emit the SDAM server-closed event before dropping the description;
    * removing it from the set also destroys it */
   _mongoc_topology_description_monitor_server_closed (td, sd);
   mongoc_set_rm (td->servers, sd->id);
}
/* Iteration context for _mongoc_topology_description_has_server_cb: look up
 * a server by connection address and report whether it exists and its id. */
typedef struct _mongoc_address_and_id_t {
   const char *address; /* IN */
   bool found;          /* OUT */
   uint32_t id;         /* OUT */
} mongoc_address_and_id_t;
/* mongoc_set_for_each callback: record the id of the server whose connection
 * address matches (case-insensitively), then halt iteration. */
static bool
_mongoc_topology_description_has_server_cb (void *item,
                                            void *ctx /* IN - OUT */)
{
   mongoc_server_description_t *sd = (mongoc_server_description_t *) item;
   mongoc_address_and_id_t *search = (mongoc_address_and_id_t *) ctx;

   if (strcasecmp (search->address, sd->connection_address) != 0) {
      /* not this one; keep scanning */
      return true;
   }

   search->found = true;
   search->id = sd->id;

   return false; /* match found: stop iterating */
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_has_set_version --
*
* Whether @topology's max replica set version has been set.
*
* Returns:
* True if the max setVersion was ever set.
*
* Side effects:
* None.
*
*--------------------------------------------------------------------------
*/
/* True once a max replica-set setVersion has ever been recorded on @td. */
static bool
_mongoc_topology_description_has_set_version (mongoc_topology_description_t *td)
{
   /* MONGOC_NO_SET_VERSION is the "never observed" sentinel */
   return MONGOC_NO_SET_VERSION != td->max_set_version;
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_topology_has_server --
*
* Return true if @server is in @topology. If so, place its id in
* @id if given.
*
* Returns:
* True if server is in topology, false otherwise.
*
* Side effects:
* None.
*
*--------------------------------------------------------------------------
*/
/* Look up @address in @description's server set; if present and @id is
 * non-NULL, store the server's id there. Returns whether it was found. */
static bool
_mongoc_topology_description_has_server (
   mongoc_topology_description_t *description,
   const char *address,
   uint32_t *id /* OUT */)
{
   mongoc_address_and_id_t search;

   BSON_ASSERT (description);
   BSON_ASSERT (address);

   search.address = address;
   search.found = false;

   mongoc_set_for_each (description->servers,
                        _mongoc_topology_description_has_server_cb,
                        &search);

   if (search.found && id != NULL) {
      *id = search.id;
   }

   return search.found;
}
/* Iteration context for _mongoc_label_unknown_member_cb: the address of the
 * member to find and the server type to assign if it is currently UNKNOWN. */
typedef struct _mongoc_address_and_type_t {
   const char *address;
   mongoc_server_description_type_t type;
} mongoc_address_and_type_t;
/* mongoc_set_for_each callback: if this server matches the target address and
 * is still UNKNOWN, relabel it with the requested type and stop iterating. */
static bool
_mongoc_label_unknown_member_cb (void *item, void *ctx)
{
   mongoc_server_description_t *sd = (mongoc_server_description_t *) item;
   mongoc_address_and_type_t *data = (mongoc_address_and_type_t *) ctx;
   bool address_matches =
      strcasecmp (sd->connection_address, data->address) == 0;

   if (address_matches && sd->type == MONGOC_SERVER_UNKNOWN) {
      mongoc_server_description_set_state (sd, data->type);
      return false; /* relabeled: stop iterating */
   }

   return true; /* keep scanning */
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_label_unknown_member --
*
* Find the server description with the given @address and if its
* type is UNKNOWN, set its type to @type.
*
* Returns:
* None.
*
* Side effects:
* None.
*
*--------------------------------------------------------------------------
*/
/* Find the server description at @address and, if its type is UNKNOWN,
 * set its type to @type. No effect if the address is absent or the server
 * already has a known type. */
static void
_mongoc_topology_description_label_unknown_member (
   mongoc_topology_description_t *description,
   const char *address,
   mongoc_server_description_type_t type)
{
   mongoc_address_and_type_t data;

   BSON_ASSERT (description);
   BSON_ASSERT (address);

   data.address = address;
   data.type = type;

   mongoc_set_for_each (description->servers,
                        _mongoc_label_unknown_member_cb,
                        &data);
}
/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_set_state --
 *
 *       Record a new topology type on this description.
 *
 *       NOTE(review): an earlier version of this comment said this also
 *       unblocks waiters; the current body only assigns the type, so any
 *       wake-up presumably happens elsewhere — confirm against callers.
 *
 * Returns:
 *       None.
 *
 * Side effects:
 *       Overwrites description->type.
 *
 *--------------------------------------------------------------------------
 */
static void
_mongoc_topology_description_set_state (
   mongoc_topology_description_t *description,
   mongoc_topology_description_type_t type)
{
   description->type = type;
}
/* Set the topology type to RS_WITH_PRIMARY or RS_NO_PRIMARY depending on
 * whether a primary is currently present. */
static void
_update_rs_type (mongoc_topology_description_t *topology)
{
   mongoc_topology_description_type_t new_type =
      _mongoc_topology_description_has_primary (topology)
         ? MONGOC_TOPOLOGY_RS_WITH_PRIMARY
         : MONGOC_TOPOLOGY_RS_NO_PRIMARY;

   _mongoc_topology_description_set_state (topology, new_type);
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_check_if_has_primary --
*
* If there is a primary in topology, set topology
* type to RS_WITH_PRIMARY, otherwise set it to
* RS_NO_PRIMARY.
*
* Returns:
* None.
*
* Side effects:
* Changes the topology type.
*
*--------------------------------------------------------------------------
*/
/* Re-derive the RS topology type (with/without primary). The @sd parameter
 * is unused; the signature must match transition_t for the SDAM transition
 * table. */
static void
_mongoc_topology_description_check_if_has_primary (
   mongoc_topology_description_t *td, mongoc_server_description_t *sd)
{
   _update_rs_type (td);
}
/*
*--------------------------------------------------------------------------
*
* mongoc_topology_description_invalidate_server --
*
* Invalidate a server if a network error occurred while using it in
* another part of the client. Server description is set to type
* UNKNOWN, the error is recorded, and other parameters are reset to
* defaults. Pass in the reason for invalidation in @error.
*
* NOTE: this method should only be called while holding the mutex on
* the owning topology object.
*
*--------------------------------------------------------------------------
*/
void
mongoc_topology_description_invalidate_server (
   mongoc_topology_description_t *topology,
   uint32_t id,
   const bson_error_t *error /* IN */)
{
   BSON_ASSERT (error);

   /* Feeding a NULL ismaster reply through the normal handler resets the
    * server description to type UNKNOWN and records @error as the cause. */
   mongoc_topology_description_handle_ismaster (topology, id, NULL, 0, error);
}
/*
*--------------------------------------------------------------------------
*
* mongoc_topology_description_add_server --
*
* Add the specified server to the cluster topology if it is not
* already a member. If @id, place its id in @id.
*
* NOTE: this method should only be called while holding the mutex on
* the owning topology object.
*
* Return:
* True if the server was added or already existed in the topology,
* false if an error occurred.
*
* Side effects:
* None.
*
*--------------------------------------------------------------------------
*/
bool
mongoc_topology_description_add_server (mongoc_topology_description_t *topology,
                                        const char *server,
                                        uint32_t *id /* OUT */)
{
   uint32_t server_id;
   mongoc_server_description_t *sd;

   BSON_ASSERT (topology);
   BSON_ASSERT (server);

   if (!_mongoc_topology_description_has_server (topology, server,
                                                 &server_id)) {
      /* TODO this might not be an accurate count in all cases */
      server_id = ++topology->max_server_id;

      sd = (mongoc_server_description_t *) bson_malloc0 (sizeof *sd);
      mongoc_server_description_init (sd, server, server_id);

      mongoc_set_add (topology->servers, server_id, sd);

      /* if we're in topology_new then no callbacks are registered and this is
       * a no-op. later, if we discover a new RS member this sends an event. */
      _mongoc_topology_description_monitor_server_opening (topology, sd);
   }

   if (id) {
      *id = server_id;
   }

   return true;
}
/*
*--------------------------------------------------------------------------
*
* mongoc_topology_description_update_cluster_time --
*
* Drivers Session Spec: Drivers MUST examine responses to server commands to
* see if they contain a top level field named $clusterTime formatted as
* follows:
*
* {
* ...
* $clusterTime : {
* clusterTime : <BsonTimestamp>,
* signature : {
* hash : <BsonBinaryData>,
* keyId : <BsonInt64>
* }
* },
* ...
* }
*
* Whenever a driver receives a clusterTime from a server it MUST compare it
* to the current highest seen clusterTime for the cluster. If the new
* clusterTime is higher than the highest seen clusterTime it MUST become
* the new highest seen clusterTime. Two clusterTimes are compared using
* only the BsonTimestamp value of the clusterTime embedded field (be sure to
* include both the timestamp and the increment of the BsonTimestamp in the
* comparison). The signature field does not participate in the comparison.
*
*--------------------------------------------------------------------------
*/
void
mongoc_topology_description_update_cluster_time (
   mongoc_topology_description_t *td, const bson_t *reply)
{
   bson_iter_t iter;
   bson_iter_t child;
   const uint8_t *data;
   uint32_t size;
   bson_t cluster_time;

   /* no reply, or the reply carries no $clusterTime: nothing to do */
   if (!reply || !bson_iter_init_find (&iter, reply, "$clusterTime")) {
      return;
   }

   if (!BSON_ITER_HOLDS_DOCUMENT (&iter) ||
       !bson_iter_recurse (&iter, &child)) {
      MONGOC_ERROR ("Can't parse $clusterTime");
      return;
   }

   /* view the embedded $clusterTime document without copying */
   bson_iter_document (&iter, &size, &data);
   bson_init_static (&cluster_time, data, (size_t) size);

   /* keep only the greatest clusterTime ever observed (compared by the
    * embedded BsonTimestamp; the signature is ignored — see header comment) */
   if (bson_empty (&td->cluster_time) ||
       _mongoc_cluster_time_greater (&cluster_time, &td->cluster_time)) {
      bson_destroy (&td->cluster_time);
      bson_copy_to (&cluster_time, &td->cluster_time);
   }
}
/* Add to @topology, as UNKNOWN servers, every address listed in @server's
 * hosts, arbiters, and passives arrays that is not already a member. */
static void
_mongoc_topology_description_add_new_servers (
   mongoc_topology_description_t *topology, mongoc_server_description_t *server)
{
   bson_iter_t iter;
   const bson_t *member_lists[3];
   size_t i;

   member_lists[0] = &server->hosts;
   member_lists[1] = &server->arbiters;
   member_lists[2] = &server->passives;

   for (i = 0; i < sizeof member_lists / sizeof member_lists[0]; i++) {
      bson_iter_init (&iter, member_lists[i]);

      while (bson_iter_next (&iter)) {
         /* add_server is a no-op for already-known addresses */
         mongoc_topology_description_add_server (
            topology, bson_iter_utf8 (&iter, NULL), NULL);
      }
   }
}
/* Iteration context pairing the topology with the server currently believed
 * to be primary, so callbacks can compare other members against it. */
typedef struct _mongoc_primary_and_topology_t {
   mongoc_topology_description_t *topology;
   mongoc_server_description_t *primary;
} mongoc_primary_and_topology_t;
/* mongoc_set_for_each callback: demote any server still marked RS_PRIMARY
 * that is not the newly confirmed primary. */
static bool
_mongoc_topology_description_invalidate_primaries_cb (void *item, void *ctx)
{
   mongoc_server_description_t *sd = (mongoc_server_description_t *) item;
   mongoc_primary_and_topology_t *data = (mongoc_primary_and_topology_t *) ctx;
   bool is_stale_primary =
      sd->type == MONGOC_SERVER_RS_PRIMARY && sd->id != data->primary->id;

   if (is_stale_primary) {
      /* mark Unknown and clear its replica-set election bookkeeping */
      mongoc_server_description_set_state (sd, MONGOC_SERVER_UNKNOWN);
      mongoc_server_description_set_set_version (sd, MONGOC_NO_SET_VERSION);
      mongoc_server_description_set_election_id (sd, NULL);
   }

   return true; /* always visit every server */
}
/* Remove and destroy all replica set members not in primary's hosts lists.
 *
 * Fix: the loop bounds compared a signed `int i` against the unsigned
 * `items_len` / `to_remove.len` counters, triggering implicit
 * signed/unsigned comparison; cast explicitly as done elsewhere in this
 * file (e.g. mongoc_topology_description_has_data_node). */
static void
_mongoc_topology_description_remove_unreported_servers (
   mongoc_topology_description_t *topology,
   mongoc_server_description_t *primary)
{
   mongoc_array_t to_remove;
   int i;
   mongoc_server_description_t *member;
   const char *address;

   _mongoc_array_init (&to_remove, sizeof (mongoc_server_description_t *));

   /* Accumulate servers to be removed - do this before calling
    * _mongoc_topology_description_remove_server, which could call
    * mongoc_server_description_cleanup on the primary itself if it
    * doesn't report its own connection_address in its hosts list.
    * See hosts_differ_from_seeds.json */
   for (i = 0; i < (int) topology->servers->items_len; i++) {
      member = (mongoc_server_description_t *) mongoc_set_get_item (
         topology->servers, i);
      address = member->connection_address;

      if (!mongoc_server_description_has_rs_member (primary, address)) {
         _mongoc_array_append_val (&to_remove, member);
      }
   }

   /* now it's safe to call _mongoc_topology_description_remove_server,
    * even on the primary */
   for (i = 0; i < (int) to_remove.len; i++) {
      member =
         _mongoc_array_index (&to_remove, mongoc_server_description_t *, i);

      _mongoc_topology_description_remove_server (topology, member);
   }

   _mongoc_array_destroy (&to_remove);
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_matches_me --
*
* Server Discovery And Monitoring Spec: "Removal from the topology of
* seed list members where the "me" property does not match the address
* used to connect prevents clients from being able to select a server,
* only to fail to re-select that server once the primary has responded.
*
* Returns:
* True if "me" matches "connection_address".
*
* Side Effects:
* None.
*
*--------------------------------------------------------------------------
*/
/* True when the server's self-reported "me" field (if any) matches the
 * address we used to connect to it; servers with no "me" always match. */
static bool
_mongoc_topology_description_matches_me (mongoc_server_description_t *server)
{
   BSON_ASSERT (server->connection_address);

   if (server->me) {
      return 0 == strcasecmp (server->connection_address, server->me);
   }

   /* "me" is unknown: consider it a match */
   return true;
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_update_rs_from_primary --
*
* First, determine that this is really the primary:
* -If this node isn't in the cluster, do nothing.
* -If the cluster's set name is null, set it to node's set name.
* Otherwise if the cluster's set name is different from node's,
* we found a rogue primary, so remove it from the cluster and
* check the cluster for a primary, then return.
* -If any of the members of cluster reports an address different
* from node's, node cannot be the primary.
* Now that we know this is the primary:
* -If any hosts, passives, or arbiters in node's description aren't
* in the cluster, add them as UNKNOWN servers.
* -If the cluster has any servers that aren't in node's description,
* remove and destroy them.
* Finally, check the cluster for the new primary.
*
* Returns:
* None.
*
* Side effects:
* Changes to the cluster, possible removal of cluster nodes.
*
*--------------------------------------------------------------------------
*/
static void
_mongoc_topology_description_update_rs_from_primary (
   mongoc_topology_description_t *topology, mongoc_server_description_t *server)
{
   mongoc_primary_and_topology_t data;
   bson_error_t error;

   BSON_ASSERT (topology);
   BSON_ASSERT (server);

   /* ignore replies from servers we no longer track */
   if (!_mongoc_topology_description_has_server (
          topology, server->connection_address, NULL))
      return;

   /* If server->set_name was null this function wouldn't be called from
    * mongoc_server_description_handle_ismaster(). static code analyzers however
    * don't know that so we check for it explicitly. */
   if (server->set_name) {
      /* 'Server' can only be the primary if it has the right rs name */

      if (!topology->set_name) {
         /* first primary seen: adopt its replica set name */
         topology->set_name = bson_strdup (server->set_name);
      } else if (strcmp (topology->set_name, server->set_name) != 0) {
         /* rogue primary from a different replica set: drop it */
         _mongoc_topology_description_remove_server (topology, server);
         _update_rs_type (topology);
         return;
      }
   }

   if (mongoc_server_description_has_set_version (server) &&
       mongoc_server_description_has_election_id (server)) {
      /* Server Discovery And Monitoring Spec: "The client remembers the
       * greatest electionId reported by a primary, and distrusts primaries
       * with lesser electionIds. This prevents the client from oscillating
       * between the old and new primary during a split-brain period."
       */
      if (_mongoc_topology_description_later_election (topology, server)) {
         /* stale primary: invalidate it rather than trust its reply */
         bson_set_error (&error,
                         MONGOC_ERROR_STREAM,
                         MONGOC_ERROR_STREAM_CONNECT,
                         "member's setVersion or electionId is stale");
         mongoc_topology_description_invalidate_server (
            topology, server->id, &error);
         _update_rs_type (topology);
         return;
      }

      /* server's electionId >= topology's max electionId */
      _mongoc_topology_description_set_max_election_id (topology, server);
   }

   /* track the greatest setVersion ever reported by a primary */
   if (mongoc_server_description_has_set_version (server) &&
       (!_mongoc_topology_description_has_set_version (topology) ||
        server->set_version > topology->max_set_version)) {
      _mongoc_topology_description_set_max_set_version (topology, server);
   }

   /* 'Server' is the primary! Invalidate other primaries if found */
   data.primary = server;
   data.topology = topology;
   mongoc_set_for_each (topology->servers,
                        _mongoc_topology_description_invalidate_primaries_cb,
                        &data);

   /* Add to topology description any new servers primary knows about */
   _mongoc_topology_description_add_new_servers (topology, server);

   /* Remove from topology description any servers primary doesn't know about */
   _mongoc_topology_description_remove_unreported_servers (topology, server);

   /* Finally, set topology type */
   _update_rs_type (topology);
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_update_rs_without_primary --
*
* Update cluster's information when there is no primary.
*
* Returns:
* None.
*
* Side Effects:
* Alters cluster state, may remove node from cluster.
*
*--------------------------------------------------------------------------
*/
static void
_mongoc_topology_description_update_rs_without_primary (
   mongoc_topology_description_t *topology, mongoc_server_description_t *server)
{
   BSON_ASSERT (topology);
   BSON_ASSERT (server);

   /* ignore replies from servers we no longer track */
   if (!_mongoc_topology_description_has_server (
          topology, server->connection_address, NULL)) {
      return;
   }

   /* make sure we're talking about the same replica set */
   if (server->set_name) {
      if (!topology->set_name) {
         /* first member seen: adopt its replica set name */
         topology->set_name = bson_strdup (server->set_name);
      } else if (strcmp (topology->set_name, server->set_name) != 0) {
         /* member of a different replica set: drop it */
         _mongoc_topology_description_remove_server (topology, server);
         return;
      }
   }

   /* Add new servers that this replica set member knows about */
   _mongoc_topology_description_add_new_servers (topology, server);

   /* If this server thinks there is a primary, label it POSSIBLE_PRIMARY */
   if (server->current_primary) {
      _mongoc_topology_description_label_unknown_member (
         topology, server->current_primary, MONGOC_SERVER_POSSIBLE_PRIMARY);
   }

   /* remove the member if its self-reported "me" differs from the address we
    * connected to (see _mongoc_topology_description_matches_me) */
   if (!_mongoc_topology_description_matches_me (server)) {
      _mongoc_topology_description_remove_server (topology, server);
      return;
   }
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_update_rs_with_primary_from_member --
*
* Update cluster's information when there is a primary, but the
* update is coming from another replica set member.
*
* Returns:
* None.
*
* Side Effects:
* Alters cluster state.
*
*--------------------------------------------------------------------------
*/
static void
_mongoc_topology_description_update_rs_with_primary_from_member (
   mongoc_topology_description_t *topology, mongoc_server_description_t *server)
{
   BSON_ASSERT (topology);
   BSON_ASSERT (server);

   /* ignore replies from servers we no longer track */
   if (!_mongoc_topology_description_has_server (
          topology, server->connection_address, NULL)) {
      return;
   }

   /* set_name should never be null here */
   if (strcmp (topology->set_name, server->set_name) != 0) {
      /* member of a different replica set: drop it and re-check the primary */
      _mongoc_topology_description_remove_server (topology, server);
      _update_rs_type (topology);
      return;
   }

   /* remove the member if its self-reported "me" differs from the address we
    * connected to */
   if (!_mongoc_topology_description_matches_me (server)) {
      _mongoc_topology_description_remove_server (topology, server);
      return;
   }

   /* If there is no primary, label server's current_primary as the
    * POSSIBLE_PRIMARY */
   if (!_mongoc_topology_description_has_primary (topology) &&
       server->current_primary) {
      _mongoc_topology_description_set_state (topology,
                                              MONGOC_TOPOLOGY_RS_NO_PRIMARY);
      _mongoc_topology_description_label_unknown_member (
         topology, server->current_primary, MONGOC_SERVER_POSSIBLE_PRIMARY);
   }
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_set_topology_type_to_sharded --
*
* Sets topology's type to SHARDED.
*
* Returns:
* None
*
* Side effects:
* Alter's topology's type
*
*--------------------------------------------------------------------------
*/
/* Mark the topology as SHARDED. The @sd parameter is unused; the signature
 * must match transition_t for the SDAM transition table. */
static void
_mongoc_topology_description_set_topology_type_to_sharded (
   mongoc_topology_description_t *td, mongoc_server_description_t *sd)
{
   _mongoc_topology_description_set_state (td, MONGOC_TOPOLOGY_SHARDED);
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_transition_unknown_to_rs_no_primary --
*
* Encapsulates transition from cluster state UNKNOWN to
* RS_NO_PRIMARY. Sets the type to RS_NO_PRIMARY,
* then updates the replica set accordingly.
*
* Returns:
* None.
*
* Side effects:
* Changes topology state.
*
*--------------------------------------------------------------------------
*/
/* Transition the topology from UNKNOWN to RS_NO_PRIMARY: set the new type,
 * then apply the member's reply via the no-primary update path. */
static void
_mongoc_topology_description_transition_unknown_to_rs_no_primary (
   mongoc_topology_description_t *td, mongoc_server_description_t *sd)
{
   _mongoc_topology_description_set_state (td, MONGOC_TOPOLOGY_RS_NO_PRIMARY);
   _mongoc_topology_description_update_rs_without_primary (td, sd);
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_remove_and_check_primary --
*
* Remove the server and check if the topology still has a primary.
*
* Returns:
* None.
*
* Side effects:
* Removes server from topology and destroys it.
*
*--------------------------------------------------------------------------
*/
/* Remove @sd from the topology, then recompute whether the replica set
 * still has a primary (RS_WITH_PRIMARY vs. RS_NO_PRIMARY). */
static void
_mongoc_topology_description_remove_and_check_primary (
   mongoc_topology_description_t *td, mongoc_server_description_t *sd)
{
   _mongoc_topology_description_remove_server (td, sd);
   _update_rs_type (td);
}
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_update_unknown_with_standalone --
*
* If the cluster doesn't contain this server, do nothing.
* Otherwise, if the topology only has one seed, change its
* type to SINGLE. If the topology has multiple seeds, it does not
* include us, so remove this server and destroy it.
*
* Returns:
* None.
*
* Side effects:
* Changes the topology type, might remove server from topology.
*
*--------------------------------------------------------------------------
*/
/* Handle a standalone reply while the topology type is UNKNOWN: with a
 * single seed the deployment is a standalone (type SINGLE); with multiple
 * seeds a standalone cannot belong to the cluster, so drop it. */
static void
_mongoc_topology_description_update_unknown_with_standalone (
   mongoc_topology_description_t *topology, mongoc_server_description_t *server)
{
   BSON_ASSERT (topology);
   BSON_ASSERT (server);

   if (!_mongoc_topology_description_has_server (
          topology, server->connection_address, NULL)) {
      return;
   }

   if (topology->servers->items_len > 1) {
      /* This cluster contains other servers, it cannot be a standalone. */
      _mongoc_topology_description_remove_server (topology, server);
      return;
   }

   _mongoc_topology_description_set_state (topology, MONGOC_TOPOLOGY_SINGLE);
}
/*
 *--------------------------------------------------------------------------
 *
 * This table implements the 'TopologyType' table outlined in the Server
 * Discovery and Monitoring spec. Each row represents a server type,
 * and each column represents the topology type. Given a current topology
 * type T and a newly-observed server type S, use the function at
 * state_transitions[S][T] to transition to a new state.
 * A NULL entry means the observation requires no action.
 *
 * Rows should be read like so:
 * { server type for this row
 *     UNKNOWN,
 *     SHARDED,
 *     RS_NO_PRIMARY,
 *     RS_WITH_PRIMARY
 *  }
 *
 *--------------------------------------------------------------------------
 */

/* Signature shared by every transition action in the table below. */
typedef void (*transition_t) (mongoc_topology_description_t *topology,
                              mongoc_server_description_t *server);

transition_t gSDAMTransitionTable
   [MONGOC_SERVER_DESCRIPTION_TYPES][MONGOC_TOPOLOGY_DESCRIPTION_TYPES] = {
      {
         /* UNKNOWN */
         NULL, /* MONGOC_TOPOLOGY_UNKNOWN */
         NULL, /* MONGOC_TOPOLOGY_SHARDED */
         NULL, /* MONGOC_TOPOLOGY_RS_NO_PRIMARY */
         _mongoc_topology_description_check_if_has_primary /* MONGOC_TOPOLOGY_RS_WITH_PRIMARY
                                                            */
      },
      {/* STANDALONE */
       _mongoc_topology_description_update_unknown_with_standalone,
       _mongoc_topology_description_remove_server,
       _mongoc_topology_description_remove_server,
       _mongoc_topology_description_remove_and_check_primary},
      {/* MONGOS */
       _mongoc_topology_description_set_topology_type_to_sharded,
       NULL,
       _mongoc_topology_description_remove_server,
       _mongoc_topology_description_remove_and_check_primary},
      {/* POSSIBLE_PRIMARY */
       NULL,
       NULL,
       NULL,
       NULL},
      {/* PRIMARY */
       _mongoc_topology_description_update_rs_from_primary,
       _mongoc_topology_description_remove_server,
       _mongoc_topology_description_update_rs_from_primary,
       _mongoc_topology_description_update_rs_from_primary},
      {/* SECONDARY */
       _mongoc_topology_description_transition_unknown_to_rs_no_primary,
       _mongoc_topology_description_remove_server,
       _mongoc_topology_description_update_rs_without_primary,
       _mongoc_topology_description_update_rs_with_primary_from_member},
      {/* ARBITER */
       _mongoc_topology_description_transition_unknown_to_rs_no_primary,
       _mongoc_topology_description_remove_server,
       _mongoc_topology_description_update_rs_without_primary,
       _mongoc_topology_description_update_rs_with_primary_from_member},
      {/* RS_OTHER */
       _mongoc_topology_description_transition_unknown_to_rs_no_primary,
       _mongoc_topology_description_remove_server,
       _mongoc_topology_description_update_rs_without_primary,
       _mongoc_topology_description_update_rs_with_primary_from_member},
      {/* RS_GHOST */
       NULL,
       _mongoc_topology_description_remove_server,
       NULL,
       _mongoc_topology_description_check_if_has_primary}};
#ifdef MONGOC_TRACE
/*
*--------------------------------------------------------------------------
*
* _mongoc_topology_description_type --
*
* Get this topology's type, one of the types defined in the Server
* Discovery And Monitoring Spec.
*
* NOTE: this method should only be called while holding the mutex on
* the owning topology object.
*
* Returns:
* A string.
*
* Side effects:
* None.
*
*--------------------------------------------------------------------------
*/
static const char *
_mongoc_topology_description_type (mongoc_topology_description_t *topology)
{
switch (topology->type) {
case MONGOC_TOPOLOGY_UNKNOWN:
return "Unknown";
case MONGOC_TOPOLOGY_SHARDED:
return "Sharded";
case MONGOC_TOPOLOGY_RS_NO_PRIMARY:
return "RSNoPrimary";
case MONGOC_TOPOLOGY_RS_WITH_PRIMARY:
return "RSWithPrimary";
case MONGOC_TOPOLOGY_SINGLE:
return "Single";
case MONGOC_TOPOLOGY_DESCRIPTION_TYPES:
default:
MONGOC_ERROR ("Invalid mongoc_topology_description_type_t type");
return "Invalid";
}
}
#endif
/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_update_session_timeout --
 *
 *      Recompute td->session_timeout_minutes from scratch.
 *
 *      Server Discovery and Monitoring Spec: "set
 *      logicalSessionTimeoutMinutes to the smallest
 *      logicalSessionTimeoutMinutes value among all ServerDescriptions of
 *      known ServerType. If any ServerDescription of known ServerType has
 *      a null logicalSessionTimeoutMinutes, then
 *      logicalSessionTimeoutMinutes MUST be set to null."
 *
 * --------------------------------------------------------------------------
 */
static void
_mongoc_topology_description_update_session_timeout (
   mongoc_topology_description_t *td)
{
   mongoc_server_description_t *sd;
   size_t idx;

   /* "null" (no sessions) until proven otherwise */
   td->session_timeout_minutes = MONGOC_NO_SESSIONS;

   for (idx = 0; idx < td->servers->items_len; idx++) {
      sd = (mongoc_server_description_t *) mongoc_set_get_item (td->servers,
                                                                (int) idx);

      if (!_is_data_node (sd)) {
         continue;
      }

      if (sd->session_timeout_minutes == MONGOC_NO_SESSIONS) {
         /* a single data node without session support disables sessions
          * for the whole topology */
         td->session_timeout_minutes = MONGOC_NO_SESSIONS;
         return;
      }

      if (td->session_timeout_minutes == MONGOC_NO_SESSIONS ||
          sd->session_timeout_minutes < td->session_timeout_minutes) {
         /* track the minimum across all data-bearing nodes */
         td->session_timeout_minutes = sd->session_timeout_minutes;
      }
   }
}
/*
 *--------------------------------------------------------------------------
 *
 * _mongoc_topology_description_check_compatible --
 *
 *      Fill out td.compatibility_error if any server's wire versions do
 *      not overlap with ours. Otherwise clear td.compatibility_error.
 *
 *      If any server is incompatible, the topology as a whole is considered
 *      incompatible.
 *
 *--------------------------------------------------------------------------
 */
static void
_mongoc_topology_description_check_compatible (
   mongoc_topology_description_t *td)
{
   size_t i;
   mongoc_server_description_t *sd;

   /* start from a clean slate: no compatibility error */
   memset (&td->compatibility_error, 0, sizeof (bson_error_t));

   for (i = 0; i < td->servers->items_len; i++) {
      sd = (mongoc_server_description_t *) mongoc_set_get_item (td->servers,
                                                                (int) i);
      /* servers not yet successfully contacted have no known wire range */
      if (sd->type == MONGOC_SERVER_UNKNOWN ||
          sd->type == MONGOC_SERVER_POSSIBLE_PRIMARY) {
         continue;
      }

      if (sd->min_wire_version > WIRE_VERSION_MAX) {
         /* server is too new for this driver */
         bson_set_error (
            &td->compatibility_error,
            MONGOC_ERROR_PROTOCOL,
            MONGOC_ERROR_PROTOCOL_BAD_WIRE_VERSION,
            "Server at %s requires wire version %d,"
            " but this version of libmongoc only supports up to %d",
            sd->host.host_and_port,
            sd->min_wire_version,
            WIRE_VERSION_MAX);
      } else if (sd->max_wire_version < WIRE_VERSION_MIN) {
         /* server is too old for this driver.  The minimum is formatted
          * from WIRE_VERSION_MIN instead of being hard-coded ("at least 2
          * (MongoDB 2.6)" in the old message) so the text cannot drift out
          * of sync with the actual check. */
         bson_set_error (
            &td->compatibility_error,
            MONGOC_ERROR_PROTOCOL,
            MONGOC_ERROR_PROTOCOL_BAD_WIRE_VERSION,
            "Server at %s reports wire version %d, but this"
            " version of libmongoc requires at least %d",
            sd->host.host_and_port,
            sd->max_wire_version,
            WIRE_VERSION_MIN);
      }
   }
}
/*
 *--------------------------------------------------------------------------
 *
 * mongoc_topology_description_handle_ismaster --
 *
 *      Handle an ismaster. This is called by the background SDAM process,
 *      and by client when invalidating servers. If there was an error
 *      calling ismaster, pass it in as @error.
 *
 *      NOTE: this method should only be called while holding the mutex on
 *      the owning topology object.
 *
 *--------------------------------------------------------------------------
 */
void
mongoc_topology_description_handle_ismaster (
   mongoc_topology_description_t *topology,
   uint32_t server_id,
   const bson_t *ismaster_response,
   int64_t rtt_msec,
   const bson_error_t *error /* IN */)
{
   mongoc_topology_description_t *prev_td = NULL;
   mongoc_server_description_t *prev_sd = NULL;
   mongoc_server_description_t *sd;

   BSON_ASSERT (topology);
   BSON_ASSERT (server_id != 0);

   sd = mongoc_topology_description_server_by_id (topology, server_id, NULL);
   if (!sd) {
      return; /* server already removed from topology */
   }

   /* Snapshot the "before" state, but only when the corresponding APM
    * "changed" callback is registered; the copies exist solely so those
    * callbacks can compare old vs. new descriptions. */
   if (topology->apm_callbacks.topology_changed) {
      prev_td = bson_malloc0 (sizeof (mongoc_topology_description_t));
      _mongoc_topology_description_copy_to (topology, prev_td);
   }

   if (topology->apm_callbacks.server_changed) {
      prev_sd = mongoc_server_description_new_copy (sd);
   }

   /* pass the current error in */
   mongoc_server_description_handle_ismaster (
      sd, ismaster_response, rtt_msec, error);

   mongoc_topology_description_update_cluster_time (topology,
                                                    ismaster_response);

   _mongoc_topology_description_monitor_server_changed (topology, prev_sd, sd);

   /* Dispatch through the SDAM state machine, indexed by the server's new
    * type and the topology's current type; a NULL entry means no
    * transition is required. */
   if (gSDAMTransitionTable[sd->type][topology->type]) {
      TRACE ("Transitioning to %s for %s",
             _mongoc_topology_description_type (topology),
             mongoc_server_description_type (sd));
      gSDAMTransitionTable[sd->type][topology->type](topology, sd);
   } else {
      TRACE ("No transition entry to %s for %s",
             _mongoc_topology_description_type (topology),
             mongoc_server_description_type (sd));
   }

   _mongoc_topology_description_update_session_timeout (topology);

   /* Don't bother checking wire version compatibility if we already errored */
   if (ismaster_response && (!error || !error->code)) {
      _mongoc_topology_description_check_compatible (topology);
   }

   _mongoc_topology_description_monitor_changed (prev_td, topology);

   /* release the "before" snapshots taken for the APM callbacks */
   if (prev_td) {
      mongoc_topology_description_destroy (prev_td);
      bson_free (prev_td);
   }

   if (prev_sd) {
      mongoc_server_description_destroy (prev_sd);
   }
}
/*
 *--------------------------------------------------------------------------
 *
 * mongoc_topology_description_has_readable_server --
 *
 *      SDAM Monitoring Spec:
 *      "Determines if the topology has a readable server available."
 *      Answers false when the topology as a whole is incompatible with
 *      this driver.
 *
 *      NOTE: this method should only be called by user code in an SDAM
 *      Monitoring callback, while the monitoring framework holds the mutex
 *      on the owning topology object.
 *
 *--------------------------------------------------------------------------
 */
bool
mongoc_topology_description_has_readable_server (
   mongoc_topology_description_t *td, const mongoc_read_prefs_t *prefs)
{
   bson_error_t error;

   if (!mongoc_topology_compatible (td, NULL, &error)) {
      return false;
   }

   /* any server matching the read preference will do; the local threshold
    * argument is irrelevant for a yes/no answer */
   if (mongoc_topology_description_select (td, MONGOC_SS_READ, prefs, 0)) {
      return true;
   }

   return false;
}
/*
 *--------------------------------------------------------------------------
 *
 * mongoc_topology_description_has_writable_server --
 *
 *      SDAM Monitoring Spec:
 *      "Determines if the topology has a writable server available."
 *      Answers false when the topology as a whole is incompatible with
 *      this driver.
 *
 *      NOTE: this method should only be called by user code in an SDAM
 *      Monitoring callback, while the monitoring framework holds the mutex
 *      on the owning topology object.
 *
 *--------------------------------------------------------------------------
 */
bool
mongoc_topology_description_has_writable_server (
   mongoc_topology_description_t *td)
{
   bson_error_t error;

   if (!mongoc_topology_compatible (td, NULL, &error)) {
      return false;
   }

   /* write selection ignores read preferences; local threshold is moot */
   if (mongoc_topology_description_select (td, MONGOC_SS_WRITE, NULL, 0)) {
      return true;
   }

   return false;
}
/*
 *--------------------------------------------------------------------------
 *
 * mongoc_topology_description_type --
 *
 *      Get this topology's type, one of the types defined in the Server
 *      Discovery And Monitoring Spec.  Public variant: returns the spec's
 *      long names (e.g. "ReplicaSetNoPrimary") and asserts on an invalid
 *      value, unlike the static TRACE-only helper above.
 *
 *      NOTE: this method should only be called by user code in an SDAM
 *      Monitoring callback, while the monitoring framework holds the mutex
 *      on the owning topology object.
 *
 * Returns:
 *      A string.
 *
 *--------------------------------------------------------------------------
 */
const char *
mongoc_topology_description_type (const mongoc_topology_description_t *td)
{
   switch (td->type) {
   case MONGOC_TOPOLOGY_UNKNOWN:
      return "Unknown";
   case MONGOC_TOPOLOGY_SHARDED:
      return "Sharded";
   case MONGOC_TOPOLOGY_RS_NO_PRIMARY:
      return "ReplicaSetNoPrimary";
   case MONGOC_TOPOLOGY_RS_WITH_PRIMARY:
      return "ReplicaSetWithPrimary";
   case MONGOC_TOPOLOGY_SINGLE:
      return "Single";
   case MONGOC_TOPOLOGY_DESCRIPTION_TYPES:
   default:
      /* an out-of-range type is a programmer error, hence the hard assert */
      fprintf (stderr, "ERROR: Unknown topology type %d\n", td->type);
      BSON_ASSERT (0);
   }

   /* not reached if BSON_ASSERT aborts; satisfies compilers that require a
    * return value on every path */
   return NULL;
}
/*
 *--------------------------------------------------------------------------
 *
 * mongoc_topology_description_get_servers --
 *
 *      Fetch an array of server descriptions for all known servers in the
 *      topology.  Servers whose type is still MONGOC_SERVER_UNKNOWN are
 *      omitted from the result.
 *
 * Returns:
 *      An array of deep copies, owned by the caller, which you must free
 *      with mongoc_server_descriptions_destroy_all.  *n is set to the
 *      number of descriptions returned.
 *
 *--------------------------------------------------------------------------
 */
mongoc_server_description_t **
mongoc_topology_description_get_servers (
   const mongoc_topology_description_t *td, size_t *n /* OUT */)
{
   size_t i;
   mongoc_set_t *set;
   mongoc_server_description_t **sds;
   mongoc_server_description_t *sd;

   BSON_ASSERT (td);
   BSON_ASSERT (n);

   set = td->servers;

   /* enough room for all descriptions, even if some are unknown */
   sds = (mongoc_server_description_t **) bson_malloc0 (
      sizeof (mongoc_server_description_t *) * set->items_len);

   *n = 0;

   for (i = 0; i < set->items_len; ++i) {
      sd = (mongoc_server_description_t *) mongoc_set_get_item (set, (int) i);

      if (sd->type != MONGOC_SERVER_UNKNOWN) {
         /* return a copy so the caller may outlive this description */
         sds[*n] = mongoc_server_description_new_copy (sd);
         ++(*n);
      }
   }

   return sds;
}
| {
"pile_set_name": "Github"
} |
Euclidean distances from the first data instance:
0.0000 0.2156 0.1681 0.2176 0.0501
Pearson correlation distance from the first data instance:
0.0000 0.0020 0.0000 0.0009 0.0003
| {
"pile_set_name": "Github"
} |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.connect.plugin.util;
import org.camunda.connect.spi.Connector;
import org.camunda.connect.spi.ConnectorProvider;
public class TestConnectorProvider implements ConnectorProvider {

  /**
   * Returns the id under which this provider's connector is registered.
   */
  public String getConnectorId() {
    return TestConnector.ID;
  }

  /**
   * Creates a new {@link TestConnector}; a fresh instance is returned on
   * every invocation.
   */
  public Connector<?> createConnectorInstance() {
    final String connectorId = TestConnector.ID;
    return new TestConnector(connectorId);
  }

}
| {
"pile_set_name": "Github"
} |
# Base image: CUDA 11.0 + cuDNN 8 devel flavor on CentOS 7 (includes nvcc).
FROM nvcr.io/nvidia/cuda:11.0-cudnn8-devel-centos7

#We need both CUDA and manylinux. But the CUDA Toolkit End User License Agreement says NVIDIA CUDA Driver Libraries(libcuda.so, libnvidia-ptxjitcompiler.so) are only distributable in applications that meet this criteria:
#1. The application was developed starting from a NVIDIA CUDA container obtained from Docker Hub or the NVIDIA GPU Cloud, and
#2. The resulting application is packaged as a Docker container and distributed to users on Docker Hub or the NVIDIA GPU Cloud only.
#So we use CUDA as the base image then add manylinux on top of it.

#Build manylinux2014 docker image begin
# Environment expected by auditwheel and the manylinux build scripts.
ENV AUDITWHEEL_ARCH x86_64
ENV AUDITWHEEL_PLAT manylinux2014_$AUDITWHEEL_ARCH
ENV LC_ALL en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US.UTF-8
# Put the devtoolset-9 toolchain (newer GCC) first on PATH/LD_LIBRARY_PATH.
ENV DEVTOOLSET_ROOTPATH /opt/rh/devtoolset-9/root
ENV PATH $DEVTOOLSET_ROOTPATH/usr/bin:$PATH
ENV LD_LIBRARY_PATH $DEVTOOLSET_ROOTPATH/usr/lib64:$DEVTOOLSET_ROOTPATH/usr/lib:$DEVTOOLSET_ROOTPATH/usr/lib64/dyninst:$DEVTOOLSET_ROOTPATH/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib
ENV PKG_CONFIG_PATH /usr/local/lib/pkgconfig

# Run the upstream manylinux2014 build scripts, then remove them to keep the
# image small.
COPY manylinux2014_build_scripts /manylinux2014_build_scripts
RUN bash /manylinux2014_build_scripts/build.sh 9 && rm -r manylinux2014_build_scripts
ENV SSL_CERT_FILE=/opt/_internal/certs.pem
#Build manylinux2014 docker image end

#Add our own dependencies
ADD scripts /tmp/scripts
RUN cd /tmp/scripts && /tmp/scripts/manylinux/install_centos.sh && /tmp/scripts/manylinux/install_deps.sh && rm -rf /tmp/scripts

# Run builds as a non-root user; UID/name are overridable at build time.
ARG BUILD_UID=1001
ARG BUILD_USER=onnxruntimedev
RUN adduser --uid $BUILD_UID $BUILD_USER
WORKDIR /home/$BUILD_USER
USER $BUILD_USER
ENV PATH /usr/local/gradle/bin:/usr/local/dotnet:$PATH
| {
"pile_set_name": "Github"
} |
// Generates the rules for one alert variant (e.g. .alert-success).
// $background/$border/$color are the variant's base colors.
@mixin alert-variant($background, $border, $color) {
  color: $color;
  @include gradient-bg($background);
  border-color: $border;

  hr {
    // Slightly darker divider so an <hr> stays visible on the tinted bg.
    border-top-color: darken($border, 5%);
  }

  .alert-link {
    // Links inside the alert use a darker shade of the variant text color.
    color: darken($color, 10%);
  }
}
| {
"pile_set_name": "Github"
} |
# $Id: da.po,v 1.10 2006/03/22 04:17:58 mindless Exp $
#
# Gallery - a web based photo album viewer and editor
# Copyright (C) 2000-2006 Bharat Mediratta
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA 02110-1301, USA.
#
# Previous translators (as far as known, add yourself here, please):
# - Allan Beaufour <[email protected]>
# - Bjørn Graabek <[email protected]>
#
msgid ""
msgstr ""
"Project-Id-Version: Gallery: Archive Upload 1.0.3\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2005-07-25 14:14+0200\n"
"PO-Revision-Date: 2006-02-08 19:50+0100\n"
"Last-Translator: Bjorn Graabek <[email protected]>\n"
"Language-Team: Danish <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"X-Poedit-Language: Danish\n"
"X-Poedit-Country: DENMARK\n"
msgid "Archive Upload"
msgstr "Arkivoverførsel"
msgid "Extract items from uploaded zip files"
msgstr "Pakker elementer ud fra en uploadet zipfil"
msgid "Import"
msgstr "Importer"
msgid "Archive"
msgstr "Arkiv"
msgid "Archive Upload Settings"
msgstr "Indstillinger for Arkivoverførsel "
msgid "Settings saved successfully"
msgstr "Indstillinger gemt"
msgid "This module will enable extraction of individual files from a zip archive to add each item to Gallery. You must locate or install an unzip binary on your server, then enter the path to it in the text box below. If you're on a Unix machine, don't forget to make the binary executable (<i>chmod 755 unzip</i> in the right directory should do it)"
msgstr "Dette modul tillader udpakning af individuelle filer fra et zip-arkiv til nye elementer i Gallery. Du må finde eller installere unzip programmet på din server, og indtaste stien til det i indtastningsfeltet nedenfor. Hvis du er på en Unix maskine, så husk at gøre programmet udførbart (<i>chmod 755 unzip</i> i det korrekte bibliotek burde få det til at virke)"
msgid "Path to unzip:"
msgstr "Sti til unzip:"
msgid "You must enter a path to your unzip binary"
msgstr "Du skal indtaste stien til dit unzip program"
msgid "The path you entered doesn't contain a valid unzip binary."
msgstr "Stien du indtastede indeholder ikke et gyldigt unzip program"
msgid "The path you entered isn't a valid path to an unzip binary."
msgstr "Stien som du indtastede er ikke en gyldig sti til et unzip program."
#, c-format
msgid "The unzip binary is not executable. To fix it, run <b>chmod 755 %s</b>"
msgstr "unzip programmet kan ikke udføres. For at rette det så kør <b>chmod 755 %s</b>"
msgid "Save Settings"
msgstr "Gem indstillinger"
msgid "Test Settings"
msgstr "Afprøv indstillinger"
msgid "Cancel"
msgstr "Annuler"
msgid "Reset"
msgstr "Nulstil"
msgid "unzip binary test results"
msgstr "unzip program afprøvningsresultater"
msgid "Binary Name"
msgstr "Program navn"
msgid "Pass/Fail"
msgstr "Bestået/fejlet"
msgid "Passed"
msgstr "Bestået"
msgid "Failed"
msgstr "Fejlede"
msgid "Error messages:"
msgstr "Fejlmeddelelser:"
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<dynaForm name="GroupList" width="500" mode="edit" border="0" enableTemplate="0" height="85px">
<SESSION_ID type="text" size="36" maxlength="32">
<en><![CDATA[Session Id]]></en>
<es><![CDATA[Id de Sesión]]></es>
</SESSION_ID>
<ACTION type="hidden">
</ACTION>
<ACTIONB type="button" onclick="submitThisForm(this.form);">
<en><![CDATA[GroupList]]></en>
<es>Lista de Grupo</es>
</ACTIONB>
</dynaForm>
| {
"pile_set_name": "Github"
} |
{
"$schema": "node_modules/ng-packagr/ng-package.schema.json",
"dest": "dist-editor",
"lib": {
"entryFile": "src/public_api.ts"
}
}
| {
"pile_set_name": "Github"
} |
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
// NOTE(review): auto-generated dump of a Swift class from the NewsUI
// framework; regenerate with class-dump rather than editing by hand.

#import <swiftCore/_TtCs12_SwiftObject.h>

@interface _TtC7NewsUI235MyMagazinesBlueprintModifierFactory : _TtCs12_SwiftObject
{
    // class-dump could not decode the Swift type encodings of these ivars:
    // Error parsing type: , name: issueModelFactory
    // Error parsing type: , name: bundleSubscriptionManager
    // Error parsing type: , name: offlineIssueManager
}

@end
| {
"pile_set_name": "Github"
} |
// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package kv
import (
"github.com/hanchuanchuan/goInception/store/tikv/oracle"
"github.com/hanchuanchuan/goInception/util/execdetails"
"golang.org/x/net/context"
)
// Transaction options.
const (
	// PresumeKeyNotExists indicates that when dealing with a Get operation
	// but failing to read data from cache, we presume that the key does not
	// exist in Store. The actual existence will be checked before the
	// transaction's commit.
	// This option is an optimization for frequent checks during a
	// transaction, e.g. batch inserts.
	PresumeKeyNotExists Option = iota + 1
	// PresumeKeyNotExistsError is the option key for the error.
	// When PresumeKeyNotExists is set and the condition does not match,
	// this error should be thrown.
	PresumeKeyNotExistsError
	// BinlogInfo contains the binlog data and client.
	BinlogInfo
	// SkipCheckForWrite skips the existence check during "prewrite".
	SkipCheckForWrite
	// SchemaChecker is used for checking schema-validity.
	SchemaChecker
	// IsolationLevel sets the isolation level for the current transaction.
	// The default level is SI.
	IsolationLevel
	// Priority marks the priority of this transaction.
	Priority
	// NotFillCache makes this request not touch the LRU cache of the
	// underlying storage.
	NotFillCache
	// SyncLog decides whether the WAL (write-ahead log) of this request
	// should be synchronized.
	SyncLog
	// KeyOnly retrieves only keys; it can be used in scans.
	KeyOnly
)

// Priority values for transaction priority.
const (
	// PriorityNormal is the default priority.
	PriorityNormal = iota
	// PriorityLow marks a low-priority transaction.
	PriorityLow
	// PriorityHigh marks a high-priority transaction.
	PriorityHigh
)

// IsoLevel is the transaction's isolation level.
type IsoLevel int

const (
	// SI stands for 'snapshot isolation'.
	SI IsoLevel = iota
	// RC stands for 'read committed'.
	RC
)

// These limits are enforced to make sure a transaction can be well handled
// by TiKV.
var (
	// TxnEntrySizeLimit is the limit on a single entry's size
	// (len(key) + len(value)).
	TxnEntrySizeLimit = 6 * 1024 * 1024
	// TxnEntryCountLimit is the limit on the number of entries in the
	// MemBuffer.
	TxnEntryCountLimit uint64 = 300 * 1000
	// TxnTotalSizeLimit is the limit on the sum of all entry sizes.
	TxnTotalSizeLimit = 100 * 1024 * 1024
)
// Retriever is the interface that wraps the basic Get and Seek methods.
type Retriever interface {
	// Get gets the value for key k from the kv store.
	// If the corresponding kv pair does not exist, it returns nil and
	// ErrNotExist.
	Get(k Key) ([]byte, error)
	// Seek creates an Iterator positioned on the first entry that k <= entry's key.
	// If such an entry is not found, it returns an invalid Iterator with no error.
	// The Iterator must be Closed after use.
	Seek(k Key) (Iterator, error)

	// SeekReverse creates a reversed Iterator positioned on the first entry
	// whose key is less than k.
	// The returned iterator will iterate from greater key to smaller key.
	// If k is nil, the returned iterator will be positioned at the last key.
	SeekReverse(k Key) (Iterator, error)
}

// Mutator is the interface that wraps the basic Set and Delete methods.
type Mutator interface {
	// Set sets the value for key k as v into the kv store.
	// v must NOT be nil or empty, otherwise it returns ErrCannotSetNilValue.
	Set(k Key, v []byte) error
	// Delete removes the entry for key k from the kv store.
	Delete(k Key) error
}

// RetrieverMutator is the interface that groups the Retriever and Mutator
// interfaces.
type RetrieverMutator interface {
	Retriever
	Mutator
}

// MemBuffer is an in-memory kv collection that can be used to buffer write
// operations.
type MemBuffer interface {
	RetrieverMutator
	// Size returns the sum of all keys' and values' lengths.
	Size() int
	// Len returns the number of entries in the DB.
	Len() int
	// Reset cleans up the MemBuffer.
	Reset()
	// SetCap sets the MemBuffer capacity, to reduce memory allocations.
	// Call it before using the MemBuffer, otherwise it will not work.
	SetCap(cap int)
}
// Transaction defines the interface for operations inside a Transaction.
// This is not thread safe.
type Transaction interface {
	MemBuffer
	// Commit commits the transaction operations to the KV store.
	Commit(context.Context) error
	// Rollback undoes the transaction operations on the KV store.
	Rollback() error
	// String implements the fmt.Stringer interface.
	String() string
	// LockKeys tries to lock the entries with the keys in the KV store.
	LockKeys(keys ...Key) error
	// SetOption sets an option with a value; when val is nil, it uses the
	// default value of this option.
	SetOption(opt Option, val interface{})
	// DelOption deletes an option.
	DelOption(opt Option)
	// IsReadOnly checks if the transaction has only performed read
	// operations.
	IsReadOnly() bool
	// StartTS returns the transaction start timestamp.
	StartTS() uint64
	// Valid returns if the transaction is valid.
	// A transaction becomes invalid after commit or rollback.
	Valid() bool
	// GetMemBuffer returns the MemBuffer bound to this transaction.
	GetMemBuffer() MemBuffer
	// GetSnapshot returns the snapshot of this transaction.
	GetSnapshot() Snapshot
	// SetVars sets variables to the transaction.
	SetVars(vars *Variables)
}

// Client is used to send requests to the KV layer.
type Client interface {
	// Send sends a request to the KV layer and returns a Response.
	Send(ctx context.Context, req *Request, vars *Variables) Response

	// IsRequestTypeSupported checks if reqType and subType are supported.
	IsRequestTypeSupported(reqType, subType int64) bool
}
// Request types and subtypes understood by the KV layer.
const (
	ReqTypeSelect   = 101
	ReqTypeIndex    = 102
	ReqTypeDAG      = 103
	ReqTypeAnalyze  = 104
	ReqTypeChecksum = 105

	ReqSubTypeBasic      = 0
	ReqSubTypeDesc       = 10000
	ReqSubTypeGroupBy    = 10001
	ReqSubTypeTopN       = 10002
	ReqSubTypeSignature  = 10003
	ReqSubTypeAnalyzeIdx = 10004
	ReqSubTypeAnalyzeCol = 10005
)

// Request represents a kv request.
type Request struct {
	// Tp is the request type, one of the ReqType* constants above.
	Tp      int64
	StartTs uint64
	Data    []byte
	// KeyRanges is the set of key ranges the request covers.
	KeyRanges []KeyRange
	// KeepOrder is true if the response should be returned in order.
	KeepOrder bool
	// Desc is true if the request is sent in descending order.
	Desc bool
	// Concurrency controls fan-out: when it is 1, the request is sent to a
	// single storage unit when ResponseIterator.Next is called; when it is
	// greater than 1, the request is sent to multiple storage units
	// concurrently.
	Concurrency int
	// IsolationLevel is the isolation level; the default is SI.
	IsolationLevel IsoLevel
	// Priority is the priority of this KV request; its value may be
	// PriorityNormal/PriorityLow/PriorityHigh.
	Priority int
	// NotFillCache makes this request not touch the LRU cache of the
	// underlying storage.
	NotFillCache bool
	// SyncLog decides whether the WAL (write-ahead log) of this request
	// should be synchronized.
	SyncLog bool
	// Streaming indicates using the streaming API for this request, with
	// the result that one Next() call does not correspond to a whole
	// region result.
	Streaming bool
}
// ResultSubset represents a result subset from a single storage unit.
// TODO: Find a better interface for ResultSubset that can reuse bytes.
type ResultSubset interface {
	// GetData gets the data.
	GetData() []byte
	// GetStartKey gets the start key.
	GetStartKey() Key
	// GetExecDetails gets the detailed execution information.
	GetExecDetails() *execdetails.ExecDetails
}

// Response represents the response returned from the KV layer.
type Response interface {
	// Next returns a resultSubset from a single storage unit.
	// When the full result set has been returned, nil is returned.
	Next(ctx context.Context) (resultSubset ResultSubset, err error)
	// Close closes the response.
	Close() error
}

// Snapshot defines the interface for a snapshot fetched from the KV store.
type Snapshot interface {
	Retriever
	// BatchGet gets a batch of values from the snapshot.
	BatchGet(keys []Key) (map[string][]byte, error)
	// SetPriority sets the snapshot's priority.
	SetPriority(priority int)
}

// Driver is the interface that must be implemented by a KV storage.
type Driver interface {
	// Open returns a new Storage.
	// The path is a string in a storage-specific format.
	Open(path string) (Storage, error)
}

// Storage defines the interface for storage.
// Isolation should be at least SI (SNAPSHOT ISOLATION).
type Storage interface {
	// Begin begins a transaction.
	Begin() (Transaction, error)
	// BeginWithStartTS begins a transaction with the given startTS.
	BeginWithStartTS(startTS uint64) (Transaction, error)
	// GetSnapshot gets a snapshot that is able to read any data committed
	// at a version <= ver.
	// If ver is MaxVersion or > the current max committed version, the
	// current version is used for this snapshot.
	GetSnapshot(ver Version) (Snapshot, error)
	// GetClient gets a client instance.
	GetClient() Client
	// Close closes the store.
	Close() error
	// UUID returns a unique ID which represents a Storage.
	UUID() string
	// CurrentVersion returns the current max committed version.
	CurrentVersion() (Version, error)
	// GetOracle gets a timestamp oracle client.
	GetOracle() oracle.Oracle
	// SupportDeleteRange reports whether the storage supports delete range.
	SupportDeleteRange() (supported bool)
}

// FnKeyCmp is the function type used when iterating over keys.
type FnKeyCmp func(key Key) bool

// Iterator is the interface for an iterator over a KV store.
type Iterator interface {
	// Valid reports whether the iterator is positioned on a valid entry.
	Valid() bool
	// Key returns the key of the current entry.
	Key() Key
	// Value returns the value of the current entry.
	Value() []byte
	// Next advances the iterator to the next entry.
	Next() error
	// Close closes the iterator.
	Close()
}
| {
"pile_set_name": "Github"
} |
"""Custom data source template."""
##########################################################
# DEFINE CUSTOM DATA SOURCE CLASS.
#
# For basic functionality, implement the __init__(),
# __repr__(), columns(), and __iter__() methods. To
# improve performance, implement some of all of the
# following: distinct(), sum(), count(), and reduce().
#
##########################################################
import datatest
class MySource(datatest.BaseSource):
    """Template for a custom datatest data source.

    Implement ``__init__()``, ``__repr__()``, ``columns()`` and
    ``__iter__()`` for basic functionality; override ``distinct()``,
    ``sum()``, ``count()`` and ``reduce()`` to improve performance.
    """

    # NOTE: the original template had these placeholder methods return
    # NotImplemented.  Returning a non-None value from __init__() raises
    # "TypeError: __init__() should return None", and __repr__() must
    # return a str, so the placeholders now raise NotImplementedError --
    # the conventional marker for methods a subclass/author must fill in.

    def __init__(self):
        """Initialize self."""
        raise NotImplementedError('implement __init__()')

    def __repr__(self):
        """Return a string representation of the data source."""
        raise NotImplementedError('implement __repr__()')

    def columns(self):
        """Return a list of column names."""
        raise NotImplementedError('implement columns()')

    def __iter__(self):
        """Return iterator of dictionary rows (like csv.DictReader)."""
        raise NotImplementedError('implement __iter__()')

    # IMPLEMENT SOME OR ALL OF THE FOLLOWING METHODS TO IMPROVE PERFORMANCE.

    #def distinct(self, column, **filter_by):
    #    """Returns distinct *column* values as a ResultSet."""

    #def sum(self, column, group_by=None, **filter_by):
    #    """Returns sum of *column* grouped by *group_by* as ResultMapping."""

    #def count(self, group_by=None, **filter_by):
    #    """Returns count of *column* grouped by *group_by* as ResultMapping."""

    #def reduce(self, function, column, group_by=None, initializer=None, **filter_by):
    #    """Apply *function* of two arguments cumulatively to the values in
    #    *column*, from left to right, so as to reduce the iterable to a single
    #    value.  If *column* is a string, the values are passed to *function*
    #    unchanged.  But if *column* is, itself, a function, it should accept a
    #    single dict-row and return a single value.  If *group_by* is omitted,
    #    the raw result is returned, otherwise returns a ResultMapping object.
    #    """
##########################################################
# DEFINE HELPER CLASS FOR UNIT TESTS.
##########################################################
import unittest
class TestCaseHelper(unittest.TestCase):
    """Shared fixture for the data-source unit tests defined below."""

    def setUp(self):
        """Create an instance of your custom source with the following
        columns and values:

            +-----+-----+-----+
            | foo | bar | baz |
            +-----+-----+-----+
            | a   | x   | 8   |
            | a   | y   | 4   |
            | a   | z   |     |
            | b   | x   | 5   |
            | b   |     | 1   |
            | b   | x   | 2   |
            +-----+-----+-----+
        """
        # Replace the Ellipsis placeholders below: build the raw test data
        # and construct your MySource instance from it before running the
        # generated tests.
        data = ...
        self.source = MySource(...)
##########################################################
# UNIT TESTS FOR DATA SOURCE CLASS.
#
# For the most part, the following tests should not
# be changed.
#
##########################################################
if __name__ == '__main__':
import unittest
from collections.abc import Iterator
class TestA_Helper(TestCaseHelper):
def test_01_setup(self):
"""TestCaseHelper.setUp() must define a self.source property"""
self.assertTrue(hasattr(self, 'source'))
def test_02_subclass(self):
"""self.source must be subclass of datatest.BaseSource"""
self.assertIsInstance(self.source, datatest.BaseSource)
class TestB_Repr(TestCaseHelper):
def test_03_repr(self):
"""calling __repr__() should return a short string"""
self.assertIsInstance(self.source.__repr__(), str)
class TestC_Columns(TestCaseHelper):
def test_04_sequence(self):
"""columns() should return a list"""
msg = ('if the original source has unordered columns, they should '
'be sorted alphabetically by name')
self.assertIsInstance(self.source.columns(), list, msg=msg)
def test_05_equality(self):
self.assertListEqual(self.source.columns(), ['foo', 'bar', 'baz'])
class TestD_Iter(TestCaseHelper):
def test_06_iterator(self):
"""calling __iter__() should return an iterator"""
self.assertIsInstance(self.source.__iter__(), Iterator)
def test_07_dictrows(self):
"""iterator should yield dict-rows (like csv.DictReader)"""
first_item = next(self.source.__iter__())
self.assertIsInstance(first_item, dict)
msg = 'dict keys should match column names'
self.assertSetEqual(set(first_item.keys()), set(['foo', 'bar', 'baz']), msg=msg)
def test_08_equality(self):
result = self.source.__iter__()
expecting = [
{'foo': 'a', 'bar': 'x', 'baz': '8'},
{'foo': 'a', 'bar': 'y', 'baz': '4'},
{'foo': 'a', 'bar': 'z', 'baz': ''},
{'foo': 'b', 'bar': 'x', 'baz': '5'},
{'foo': 'b', 'bar': '', 'baz': '1'},
{'foo': 'b', 'bar': 'x', 'baz': '2'},
]
compare = lambda itrbl: set(frozenset(x.items()) for x in itrbl)
self.assertSetEqual(compare(result), compare(expecting))
class TestE_Distinct(TestCaseHelper):
    """Exercise distinct(): key shape, unknown columns, keyword filters."""

    def test_09_return_type(self):
        """should return a ResultSet object"""
        return_value = self.source.distinct(['foo', 'bar'])
        self.assertIsInstance(return_value, datatest.ResultSet)

    def test_10_tuple_keys(self):
        """calling with multiple columns should return multi-tuple keys"""
        # Fix: assert on the already-computed *result* instead of issuing a
        # second, redundant distinct() query (the original recomputed it).
        result = self.source.distinct(['foo', 'bar'])
        expecting = [('a', 'x'), ('a', 'y'), ('a', 'z'), ('b', 'x'), ('b', '')]
        self.assertEqual(result, expecting)

    def test_11_simple_keys(self):
        """calling with single column should return simple keys"""
        # Fix: assert on *result* rather than re-querying the source.
        result = self.source.distinct('foo')  # <- one column (as string)
        expecting = ['a', 'b']
        self.assertEqual(result, expecting)

        # A one-item list must behave exactly like the bare string form.
        expecting = ['a', 'b']
        msg = ("Single-item lists (or other non-string containers) should "
               "be unwrapped. The ResultSet values should be source "
               "values--not 1-tuples.")
        self.assertEqual(self.source.distinct(['foo']), expecting, msg=msg)

    def test_12_unknown_column(self):
        """selecting an unknown column should raise a LookupError"""
        with self.assertRaises(LookupError):
            self.source.distinct(['foo', 'qux'])  # <- qux is unknown

    def test_13_keyword_filters(self):
        """distinct() should support **filter_by keyword behavior"""
        result = self.source.distinct(['foo', 'bar'], foo='a')
        expecting = [('a', 'x'), ('a', 'y'), ('a', 'z')]
        # Fix: this failure message previously said ['foo', 'baz'] although
        # the statement under test selects ['foo', 'bar'] (the expected
        # values are 'bar' values, so the query is right and the message
        # text was wrong).
        msg = ("\n\n"
               "The following statement should return the distinct values "
               "of 'foo' and 'bar' for records where 'foo' equals 'a':\n"
               "\n"
               " source.distinct(['foo', 'bar'], foo='a')")
        self.assertEqual(result, expecting, msg=msg)

        result = self.source.distinct(['foo', 'baz'], bar=['x', 'y'])
        expecting = [('a', '8'), ('a', '4'), ('b', '5'), ('b', '2')]
        msg = ("\n\n"
               "The following statement should return the distinct values "
               "of 'foo' and 'baz' for records where 'bar' equals 'x' or "
               "'y':\n"
               "\n"
               " source.distinct(['foo', 'baz'], bar=['x', 'y'])"
               )
        self.assertEqual(result, expecting, msg=msg)

        result = self.source.distinct(['foo', 'baz'], foo='a', bar=['x', 'y'])
        expecting = [('a', '8'), ('a', '4')]
        msg = ("\n\n"
               "The following statement should return the distinct values "
               "of 'foo' and 'baz' for records where 'foo' equals 'a' and "
               "'bar' equals 'x' or 'y':\n"
               "\n"
               " source.distinct(['foo', 'baz'], foo='a', bar=['x', 'y'])"
               )
        self.assertEqual(result, expecting, msg=msg)
class TestF_Sum(TestCaseHelper):
    """Exercise sum(): grouping shapes and keyword-filter behavior."""

    def test_14_group_by_none(self):
        """if *group_by* is omitted, should return raw result (not ResultMapping)"""
        total = self.source.sum('baz', group_by=None)
        self.assertEqual(total, 20)

    def test_15_group_by_multiple(self):
        """two *group_by* columns should return ResultMapping with 2-tuple keys"""
        got = self.source.sum('baz', group_by=['foo', 'bar'])
        want = {
            ('a', 'x'): 8,
            ('a', 'y'): 4,
            ('a', 'z'): 0,  # empty string coerced to 0
            ('b', 'x'): 7,  # 5 + 2
            ('b', ''): 1,
        }
        self.assertDictEqual(got, want)

    def test_16_group_by_one(self):
        """one *group_by* column should return ResultMapping with simple keys"""
        want = {'a': 12, 'b': 8}
        note = ("Calling sum() with a single *group_by* column should "
                "return a ResultMapping with the group_by-column's values "
                "as its keys.")
        self.assertDictEqual(self.source.sum('baz', group_by='foo'), want, msg=note)

        note = ("Single-item lists (or other non-string containers) should "
                "be unwrapped. The ResultMapping keys should be source "
                "values--not 1-tuples.")
        self.assertDictEqual(self.source.sum('baz', group_by=['foo']), want, msg=note)

    def test_17_keyword_filters(self):
        """sum() should support **filter_by keyword behavior"""
        want = {('a', 'x'): 8, ('a', 'y'): 4, ('a', 'z'): 0}
        self.assertDictEqual(self.source.sum('baz', group_by=['foo', 'bar'], foo='a'), want)

        want = {('a', 'x'): 8, ('a', 'y'): 4, ('b', 'x'): 7}
        self.assertDictEqual(self.source.sum('baz', group_by=['foo', 'bar'], bar=['x', 'y']), want)

        want = {('a', 'x'): 8, ('a', 'y'): 4}
        self.assertDictEqual(self.source.sum('baz', group_by=['foo', 'bar'], foo='a', bar=['x', 'y']), want)

        self.assertEqual(self.source.sum('baz', foo='a'), 12)
        self.assertEqual(self.source.sum('baz', bar=['x', 'y']), 19)
        self.assertEqual(self.source.sum('baz', foo='a', bar=['y', 'z']), 4)
class TestG_Count(TestCaseHelper):
    """Exercise count(): grouping shapes and keyword-filter behavior."""

    def test_18_group_by_none(self):
        """if *group_by* is omitted, should return raw result (not ResultMapping)"""
        total = self.source.count(group_by=None)
        self.assertEqual(total, 6)

    def test_19_group_by_multiple(self):
        """two *group_by* columns should return ResultMapping with 2-tuple keys"""
        got = self.source.count(group_by=['foo', 'bar'])
        want = {
            ('a', 'x'): 1,
            ('a', 'y'): 1,
            ('a', 'z'): 1,
            ('b', 'x'): 2,  # two rows where foo equals 'b' and bar equals 'x'
            ('b', ''): 1,
        }
        self.assertDictEqual(got, want)

    def test_20_group_by_one(self):
        """one *group_by* column should return ResultMapping with simple keys"""
        want = {'a': 3, 'b': 3}
        note = ("Calling count() with a single *group_by* column should "
                "return a ResultMapping with the group_by-column's values "
                "as its keys.")
        self.assertDictEqual(self.source.count(group_by='foo'), want, msg=note)

        note = ("Single-item lists (or other non-string containers) should "
                "be unwrapped. The ResultMapping keys should be source "
                "values--not 1-tuples.")
        self.assertDictEqual(self.source.count(group_by=['foo']), want, msg=note)

    def test_21_keyword_filters(self):
        """count() should support **filter_by keyword behavior"""
        want = {('a', 'x'): 1, ('a', 'y'): 1, ('a', 'z'): 1}
        self.assertDictEqual(self.source.count(group_by=['foo', 'bar'], foo='a'), want)

        want = {('a', 'x'): 1, ('a', 'y'): 1, ('b', 'x'): 2}
        self.assertDictEqual(self.source.count(group_by=['foo', 'bar'], bar=['x', 'y']), want)

        want = {('a', 'x'): 1, ('a', 'y'): 1}
        self.assertDictEqual(self.source.count(group_by=['foo', 'bar'], foo='a', bar=['x', 'y']), want)

        self.assertEqual(self.source.count(foo='a'), 3)
        self.assertEqual(self.source.count(bar=['x', 'y']), 4)
        self.assertEqual(self.source.count(foo='a', bar=['y', 'z']), 2)
class TestH_Reduce(TestCaseHelper):
    """Exercise reduce(): fold a reducer over a (possibly mapped) column,
    optionally grouped, with **filter_by keyword filtering.
    """

    def setUp(self):
        TestCaseHelper.setUp(self)

        def maximum(x, y):
            # Running-maximum reducer.  *x* is the accumulator (starts as
            # None) and *y* is the raw source value, a string that may be
            # empty; empty values leave the accumulator unchanged.
            if x and y:
                return max(x, float(y))
            if y:
                return float(y)
            return x
        self.max = maximum

    def test_22_group_by_none(self):
        """if *group_by* is omitted, should return raw result (not ResultMapping)"""
        self.assertEqual(self.source.reduce(self.max, 'baz', group_by=None), 8)

    def test_23_group_by_multiple(self):
        """two *group_by* columns should return ResultMapping with 2-tuple keys"""
        expected = {
            ('a', 'x'): 8,
            ('a', 'y'): 4,
            ('a', 'z'): None,  # <- Empty string left as None.
            ('b', 'x'): 5,     # <- 5 > 2
            ('b', '' ): 1,
        }
        self.assertDictEqual(self.source.reduce(self.max, 'baz', group_by=['foo', 'bar']), expected)

    def test_24_group_by_one(self):
        """one *group_by* column should return ResultMapping with simple keys"""
        expected = {'a': 8, 'b': 5}
        msg = ("Calling reduce() with a single *group_by* column should "
               "return a ResultMapping with the group_by-column's values "
               "as its keys.")
        self.assertDictEqual(self.source.reduce(self.max, 'baz', group_by='foo'), expected, msg=msg)

        # A one-item list must behave exactly like the bare string form.
        expected = {'a': 8, 'b': 5}
        msg = ("Single-item lists (or other non-string containers) should "
               "be unwrapped. The ResultMapping keys should be source "
               "values--not 1-tuples.")
        self.assertDictEqual(self.source.reduce(self.max, 'baz', group_by=['foo']), expected, msg=msg)

    def test_25_mapping_column(self):
        """when *column* is a callable function, it is used to map values"""
        def mapfn(row):  # <- Maps from "row" to "baz times two".
            baz = row['baz']
            if baz:
                baz = float(baz) * 2
            return baz
        expected = {
            ('a', 'x'): 16,
            ('a', 'y'): 8,
            ('a', 'z'): None,  # <- Empty remains unchanged.
            ('b', 'x'): 10,    # <- Max of 10 and 4 (5 * 2 and 2 * 2).
            ('b', '' ): 2,
        }
        msg = ('When *column* is a callable function (instead of just a '
               'column name), it must accept a dict-row and return a '
               'single value. A callable column is used to map values '
               'before running the reduce *function*.')
        self.assertDictEqual(self.source.reduce(self.max, mapfn, group_by=['foo', 'bar']), expected, msg=msg)

        msg = ('Callable *column* support should also work when group_by '
               'is omitted')
        self.assertEqual(self.source.reduce(self.max, mapfn, group_by=None), 16, msg=msg)

    def test_26_keyword_filters(self):
        """reduce() should support **filter_by keyword behavior"""
        expected = {('a', 'x'): 8, ('a', 'y'): 4, ('a', 'z'): None}
        self.assertDictEqual(self.source.reduce(self.max, 'baz', group_by=['foo', 'bar'], foo='a'), expected)

        expected = {('a', 'x'): 8, ('a', 'y'): 4, ('b', 'x'): 5}
        self.assertDictEqual(self.source.reduce(self.max, 'baz', group_by=['foo', 'bar'], bar=['x', 'y']), expected)

        expected = {('a', 'x'): 8, ('a', 'y'): 4}
        self.assertDictEqual(self.source.reduce(self.max, 'baz', group_by=['foo', 'bar'], foo='a', bar=['x', 'y']), expected)

        self.assertEqual(self.source.reduce(self.max, 'baz', foo='a'), 8)
        self.assertEqual(self.source.reduce(self.max, 'baz', bar=['x', 'y']), 8)
        self.assertEqual(self.source.reduce(self.max, 'baz', foo='a', bar=['y', 'z']), 4)
# Run every TestCase defined above; failfast stops at the first failing
# test so a broken fixture does not cascade into dozens of failures.
unittest.main(failfast=True)
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<ArrayOfDVBTTuning xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<DVBTTuning>
<Frequency>562000</Frequency>
<BandWidth>8</BandWidth>
<Offset>125</Offset>
</DVBTTuning>
<DVBTTuning>
<Frequency>650000</Frequency>
<BandWidth>8</BandWidth>
<Offset>125</Offset>
</DVBTTuning>
</ArrayOfDVBTTuning> | {
"pile_set_name": "Github"
} |
From 5d35ff6d904bcbf00bee99ea493db47360e756bc Mon Sep 17 00:00:00 2001
From: Eric Anholt <[email protected]>
Date: Thu, 21 Dec 2017 13:32:09 -0800
Subject: [PATCH 184/454] drm/vc4: Flush the caches before the render jobs, as
well.
If the frame samples from a render target that was just written, its
cache flush during the binning step may have occurred before the
previous frame's RCL was completed. Flush the texture caches again
before starting each RCL job to make sure that the sampling of the
previous RCL's output is correct.
Fixes flickering in the top left of 3DMMES Taiji.
Signed-off-by: Eric Anholt <[email protected]>
Fixes: ca26d28bbaa3 ("drm/vc4: improve throughput by pipelining binning and rendering jobs")
---
drivers/gpu/drm/vc4/vc4_gem.c | 21 +++++++++++++++++++++
1 file changed, 21 insertions(+)
--- a/drivers/gpu/drm/vc4/vc4_gem.c
+++ b/drivers/gpu/drm/vc4/vc4_gem.c
@@ -436,6 +436,19 @@ vc4_flush_caches(struct drm_device *dev)
VC4_SET_FIELD(0xf, V3D_SLCACTL_ICC));
}
+static void
+vc4_flush_texture_caches(struct drm_device *dev)
+{
+ struct vc4_dev *vc4 = to_vc4_dev(dev);
+
+ V3D_WRITE(V3D_L2CACTL,
+ V3D_L2CACTL_L2CCLR);
+
+ V3D_WRITE(V3D_SLCACTL,
+ VC4_SET_FIELD(0xf, V3D_SLCACTL_T1CC) |
+ VC4_SET_FIELD(0xf, V3D_SLCACTL_T0CC));
+}
+
/* Sets the registers for the next job to be actually be executed in
* the hardware.
*
@@ -474,6 +487,14 @@ vc4_submit_next_render_job(struct drm_de
if (!exec)
return;
+ /* A previous RCL may have written to one of our textures, and
+ * our full cache flush at bin time may have occurred before
+ * that RCL completed. Flush the texture cache now, but not
+ * the instructions or uniforms (since we don't write those
+ * from an RCL).
+ */
+ vc4_flush_texture_caches(dev);
+
submit_cl(dev, 1, exec->ct1ca, exec->ct1ea);
}
| {
"pile_set_name": "Github"
} |
## extra.10.conf
# ===========================
#
# This file contains the 20 "also-rans", settings which can be ignored by most
# users, but are probably among the second run of settings that you want to
# adjust given a chance. As with simple.conf, AvRAM is your amount of available
# RAM for Postgres.
# General
# ------------
# authentication_timeout = 20s
# set to the same as your application's connection timeout
# cluster_name = 'postgres-1'
# give this Postgres instance a distinct name in case it is running on the same
# machine as other postgreses.
# event_source = 'postgres-1'
# give it a distinct name in the logs, too
# Performance
# -----------
# synchronous_commit = off
# if limited data loss is acceptable (up to 400ms), and your storage is very
# high-latency, then turning off sync commit can be a big performance win.
# wal_buffers = 128MB
# increasing this to as much as 128MB has been shown to have a performance
# benefit in servers with more than 8 cores and a heavy concurrent workload.
# wal_compression = on
# compressing WAL writes can have a significant performance benefit, especially
# when additional writing for logical replication is turned on. If you are seeing
# IOwaits on the WAL drive, try this.
# stats_temp_directory = '/run/postgresql/pg_stat_tmp'
# moving the stats temp directory to a tmpfs or other ramdisk will reduce
# IOPS from statistics updating. The drawback is losing your update count
# stats on a crash, but that's a good tradeoff for most people.
# autovacuum = off
# most users want autovacuum on and should be prevented from turning it off.
# however, analytics databases which are updated by bulk load, where
# manual VACUUMs and ANALYZEs are part of the load procedure, can and should
# disable autovacuum.
# cpu_index_tuple_cost = 0.001
# cpu_operator_cost = 0.0005
# If you want to give the query planner a slight nudge towards using more indexes
# especially multi-column indexes, try lowering its estimate of CPU requirements
# for them.
# default_statistics_target = 1000
# for analytics DBs and/or tables over 10 million records, it can be useful
# to increase the sample size for statistics. It's better to do this by table
# and column, however.
# effective_io_concurrency = 4
# have, fast, high-concurrency storage? Let the query planner know.
# replacement_sort_tuples = 0
# this setting's default is anti-performance in 10 due to improvements elsewhere,
# and it will be removed from Postgres 11.
# old_snapshot_threshold = 60min
# this setting is a way of minimizing the impact long-running statements or
# transactions have on the ability of the database to do maintenance and
# update indexes. If set, applications need to be prepared to re-run long
# batch jobs in the event of a "snapshot too old" failure.
# File Security
# --------------
# unix_socket_directories = '/run/postgresql' #varies by OS
# group = 'postgres'
# permissions = 770
# some installers put the PostgreSQL unix socket in /tmp, which offers several
# possible security exploits, even in containers.
# Parallel Query
# --------------
#
# The default settings for parallel query seem like they offer just enough
# parallel to be annoying. You're going to want to either increase them to
# make actual use of parallel query, or disable the feature. Here's some
# recommendations for either case.
# Disable parallel query by default:
# max_parallel_workers = 0
# Enable parallel query by default for an analytics database:
# max_background_workers = Cores + 2 # more if you have worker extensions
# max_parallel_workers = ( Cores )
# max_parallel_workers_per_gather = ( 2 * Cores ) / ( # of expected sessions )
# Example: 32 core machine, for an analytics database which usually supports
# around 4 parallel reports, and has 2 extra background workers (a
# partition manager):
# max_background_workers = 36
# max_parallel_workers = 32
# max_parallel_workers_per_gather = 16
# Tuple Freezing
# -------------------
# "Freezing" is important long-term maintenance that Postgres does to clean
# out old transactionIDs from the database files. The default settings are
# overly conservative for efficiency, so if you have a database that goes
# through more than a hundred million XIDs per month, you may want to change
# them. More info here:
# http://www.databasesoup.com/2012/09/freezing-your-tuples-off-part-1.html
# This has become less of a concern with the Freeze Map, but still needs
# tweaking.
# autovacuum_freeze_max_age = 500000000
# vacuum_freeze_min_age = 50000 #should really be 1 hour of XIDs
# vacuum_freeze_table_age = 400000000
# vacuum_multixact_freeze_min_age = 100000 #2x vac_freeze_min_age
| {
"pile_set_name": "Github"
} |
HttpPurchaseOrderClientExample
Description
===========
This example shows how to use the service proxy to talk to an HTTP based PurchaseOrder service.
This example also demonstrates how to configure the wsutil tool to generate the stub files with
WS_STRING type instead of WCHAR* for strings in the XML schema and interface definitions in
XSD/WSDL files using /string:WS_STRING command line option. Read the MSDN documentation to
understand how to decide which string type to use.
Security Note
=============
This sample is provided for educational purpose only to demonstrate how to use
Windows Web Services API. It is not intended to be used without modifications
in a production environment and it has not been tested in a production
environment. Microsoft assumes no liability for incidental or consequential
damages should the sample code be used for purposes other than as intended.
Prerequisites
=============
In order to run this sample on Windows XP, Windows Vista, Windows Server 2003
and Windows Server 2008, you may need to install a Windows Update that contains
the runtime DLL for Windows Web Services API. Please consult with the
documentation on MSDN for more information.
Building the Sample
===================
To build the HttpPurchaseOrderClientExample sample
1. Open the solution HttpPurchaseOrderClientExample.sln in Visual Studio.
2. On the Build menu, click Build.
Running the Sample
==================
To run the HttpPurchaseOrderClientExample sample
1. Run HttpPurchaseOrderClientExample by clicking Start Without Debugging on the Debug menu.
© Microsoft Corporation. All rights reserved.
| {
"pile_set_name": "Github"
} |
{
"name": "Vert.x on Heroku",
"description": "A barebones Vert.x app, which can easily be deployed to Heroku.",
"addons": [ "heroku-postgresql" ],
"env": {
"MAVEN_CUSTOM_OPTS": {
"description": "Custom Maven options for the buildpack",
"value": "-DskipTests -pl heroku-example"
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* Zoran zr36057/zr36067 PCI controller driver, for the
* Pinnacle/Miro DC10/DC10+/DC30/DC30+, Iomega Buz, Linux
* Media Labs LML33/LML33R10.
*
* This part handles device access (PCI/I2C/codec/...)
*
* Copyright (C) 2000 Serguei Miridonov <[email protected]>
*
* Currently maintained by:
* Ronald Bultje <[email protected]>
* Laurent Pinchart <[email protected]>
* Mailinglist <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include <linux/types.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/vmalloc.h>
#include <linux/byteorder/generic.h>
#include <linux/interrupt.h>
#include <linux/proc_fs.h>
#include <linux/i2c.h>
#include <linux/i2c-algo-bit.h>
#include <linux/videodev.h>
#include <linux/spinlock.h>
#include <linux/sem.h>
#include <linux/pci.h>
#include <linux/video_decoder.h>
#include <linux/video_encoder.h>
#include <linux/delay.h>
#include <linux/wait.h>
#include <asm/io.h>
#include "videocodec.h"
#include "zoran.h"
#include "zoran_device.h"
#define IRQ_MASK ( ZR36057_ISR_GIRQ0 | \
ZR36057_ISR_GIRQ1 | \
ZR36057_ISR_JPEGRepIRQ )
extern const struct zoran_format zoran_formats[];
extern int *zr_debug;
#define dprintk(num, format, args...) \
do { \
if (*zr_debug >= num) \
printk(format, ##args); \
} while (0)
static int lml33dpath = 0; /* 1 will use digital path in capture
* mode instead of analog. It can be
* used for picture adjustments using
* tool like xawtv while watching image
* on TV monitor connected to the output.
* However, due to absence of 75 Ohm
* load on Bt819 input, there will be
* some image imperfections */
module_param(lml33dpath, bool, 0);
MODULE_PARM_DESC(lml33dpath,
"Use digital path capture mode (on LML33 cards)");
static void
zr36057_init_vfe (struct zoran *zr);
/*
* General Purpose I/O and Guest bus access
*/
/*
* This is a bit tricky. When a board lacks a GPIO function, the corresponding
* GPIO bit number in the card_info structure is set to 0.
*/
/*
 * GPIO - drive one general-purpose output pin of the zr36057.
 * @zr:    the board state
 * @bit:   GPIO bit number from the card description table (-1 when the
 *         card lacks the function in question)
 * @value: non-zero drives the pin high, zero drives it low
 *
 * Read-modify-write of the upper byte of the GPIO register; a bit number
 * of -1 yields a zero mask, so the write is a harmless no-op then.
 */
void
GPIO (struct zoran *zr,
      int bit,
      unsigned int value)
{
	u32 reg;
	u32 mask;

	/* Make sure the bit number is legal
	 * A bit number of -1 (lacking) gives a mask of 0,
	 * making it harmless */
	mask = (1 << (24 + bit)) & 0xff000000;
	reg = btread(ZR36057_GPPGCR1) & ~mask;
	if (value) {
		reg |= mask;
	}
	btwrite(reg, ZR36057_GPPGCR1);
	/* brief settle delay before the caller touches dependent hardware */
	udelay(1);
}
/*
* Wait til post office is no longer busy
*/
/*
 * post_office_wait - busy-wait until the guest-bus "post office" is idle.
 *
 * Spins (no upper bound) until the POPen bit clears, then inspects the
 * timeout flag.  On cards where the \GWS line is not wired up
 * (LML33/Buz, card.gws_not_connected set) POTime is always set, so the
 * timeout is ignored there.
 *
 * Returns 0 on success, -1 on a genuine guest-bus timeout.
 */
int
post_office_wait (struct zoran *zr)
{
	u32 por;

//	while (((por = btread(ZR36057_POR)) & (ZR36057_POR_POPen | ZR36057_POR_POTime)) == ZR36057_POR_POPen) {
	while ((por = btread(ZR36057_POR)) & ZR36057_POR_POPen) {
		/* wait for something to happen */
	}
	if ((por & ZR36057_POR_POTime) && !zr->card.gws_not_connected) {
		/* In LML33/BUZ \GWS line is not connected, so it has always timeout set */
		dprintk(1, KERN_INFO "%s: pop timeout %08x\n", ZR_DEVNAME(zr),
			por);
		return -1;
	}

	return 0;
}
/*
 * post_office_write - write one byte to a register of a guest-bus device.
 * @guest: guest select (low 3 bits used)
 * @reg:   register within the guest (low 3 bits used)
 * @value: byte to write (low 8 bits used)
 *
 * Builds the post-office request word (PODir set = write direction) and
 * waits for completion.  Returns 0 on success, -1 on guest-bus timeout.
 */
int
post_office_write (struct zoran *zr,
		   unsigned int guest,
		   unsigned int reg,
		   unsigned int value)
{
	u32 por;

	por =
	    ZR36057_POR_PODir | ZR36057_POR_POTime | ((guest & 7) << 20) |
	    ((reg & 7) << 16) | (value & 0xFF);
	btwrite(por, ZR36057_POR);

	return post_office_wait(zr);
}
/*
 * post_office_read - read one byte from a register of a guest-bus device.
 * @guest: guest select (low 3 bits used)
 * @reg:   register within the guest (low 3 bits used)
 *
 * Returns the byte read (0-255) on success, or -1 on guest-bus timeout.
 */
int
post_office_read (struct zoran *zr,
		  unsigned int guest,
		  unsigned int reg)
{
	u32 por;

	por = ZR36057_POR_POTime | ((guest & 7) << 20) | ((reg & 7) << 16);
	btwrite(por, ZR36057_POR);
	if (post_office_wait(zr) < 0) {
		return -1;
	}

	return btread(ZR36057_POR) & 0xFF;
}
/*
* detect guests
*/
/*
 * dump_guests - debug helper: print register 0 of guests 1 through 7.
 *
 * Guest 0 (the JPEG codec) is deliberately skipped here.  Output is
 * produced only when the driver debug level is above 2.
 */
static void
dump_guests (struct zoran *zr)
{
	if (*zr_debug > 2) {
		int i, guest[8];

		for (i = 1; i < 8; i++) {	// Don't read jpeg codec here
			guest[i] = post_office_read(zr, i, 0);
		}

		printk(KERN_INFO "%s: Guests:", ZR_DEVNAME(zr));

		for (i = 1; i < 8; i++) {
			printk(" 0x%02x", guest[i]);
		}
		printk("\n");
	}
}
static inline unsigned long
get_time (void)
{
struct timeval tv;
do_gettimeofday(&tv);
return (1000000 * tv.tv_sec + tv.tv_usec);
}
/*
 * detect_guest_activity - diagnostic scan for changing guest registers.
 *
 * Samples register 0 of guests 1-7 (the JPEG codec at guest 0 is never
 * read here), then polls for up to 10000 * 10 us (~100 ms), logging the
 * first 8 observed value changes together with the microseconds elapsed
 * since the previous change.  Pure debugging aid; it only performs
 * post-office reads and printk output.
 */
void
detect_guest_activity (struct zoran *zr)
{
	/* change[j] = { usec since previous change, guest number, new value } */
	int timeout, i, j, res, guest[8], guest0[8], change[8][3];
	unsigned long t0, t1;

	dump_guests(zr);
	printk(KERN_INFO "%s: Detecting guests activity, please wait...\n",
	       ZR_DEVNAME(zr));
	for (i = 1; i < 8; i++) {	// Don't read jpeg codec here
		guest0[i] = guest[i] = post_office_read(zr, i, 0);
	}

	timeout = 0;
	j = 0;
	t0 = get_time();
	while (timeout < 10000) {
		udelay(10);
		timeout++;
		/* the (j < 8) guard keeps writes inside the change[] array */
		for (i = 1; (i < 8) && (j < 8); i++) {
			res = post_office_read(zr, i, 0);
			if (res != guest[i]) {
				t1 = get_time();
				change[j][0] = (t1 - t0);
				t0 = t1;
				change[j][1] = i;
				change[j][2] = res;
				j++;
				guest[i] = res;
			}
		}
		if (j >= 8)
			break;
	}

	printk(KERN_INFO "%s: Guests:", ZR_DEVNAME(zr));

	for (i = 1; i < 8; i++) {
		printk(" 0x%02x", guest0[i]);
	}
	printk("\n");
	if (j == 0) {
		printk(KERN_INFO "%s: No activity detected.\n", ZR_DEVNAME(zr));
		return;
	}
	for (i = 0; i < j; i++) {
		printk(KERN_INFO "%s: %6d: %d => 0x%02x\n", ZR_DEVNAME(zr),
		       change[i][0], change[i][1], change[i][2]);
	}
}
/*
* JPEG Codec access
*/
/*
 * jpeg_codec_sleep - put the JPEG codec to sleep or wake it up.
 * @sleep: non-zero puts the codec into sleep, zero wakes it
 *
 * Note the GPIO level is inverted (!sleep).  Waking uses a long 500 us
 * settle delay before the codec may be accessed again; entering sleep
 * needs only 2 us.
 */
void
jpeg_codec_sleep (struct zoran *zr,
		  int sleep)
{
	GPIO(zr, zr->card.gpio[GPIO_JPEG_SLEEP], !sleep);
	if (!sleep) {
		dprintk(3,
			KERN_DEBUG
			"%s: jpeg_codec_sleep() - wake GPIO=0x%08x\n",
			ZR_DEVNAME(zr), btread(ZR36057_GPPGCR1));
		udelay(500);
	} else {
		dprintk(3,
			KERN_DEBUG
			"%s: jpeg_codec_sleep() - sleep GPIO=0x%08x\n",
			ZR_DEVNAME(zr), btread(ZR36057_GPPGCR1));
		udelay(2);
	}
}
/*
 * jpeg_codec_reset - hard-reset the JPEG codec.
 *
 * Wakes the codec first, then pulses reset: either through a guest-bus
 * write on cards that expose a reset guest register
 * (gpcs[GPCS_JPEG_RESET] != 0xff), or by toggling the dedicated reset
 * GPIO low and then high.
 *
 * Always returns 0.
 */
int
jpeg_codec_reset (struct zoran *zr)
{
	/* Take the codec out of sleep */
	jpeg_codec_sleep(zr, 0);

	if (zr->card.gpcs[GPCS_JPEG_RESET] != 0xff) {
		post_office_write(zr, zr->card.gpcs[GPCS_JPEG_RESET], 0,
				  0);
		udelay(2);
	} else {
		GPIO(zr, zr->card.gpio[GPIO_JPEG_RESET], 0);
		udelay(2);
		GPIO(zr, zr->card.gpio[GPIO_JPEG_RESET], 1);
		udelay(2);
	}

	return 0;
}
/*
* Set the registers for the size we have specified. Don't bother
* trying to understand this without the ZR36057 manual in front of
* you [AC].
*
* PS: The manual is free for download in .pdf format from
* www.zoran.com - nicely done those folks.
*/
/*
 * zr36057_adjust_vfe - tweak the video front end for the given codec mode.
 *
 * For motion decompression, ExtFl is cleared and the horizontal config
 * register is nudged (bit 10 plus 1 added) on non-LML33R10 cards; for
 * compression/idle the nudge is undone and ExtFl is set except in NTSC
 * or on LML33R10/PAL.  The precise purpose of the bit-10/+1 adjustment
 * is inherited from the original driver -- see the HStart comment in
 * zr36057_set_vfe() for the related card quirk; TODO: confirm against
 * the ZR36057 data sheet.
 */
static void
zr36057_adjust_vfe (struct zoran *zr,
		    enum zoran_codec_mode mode)
{
	u32 reg;

	switch (mode) {
	case BUZ_MODE_MOTION_DECOMPRESS:
		btand(~ZR36057_VFESPFR_ExtFl, ZR36057_VFESPFR);
		reg = btread(ZR36057_VFEHCR);
		if ((reg & (1 << 10)) && zr->card.type != LML33R10) {
			reg += ((1 << 10) | 1);
		}
		btwrite(reg, ZR36057_VFEHCR);
		break;
	case BUZ_MODE_MOTION_COMPRESS:
	case BUZ_MODE_IDLE:
	default:
		if (zr->norm == VIDEO_MODE_NTSC ||
		    (zr->card.type == LML33R10 &&
		     zr->norm == VIDEO_MODE_PAL))
			btand(~ZR36057_VFESPFR_ExtFl, ZR36057_VFESPFR);
		else
			btor(ZR36057_VFESPFR_ExtFl, ZR36057_VFESPFR);
		reg = btread(ZR36057_VFEHCR);
		if (!(reg & (1 << 10)) && zr->card.type != LML33R10) {
			reg -= ((1 << 10) | 1);
		}
		btwrite(reg, ZR36057_VFEHCR);
		break;
	}
}
/*
* set geometry
*/
/*
 * zr36057_set_vfe - program the zr36057 video front end for a capture size.
 * @video_width:  requested width in pixels (BUZ_MIN_WIDTH .. tvnorm Wa)
 * @video_height: requested height in lines (BUZ_MIN_HEIGHT .. tvnorm Ha)
 * @format:       output pixel format (palette and depth)
 *
 * Computes horizontal/vertical crop windows and decimation factors from
 * the active TV norm, then writes the horizontal/vertical config, the
 * scaler/pixel-format register, the display configuration register and
 * (when an overlay mask exists) the overlay mask pointers.  Finishes by
 * re-applying the codec-mode dependent tweaks via zr36057_adjust_vfe().
 * Invalid norm or size is logged and the function returns early without
 * touching the hardware.
 */
static void
zr36057_set_vfe (struct zoran *zr,
		 int video_width,
		 int video_height,
		 const struct zoran_format *format)
{
	struct tvnorm *tvn;
	unsigned HStart, HEnd, VStart, VEnd;
	unsigned DispMode;
	unsigned VidWinWid, VidWinHt;
	unsigned hcrop1, hcrop2, vcrop1, vcrop2;
	unsigned Wa, We, Ha, He;
	unsigned X, Y, HorDcm, VerDcm;
	u32 reg;
	unsigned mask_line_size;

	tvn = zr->timing;

	Wa = tvn->Wa;
	Ha = tvn->Ha;

	dprintk(2, KERN_INFO "%s: set_vfe() - width = %d, height = %d\n",
		ZR_DEVNAME(zr), video_width, video_height);

	if (zr->norm != VIDEO_MODE_PAL &&
	    zr->norm != VIDEO_MODE_NTSC &&
	    zr->norm != VIDEO_MODE_SECAM) {
		dprintk(1,
			KERN_ERR "%s: set_vfe() - norm = %d not valid\n",
			ZR_DEVNAME(zr), zr->norm);
		return;
	}
	if (video_width < BUZ_MIN_WIDTH ||
	    video_height < BUZ_MIN_HEIGHT ||
	    video_width > Wa || video_height > Ha) {
		dprintk(1, KERN_ERR "%s: set_vfe: w=%d h=%d not valid\n",
			ZR_DEVNAME(zr), video_width, video_height);
		return;
	}

	/**** zr36057 ****/

	/* horizontal: X is the 6-bit scale factor, HorDcm the decimation */
	VidWinWid = video_width;
	X = (VidWinWid * 64 + tvn->Wa - 1) / tvn->Wa;
	We = (VidWinWid * 64) / X;
	HorDcm = 64 - X;
	hcrop1 = 2 * ((tvn->Wa - We) / 4);
	hcrop2 = tvn->Wa - We - hcrop1;
	HStart = tvn->HStart ? tvn->HStart : 1;
	/* (Ronald) Original comment:
	 * "| 1 Doesn't have any effect, tested on both a DC10 and a DC10+"
	 * this is false. It inverses chroma values on the LML33R10 (so Cr
	 * suddenly is shown as Cb and reverse, really cool effect if you
	 * want to see blue faces, not useful otherwise). So don't use |1.
	 * However, the DC10 has '0' as HStart, but does need |1, so we
	 * use a dirty check...
	 */
	HEnd = HStart + tvn->Wa - 1;
	HStart += hcrop1;
	HEnd -= hcrop2;
	reg = ((HStart & ZR36057_VFEHCR_Hmask) << ZR36057_VFEHCR_HStart)
	    | ((HEnd & ZR36057_VFEHCR_Hmask) << ZR36057_VFEHCR_HEnd);
	if (zr->card.vfe_pol.hsync_pol)
		reg |= ZR36057_VFEHCR_HSPol;
	btwrite(reg, ZR36057_VFEHCR);

	/* Vertical: interlaced display mode when height exceeds one field */
	DispMode = !(video_height > BUZ_MAX_HEIGHT / 2);
	VidWinHt = DispMode ? video_height : video_height / 2;
	Y = (VidWinHt * 64 * 2 + tvn->Ha - 1) / tvn->Ha;
	He = (VidWinHt * 64) / Y;
	VerDcm = 64 - Y;
	vcrop1 = (tvn->Ha / 2 - He) / 2;
	vcrop2 = tvn->Ha / 2 - He - vcrop1;
	VStart = tvn->VStart;
	VEnd = VStart + tvn->Ha / 2;	// - 1; FIXME SnapShot times out with -1 in 768*576 on the DC10 - LP
	VStart += vcrop1;
	VEnd -= vcrop2;
	reg = ((VStart & ZR36057_VFEVCR_Vmask) << ZR36057_VFEVCR_VStart)
	    | ((VEnd & ZR36057_VFEVCR_Vmask) << ZR36057_VFEVCR_VEnd);
	if (zr->card.vfe_pol.vsync_pol)
		reg |= ZR36057_VFEVCR_VSPol;
	btwrite(reg, ZR36057_VFEVCR);

	/* scaler and pixel format */
	reg = 0;
	reg |= (HorDcm << ZR36057_VFESPFR_HorDcm);
	reg |= (VerDcm << ZR36057_VFESPFR_VerDcm);
	reg |= (DispMode << ZR36057_VFESPFR_DispMode);
	if (format->palette != VIDEO_PALETTE_YUV422 && format->palette != VIDEO_PALETTE_YUYV)
		reg |= ZR36057_VFESPFR_LittleEndian;
	/* RJ: I don't know, why the following has to be the opposite
	 * of the corresponding ZR36060 setting, but only this way
	 * we get the correct colors when uncompressing to the screen */
	//reg |= ZR36057_VFESPFR_VCLKPol; /**/
	/* RJ: Don't know if that is needed for NTSC also */
	if (zr->norm != VIDEO_MODE_NTSC)
		reg |= ZR36057_VFESPFR_ExtFl;	// NEEDED!!!!!!! Wolfgang
	reg |= ZR36057_VFESPFR_TopField;
	switch (format->palette) {

	case VIDEO_PALETTE_YUYV:
	case VIDEO_PALETTE_YUV422:
		reg |= ZR36057_VFESPFR_YUV422;
		break;

	case VIDEO_PALETTE_RGB555:
		reg |= ZR36057_VFESPFR_RGB555 | ZR36057_VFESPFR_ErrDif;
		break;

	case VIDEO_PALETTE_RGB565:
		reg |= ZR36057_VFESPFR_RGB565 | ZR36057_VFESPFR_ErrDif;
		break;

	case VIDEO_PALETTE_RGB24:
		reg |= ZR36057_VFESPFR_RGB888 | ZR36057_VFESPFR_Pack24;
		break;

	case VIDEO_PALETTE_RGB32:
		reg |= ZR36057_VFESPFR_RGB888;
		break;

	default:
		dprintk(1,
			KERN_INFO "%s: set_vfe() - unknown color_fmt=%x\n",
			ZR_DEVNAME(zr), format->palette);
		return;

	}
	/* stronger horizontal decimation gets a wider anti-alias filter */
	if (HorDcm >= 48) {
		reg |= 3 << ZR36057_VFESPFR_HFilter;	/* 5 tap filter */
	} else if (HorDcm >= 32) {
		reg |= 2 << ZR36057_VFESPFR_HFilter;	/* 4 tap filter */
	} else if (HorDcm >= 16) {
		reg |= 1 << ZR36057_VFESPFR_HFilter;	/* 3 tap filter */
	}
	btwrite(reg, ZR36057_VFESPFR);

	/* display configuration */
	reg = (16 << ZR36057_VDCR_MinPix)
	    | (VidWinHt << ZR36057_VDCR_VidWinHt)
	    | (VidWinWid << ZR36057_VDCR_VidWinWid);
	if (pci_pci_problems & PCIPCI_TRITON)
		// || zr->revision < 1) // Revision 1 has also Triton support
		reg &= ~ZR36057_VDCR_Triton;
	else
		reg |= ZR36057_VDCR_Triton;
	btwrite(reg, ZR36057_VDCR);

	/* (Ronald) don't write this if overlay_mask = NULL */
	if (zr->overlay_mask) {
		/* Write overlay clipping mask data, but don't enable overlay clipping */
		/* RJ: since this makes only sense on the screen, we use
		 * zr->overlay_settings.width instead of video_width */
		mask_line_size = (BUZ_MAX_WIDTH + 31) / 32;
		reg = virt_to_bus(zr->overlay_mask);
		btwrite(reg, ZR36057_MMTR);
		reg = virt_to_bus(zr->overlay_mask + mask_line_size);
		btwrite(reg, ZR36057_MMBR);
		reg =
		    mask_line_size - (zr->overlay_settings.width +
				      31) / 32;
		if (DispMode == 0)
			reg += mask_line_size;
		reg <<= ZR36057_OCR_MaskStride;
		btwrite(reg, ZR36057_OCR);
	}

	zr36057_adjust_vfe(zr, zr->codec_mode);
}
/*
* Switch overlay on or off
*/
/*
 * zr36057_overlay - switch the hardware video overlay on or off.
 * @on: non-zero enables overlay output, zero disables it
 *
 * Enabling first disables video output, reprograms the VFE for the
 * overlay window, writes the top/bottom display addresses and the
 * stride register, optionally enables overlay clipping, and finally
 * re-enables video output.  Disabling just clears the VidEn bit.
 */
void
zr36057_overlay (struct zoran *zr,
		 int on)
{
	u32 reg;

	if (on) {
		/* do the necessary settings ... */
		btand(~ZR36057_VDCR_VidEn, ZR36057_VDCR);	/* switch it off first */

		zr36057_set_vfe(zr,
				zr->overlay_settings.width,
				zr->overlay_settings.height,
				zr->overlay_settings.format);

		/* Start and length of each line MUST be 4-byte aligned.
		 * This should be allready checked before the call to this routine.
		 * All error messages are internal driver checking only! */

		/* video display top and bottom registers */
		reg = (long) zr->buffer.base +
		    zr->overlay_settings.x *
		    ((zr->overlay_settings.format->depth + 7) / 8) +
		    zr->overlay_settings.y *
		    zr->buffer.bytesperline;
		btwrite(reg, ZR36057_VDTR);
		if (reg & 3)
			dprintk(1,
				KERN_ERR
				"%s: zr36057_overlay() - video_address not aligned\n",
				ZR_DEVNAME(zr));
		/* interlaced: bottom field starts one line further down */
		if (zr->overlay_settings.height > BUZ_MAX_HEIGHT / 2)
			reg += zr->buffer.bytesperline;
		btwrite(reg, ZR36057_VDBR);

		/* video stride, status, and frame grab register */
		reg = zr->buffer.bytesperline -
		    zr->overlay_settings.width *
		    ((zr->overlay_settings.format->depth + 7) / 8);
		if (zr->overlay_settings.height > BUZ_MAX_HEIGHT / 2)
			reg += zr->buffer.bytesperline;
		if (reg & 3)
			dprintk(1,
				KERN_ERR
				"%s: zr36057_overlay() - video_stride not aligned\n",
				ZR_DEVNAME(zr));
		reg = (reg << ZR36057_VSSFGR_DispStride);
		reg |= ZR36057_VSSFGR_VidOvf;	/* clear overflow status */
		btwrite(reg, ZR36057_VSSFGR);

		/* Set overlay clipping */
		if (zr->overlay_settings.clipcount > 0)
			btor(ZR36057_OCR_OvlEnable, ZR36057_OCR);

		/* ... and switch it on */
		btor(ZR36057_VDCR_VidEn, ZR36057_VDCR);
	} else {
		/* Switch it off */
		btand(~ZR36057_VDCR_VidEn, ZR36057_VDCR);
	}
}
/*
* The overlay mask has one bit for each pixel on a scan line,
* and the maximum window size is BUZ_MAX_WIDTH * BUZ_MAX_HEIGHT pixels.
*/
/*
 * write_overlay_mask - rasterize V4L clip rectangles into the overlay mask.
 * @file:  open file handle whose private data holds the per-fh mask buffer
 * @vp:    array of clip rectangles
 * @count: number of entries in @vp
 *
 * The mask has one bit per pixel per scan line (all-ones = visible).
 * Each clip rectangle is trimmed to the overlay window, degenerate
 * rectangles are skipped, and the covered bits are cleared.
 *
 * Fix: removed the unused locals "struct zoran *zr = fh->zr;" and
 * "u32 reg;" (reg was assigned 0 and never read).
 */
void
write_overlay_mask (struct file *file,
		    struct video_clip *vp,
		    int count)
{
	struct zoran_fh *fh = file->private_data;
	unsigned mask_line_size = (BUZ_MAX_WIDTH + 31) / 32;
	u32 *mask;
	int x, y, width, height;
	unsigned i, j, k;

	/* fill mask with one bits */
	memset(fh->overlay_mask, ~0, mask_line_size * 4 * BUZ_MAX_HEIGHT);

	for (i = 0; i < count; ++i) {
		/* pick up local copy of clip */
		x = vp[i].x;
		y = vp[i].y;
		width = vp[i].width;
		height = vp[i].height;

		/* trim clips that extend beyond the window */
		if (x < 0) {
			width += x;
			x = 0;
		}
		if (y < 0) {
			height += y;
			y = 0;
		}
		if (x + width > fh->overlay_settings.width) {
			width = fh->overlay_settings.width - x;
		}
		if (y + height > fh->overlay_settings.height) {
			height = fh->overlay_settings.height - y;
		}

		/* ignore degenerate clips */
		if (height <= 0) {
			continue;
		}
		if (width <= 0) {
			continue;
		}

		/* apply clip for each scan line */
		for (j = 0; j < height; ++j) {
			/* reset bit for each pixel */
			/* this can be optimized later if need be */
			mask = fh->overlay_mask + (y + j) * mask_line_size;
			for (k = 0; k < width; ++k) {
				mask[(x + k) / 32] &=
				    ~((u32) 1 << (x + k) % 32);
			}
		}
	}
}
/* Enable/Disable uncompressed memory grabbing of the 36057 */
void
zr36057_set_memgrab (struct zoran *zr,
int mode)
{
if (mode) {
if (btread(ZR36057_VSSFGR) &
(ZR36057_VSSFGR_SnapShot | ZR36057_VSSFGR_FrameGrab))
dprintk(1,
KERN_WARNING
"%s: zr36057_set_memgrab(1) with SnapShot or FrameGrab on!?\n",
ZR_DEVNAME(zr));
/* switch on VSync interrupts */
btwrite(IRQ_MASK, ZR36057_ISR); // Clear Interrupts
btor(zr->card.vsync_int, ZR36057_ICR); // SW
/* enable SnapShot */
btor(ZR36057_VSSFGR_SnapShot, ZR36057_VSSFGR);
/* Set zr36057 video front end and enable video */
zr36057_set_vfe(zr, zr->v4l_settings.width,
zr->v4l_settings.height,
zr->v4l_settings.format);
zr->v4l_memgrab_active = 1;
} else {
zr->v4l_memgrab_active = 0;
/* switch off VSync interrupts */
btand(~zr->card.vsync_int, ZR36057_ICR); // SW
/* reenable grabbing to screen if it was running */
if (zr->v4l_overlay_active) {
zr36057_overlay(zr, 1);
} else {
btand(~ZR36057_VDCR_VidEn, ZR36057_VDCR);
btand(~ZR36057_VSSFGR_SnapShot, ZR36057_VSSFGR);
}
}
}
/*
 * wait_grab_pending - block until every queued v4l memory grab is done
 *
 * Returns 0 on success (or when no grab is active at all), or
 * -ERESTARTSYS if the sleep was interrupted by a signal.  Once the
 * pending queue drains, memory grabbing is switched off under the IRQ
 * spinlock.
 */
int
wait_grab_pending (struct zoran *zr)
{
	unsigned long flags;

	/* wait until all pending grabs are finished */
	if (!zr->v4l_memgrab_active)
		return 0;

	wait_event_interruptible(zr->v4l_capq,
				 (zr->v4l_pend_tail == zr->v4l_pend_head));
	if (signal_pending(current))
		return -ERESTARTSYS;

	spin_lock_irqsave(&zr->spinlock, flags);
	zr36057_set_memgrab(zr, 0);
	spin_unlock_irqrestore(&zr->spinlock, flags);

	return 0;
}
/*****************************************************************************
* *
* Set up the Buz-specific MJPEG part *
* *
*****************************************************************************/
/*
 * set_frame - drive the JPEG codec's FRAME GPIO line.
 * Callers pass 0 to assert ("\FRAME") and 1 to release ("/FRAME"),
 * as annotated at the call sites in jpeg_start()/zr36057_enable_jpg().
 */
static inline void
set_frame (struct zoran *zr,
	   int val)
{
	GPIO(zr, zr->card.gpio[GPIO_JPEG_FRAME], val);
}
/*
 * set_videobus_dir - set the direction of the video bus.
 * LML33-family boards drive this through GPIO 5 (forced high unless the
 * lml33dpath module option is 0); all other cards use the card table's
 * GPIO_VID_DIR pin, honouring its polarity flag.
 */
static void
set_videobus_dir (struct zoran *zr,
		  int val)
{
	if (zr->card.type == LML33 || zr->card.type == LML33R10) {
		GPIO(zr, 5, lml33dpath == 0 ? val : 1);
	} else {
		GPIO(zr, zr->card.gpio[GPIO_VID_DIR],
		     zr->card.gpio_pol[GPIO_VID_DIR] ? !val : val);
	}
}
/*
 * init_jpeg_queue - reset all MJPEG ring-buffer bookkeeping.
 * Clears the queue indices, sequence/error counters, hands every
 * capture buffer back to user space and marks every stat_com slot as
 * unavailable to the zr36057 (bit 0 set).
 */
static void
init_jpeg_queue (struct zoran *zr)
{
	int i;

	/* queue indices */
	zr->jpg_que_head = zr->jpg_dma_head = 0;
	zr->jpg_dma_tail = zr->jpg_que_tail = 0;

	/* sequence and error bookkeeping */
	zr->jpg_seq_num = 0;
	zr->JPEG_error = 0;
	zr->num_errors = 0;
	zr->jpg_err_seq = 0;
	zr->jpg_err_shift = 0;
	zr->jpg_queued_num = 0;

	/* nothing going on in any buffer */
	for (i = zr->jpg_buffers.num_buffers; i-- > 0;)
		zr->jpg_buffers.buffer[i].state = BUZ_STATE_USER;

	/* mark every stat_com entry as unavailable to zr36057 */
	for (i = BUZ_NUM_STAT_COM; i-- > 0;)
		zr->stat_com[i] = cpu_to_le32(1);
}
/*
 * zr36057_set_jpg - program the zr36057 for the requested codec mode.
 * Sets the MJPEG mode bits, sync generator (vertical + horizontal
 * timing windows from the current TV norm), field order, the stat_com
 * table base address, and the code FIFO threshold, then re-adjusts
 * the VFE for @mode.
 */
static void
zr36057_set_jpg (struct zoran *zr,
		 enum zoran_codec_mode mode)
{
	struct tvnorm *tvn;
	u32 reg;

	tvn = zr->timing;

	/* assert P_Reset, disable code transfer, deassert Active */
	btwrite(0, ZR36057_JPC);

	/* MJPEG compression mode */
	switch (mode) {

	case BUZ_MODE_MOTION_COMPRESS:
	default:
		reg = ZR36057_JMC_MJPGCmpMode;
		break;

	case BUZ_MODE_MOTION_DECOMPRESS:
		reg = ZR36057_JMC_MJPGExpMode;
		reg |= ZR36057_JMC_SyncMstr;
		/* RJ: The following is experimental - improves the output to screen */
		//if(zr->jpg_settings.VFIFO_FB) reg |= ZR36057_JMC_VFIFO_FB; // No, it doesn't. SM
		break;

	case BUZ_MODE_STILL_COMPRESS:
		reg = ZR36057_JMC_JPGCmpMode;
		break;

	case BUZ_MODE_STILL_DECOMPRESS:
		reg = ZR36057_JMC_JPGExpMode;
		break;
	}
	reg |= ZR36057_JMC_JPG;
	if (zr->jpg_settings.field_per_buff == 1)
		reg |= ZR36057_JMC_Fld_per_buff;
	btwrite(reg, ZR36057_JMC);

	/* vertical */
	btor(ZR36057_VFEVCR_VSPol, ZR36057_VFEVCR);
	reg = (6 << ZR36057_VSP_VsyncSize) |
	      (tvn->Ht << ZR36057_VSP_FrmTot);
	btwrite(reg, ZR36057_VSP);
	reg = ((zr->jpg_settings.img_y + tvn->VStart) << ZR36057_FVAP_NAY) |
	      (zr->jpg_settings.img_height << ZR36057_FVAP_PAY);
	btwrite(reg, ZR36057_FVAP);

	/* horizontal */
	if (zr->card.vfe_pol.hsync_pol)
		btor(ZR36057_VFEHCR_HSPol, ZR36057_VFEHCR);
	else
		btand(~ZR36057_VFEHCR_HSPol, ZR36057_VFEHCR);
	reg = ((tvn->HSyncStart) << ZR36057_HSP_HsyncStart) |
	      (tvn->Wt << ZR36057_HSP_LineTot);
	btwrite(reg, ZR36057_HSP);
	reg = ((zr->jpg_settings.img_x +
		tvn->HStart + 4) << ZR36057_FHAP_NAX) |
	      (zr->jpg_settings.img_width << ZR36057_FHAP_PAX);
	btwrite(reg, ZR36057_FHAP);

	/* field process parameters */
	if (zr->jpg_settings.odd_even)
		reg = ZR36057_FPP_Odd_Even;
	else
		reg = 0;

	btwrite(reg, ZR36057_FPP);

	/* Set proper VCLK Polarity, else colors will be wrong during playback */
	//btor(ZR36057_VFESPFR_VCLKPol, ZR36057_VFESPFR);

	/* code base address */
	reg = virt_to_bus(zr->stat_com);
	btwrite(reg, ZR36057_JCBA);

	/* FIFO threshold (FIFO is 160. double words) */
	/* NOTE: decimal values here */
	switch (mode) {

	case BUZ_MODE_STILL_COMPRESS:
	case BUZ_MODE_MOTION_COMPRESS:
		if (zr->card.type != BUZ)
			reg = 140;
		else
			reg = 60;
		break;

	case BUZ_MODE_STILL_DECOMPRESS:
	case BUZ_MODE_MOTION_DECOMPRESS:
		reg = 20;
		break;

	default:
		reg = 80;
		break;
	}
	btwrite(reg, ZR36057_JCFT);
	zr36057_adjust_vfe(zr, mode);
}
/*
 * print_interrupts - dump the accumulated interrupt statistics
 * (counters gathered by the IRQ handler) to the kernel log.
 * Debug aid only; prints one line, omitting counters that are zero.
 */
void
print_interrupts (struct zoran *zr)
{
	int res, noerr = 0;

	printk(KERN_INFO "%s: interrupts received:", ZR_DEVNAME(zr));
	if ((res = zr->field_counter) < -1 || res > 1) {
		printk(" FD:%d", res);
	}
	if ((res = zr->intr_counter_GIRQ1) != 0) {
		printk(" GIRQ1:%d", res);
		noerr++;
	}
	if ((res = zr->intr_counter_GIRQ0) != 0) {
		printk(" GIRQ0:%d", res);
		noerr++;
	}
	if ((res = zr->intr_counter_CodRepIRQ) != 0) {
		printk(" CodRepIRQ:%d", res);
		noerr++;
	}
	if ((res = zr->intr_counter_JPEGRepIRQ) != 0) {
		printk(" JPEGRepIRQ:%d", res);
		noerr++;
	}
	if (zr->JPEG_max_missed) {
		printk(" JPEG delays: max=%d min=%d", zr->JPEG_max_missed,
		       zr->JPEG_min_missed);
	}
	if (zr->END_event_missed) {
		printk(" ENDs missed: %d", zr->END_event_missed);
	}
	//if (zr->jpg_queued_num) {
	printk(" queue_state=%ld/%ld/%ld/%ld", zr->jpg_que_tail,
	       zr->jpg_dma_tail, zr->jpg_dma_head, zr->jpg_que_head);
	//}
	if (!noerr) {
		printk(": no interrupts detected.");
	}
	printk("\n");
}
/*
 * clear_interrupt_counters - zero all interrupt/JPEG statistics
 * ahead of a new capture run; JPEG_min_missed starts at INT_MAX-like
 * sentinel so the first sample always updates it.
 */
void
clear_interrupt_counters (struct zoran *zr)
{
	/* interrupt tallies */
	zr->intr_counter_GIRQ1 = zr->intr_counter_GIRQ0 = 0;
	zr->intr_counter_CodRepIRQ = zr->intr_counter_JPEGRepIRQ = 0;
	zr->field_counter = 0;

	/* IRQ1 / JPEG event bookkeeping */
	zr->IRQ1_in = zr->IRQ1_out = 0;
	zr->JPEG_in = zr->JPEG_out = 0;
	zr->JPEG_0 = zr->JPEG_1 = 0;
	zr->END_event_missed = 0;

	/* missed-interrupt statistics */
	zr->JPEG_missed = 0;
	zr->JPEG_max_missed = 0;
	zr->JPEG_min_missed = 0x7fffffff;
}
/*
 * count_reset_interrupt - read, acknowledge and tally pending IRQs.
 * Reads the ISR masked to the four sources we track (0x78000000),
 * acknowledges each pending one by writing its bit back (write-1-to-
 * clear) and bumps the matching counter.  Returns the masked ISR
 * value, 0 if nothing was pending.
 */
static u32
count_reset_interrupt (struct zoran *zr)
{
	u32 isr;

	if ((isr = btread(ZR36057_ISR) & 0x78000000)) {
		if (isr & ZR36057_ISR_GIRQ1) {
			btwrite(ZR36057_ISR_GIRQ1, ZR36057_ISR);
			zr->intr_counter_GIRQ1++;
		}
		if (isr & ZR36057_ISR_GIRQ0) {
			btwrite(ZR36057_ISR_GIRQ0, ZR36057_ISR);
			zr->intr_counter_GIRQ0++;
		}
		if (isr & ZR36057_ISR_CodRepIRQ) {
			btwrite(ZR36057_ISR_CodRepIRQ, ZR36057_ISR);
			zr->intr_counter_CodRepIRQ++;
		}
		if (isr & ZR36057_ISR_JPEGRepIRQ) {
			btwrite(ZR36057_ISR_JPEGRepIRQ, ZR36057_ISR);
			zr->intr_counter_JPEGRepIRQ++;
		}
	}
	return isr;
}
/* hack */
extern void zr36016_write (struct videocodec *codec,
u16 reg,
u32 val);
/*
 * jpeg_start - start JPEG code transfer.
 * Takes the codec out of P_Reset, enables code transfer, clears and
 * unmasks the JPEG interrupts, programs the guest bus ID, pulses the
 * \FRAME line and enables Go generation.  The ZR36016+ZR36050 combo
 * additionally needs the 36016 enabled and the 36050 GO-register
 * address latched via the post office.
 */
void
jpeg_start (struct zoran *zr)
{
	int reg;

	zr->frame_num = 0;

	/* deassert P_reset, disable code transfer, deassert Active */
	btwrite(ZR36057_JPC_P_Reset, ZR36057_JPC);
	/* stop flushing the internal code buffer */
	btand(~ZR36057_MCTCR_CFlush, ZR36057_MCTCR);
	/* enable code transfer */
	btor(ZR36057_JPC_CodTrnsEn, ZR36057_JPC);

	/* clear IRQs */
	btwrite(IRQ_MASK, ZR36057_ISR);
	/* enable the JPEG IRQs */
	btwrite(zr->card.jpeg_int |
		ZR36057_ICR_JPEGRepIRQ |
		ZR36057_ICR_IntPinEn,
		ZR36057_ICR);

	set_frame(zr, 0);	// \FRAME

	/* set the JPEG codec guest ID */
	reg = (zr->card.gpcs[1] << ZR36057_JCGI_JPEGuestID) |
	      (0 << ZR36057_JCGI_JPEGuestReg);
	btwrite(reg, ZR36057_JCGI);

	if (zr->card.video_vfe == CODEC_TYPE_ZR36016 &&
	    zr->card.video_codec == CODEC_TYPE_ZR36050) {
		/* Enable processing on the ZR36016 */
		if (zr->vfe)
			zr36016_write(zr->vfe, 0, 1);

		/* load the address of the GO register in the ZR36050 latch */
		post_office_write(zr, 0, 0, 0);
	}

	/* assert Active */
	btor(ZR36057_JPC_Active, ZR36057_JPC);

	/* enable the Go generation */
	btor(ZR36057_JMC_Go_en, ZR36057_JMC);
	udelay(30);

	set_frame(zr, 1);	// /FRAME

	dprintk(3, KERN_DEBUG "%s: jpeg_start\n", ZR_DEVNAME(zr));
}
/*
 * zr36057_enable_jpg - switch the MJPEG engine between modes.
 * @mode: BUZ_MODE_MOTION_COMPRESS, BUZ_MODE_MOTION_DECOMPRESS or
 *        BUZ_MODE_IDLE (anything else falls through to idle).
 *
 * Sets the video bus direction and decoder/encoder routing for the
 * mode, configures the JPEG codec (and optional VFE), resets the
 * stat_com ring and programs the zr36057; for IDLE it tears everything
 * down and puts the codec to sleep.
 */
void
zr36057_enable_jpg (struct zoran *zr,
		    enum zoran_codec_mode mode)
{
	static int zero = 0;
	static int one = 1;

	struct vfe_settings cap;
	int field_size =
	    zr->jpg_buffers.buffer_size / zr->jpg_settings.field_per_buff;

	zr->codec_mode = mode;

	/* capture window + decimation/quality passed to codec and VFE */
	cap.x = zr->jpg_settings.img_x;
	cap.y = zr->jpg_settings.img_y;
	cap.width = zr->jpg_settings.img_width;
	cap.height = zr->jpg_settings.img_height;
	cap.decimation =
	    zr->jpg_settings.HorDcm | (zr->jpg_settings.VerDcm << 8);
	cap.quality = zr->jpg_settings.jpg_comp.quality;

	switch (mode) {

	case BUZ_MODE_MOTION_COMPRESS: {
		struct jpeg_app_marker app;
		struct jpeg_com_marker com;

		/* In motion compress mode, the decoder output must be enabled, and
		 * the video bus direction set to input.
		 */
		set_videobus_dir(zr, 0);
		decoder_command(zr, DECODER_ENABLE_OUTPUT, &one);
		encoder_command(zr, ENCODER_SET_INPUT, &zero);

		/* Take the JPEG codec and the VFE out of sleep */
		jpeg_codec_sleep(zr, 0);

		/* set JPEG app/com marker */
		app.appn = zr->jpg_settings.jpg_comp.APPn;
		app.len = zr->jpg_settings.jpg_comp.APP_len;
		memcpy(app.data, zr->jpg_settings.jpg_comp.APP_data, 60);
		zr->codec->control(zr->codec, CODEC_S_JPEG_APP_DATA,
				   sizeof(struct jpeg_app_marker), &app);

		com.len = zr->jpg_settings.jpg_comp.COM_len;
		memcpy(com.data, zr->jpg_settings.jpg_comp.COM_data, 60);
		zr->codec->control(zr->codec, CODEC_S_JPEG_COM_DATA,
				   sizeof(struct jpeg_com_marker), &com);

		/* Setup the JPEG codec */
		zr->codec->control(zr->codec, CODEC_S_JPEG_TDS_BYTE,
				   sizeof(int), &field_size);
		zr->codec->set_video(zr->codec, zr->timing, &cap,
				     &zr->card.vfe_pol);
		zr->codec->set_mode(zr->codec, CODEC_DO_COMPRESSION);

		/* Setup the VFE */
		if (zr->vfe) {
			zr->vfe->control(zr->vfe, CODEC_S_JPEG_TDS_BYTE,
					 sizeof(int), &field_size);
			zr->vfe->set_video(zr->vfe, zr->timing, &cap,
					   &zr->card.vfe_pol);
			zr->vfe->set_mode(zr->vfe, CODEC_DO_COMPRESSION);
		}

		init_jpeg_queue(zr);
		zr36057_set_jpg(zr, mode);	// \P_Reset, ... Video param, FIFO

		clear_interrupt_counters(zr);
		dprintk(2, KERN_INFO "%s: enable_jpg(MOTION_COMPRESS)\n",
			ZR_DEVNAME(zr));
		break;
	}

	case BUZ_MODE_MOTION_DECOMPRESS:
		/* In motion decompression mode, the decoder output must be disabled, and
		 * the video bus direction set to output.
		 */
		decoder_command(zr, DECODER_ENABLE_OUTPUT, &zero);
		set_videobus_dir(zr, 1);
		encoder_command(zr, ENCODER_SET_INPUT, &one);

		/* Take the JPEG codec and the VFE out of sleep */
		jpeg_codec_sleep(zr, 0);
		/* Setup the VFE */
		if (zr->vfe) {
			zr->vfe->set_video(zr->vfe, zr->timing, &cap,
					   &zr->card.vfe_pol);
			zr->vfe->set_mode(zr->vfe, CODEC_DO_EXPANSION);
		}
		/* Setup the JPEG codec */
		zr->codec->set_video(zr->codec, zr->timing, &cap,
				     &zr->card.vfe_pol);
		zr->codec->set_mode(zr->codec, CODEC_DO_EXPANSION);

		init_jpeg_queue(zr);
		zr36057_set_jpg(zr, mode);	// \P_Reset, ... Video param, FIFO

		clear_interrupt_counters(zr);
		dprintk(2, KERN_INFO "%s: enable_jpg(MOTION_DECOMPRESS)\n",
			ZR_DEVNAME(zr));
		break;

	case BUZ_MODE_IDLE:
	default:
		/* shut down processing */
		btand(~(zr->card.jpeg_int | ZR36057_ICR_JPEGRepIRQ),
		      ZR36057_ICR);
		btwrite(zr->card.jpeg_int | ZR36057_ICR_JPEGRepIRQ,
			ZR36057_ISR);
		btand(~ZR36057_JMC_Go_en, ZR36057_JMC);	// \Go_en

		msleep(50);

		set_videobus_dir(zr, 0);
		set_frame(zr, 1);	// /FRAME
		btor(ZR36057_MCTCR_CFlush, ZR36057_MCTCR);	// /CFlush
		btwrite(0, ZR36057_JPC);	// \P_Reset,\CodTrnsEn,\Active
		btand(~ZR36057_JMC_VFIFO_FB, ZR36057_JMC);
		btand(~ZR36057_JMC_SyncMstr, ZR36057_JMC);
		jpeg_codec_reset(zr);
		jpeg_codec_sleep(zr, 1);
		zr36057_adjust_vfe(zr, mode);

		decoder_command(zr, DECODER_ENABLE_OUTPUT, &one);
		encoder_command(zr, ENCODER_SET_INPUT, &zero);

		dprintk(2, KERN_INFO "%s: enable_jpg(IDLE)\n", ZR_DEVNAME(zr));
		break;

	}
}
/* when this is called the spinlock must be held */
/*
 * zoran_feed_stat_com - move pending frames into the stat_com DMA ring.
 * With TmpDcm == 1 each frame occupies one stat_com slot; otherwise a
 * frame takes two adjacent slots (both pointing at the same fragment
 * table).  Stops as soon as a slot is still owned by the hardware
 * (bit 0 clear) or the ring / pending queue is exhausted.
 */
void
zoran_feed_stat_com (struct zoran *zr)
{
	/* move frames from pending queue to DMA */

	int frame, i, max_stat_com;

	max_stat_com =
	    (zr->jpg_settings.TmpDcm ==
	     1) ? BUZ_NUM_STAT_COM : (BUZ_NUM_STAT_COM >> 1);

	while ((zr->jpg_dma_head - zr->jpg_dma_tail) < max_stat_com &&
	       zr->jpg_dma_head < zr->jpg_que_head) {
		frame = zr->jpg_pend[zr->jpg_dma_head & BUZ_MASK_FRAME];
		if (zr->jpg_settings.TmpDcm == 1) {
			/* fill 1 stat_com entry */
			i = (zr->jpg_dma_head -
			     zr->jpg_err_shift) & BUZ_MASK_STAT_COM;
			if (!(zr->stat_com[i] & cpu_to_le32(1)))
				break;
			zr->stat_com[i] =
			    cpu_to_le32(zr->jpg_buffers.buffer[frame].frag_tab_bus);
		} else {
			/* fill 2 stat_com entries */
			i = ((zr->jpg_dma_head -
			      zr->jpg_err_shift) & 1) * 2;
			if (!(zr->stat_com[i] & cpu_to_le32(1)))
				break;
			zr->stat_com[i] =
			    cpu_to_le32(zr->jpg_buffers.buffer[frame].frag_tab_bus);
			zr->stat_com[i + 1] =
			    cpu_to_le32(zr->jpg_buffers.buffer[frame].frag_tab_bus);
		}
		zr->jpg_buffers.buffer[frame].state = BUZ_STATE_DMA;
		zr->jpg_dma_head++;
	}
	/* NOTE(review): bumped once per call, outside the loop above —
	 * looks like it may have been intended per queued frame; confirm
	 * against the decompression frame accounting. */
	if (zr->codec_mode == BUZ_MODE_MOTION_DECOMPRESS)
		zr->jpg_queued_num++;
}
/* when this is called the spinlock must be held */
/*
 * zoran_reap_stat_com - harvest completed stat_com entries.
 * Moves finished frames from the DMA queue to the done queue,
 * time-stamps each buffer and updates the sequence number (from the
 * hardware counter embedded in stat_com when compressing).  Returns
 * early at the first entry the hardware has not completed yet.
 */
static void
zoran_reap_stat_com (struct zoran *zr)
{
	/* move frames from DMA queue to done queue */

	int i;
	u32 stat_com;
	unsigned int seq;
	unsigned int dif;
	struct zoran_jpg_buffer *buffer;
	int frame;

	/* In motion decompress we don't have a hardware frame counter,
	 * we just count the interrupts here */
	if (zr->codec_mode == BUZ_MODE_MOTION_DECOMPRESS) {
		zr->jpg_seq_num++;
	}
	while (zr->jpg_dma_tail < zr->jpg_dma_head) {
		if (zr->jpg_settings.TmpDcm == 1)
			i = (zr->jpg_dma_tail -
			     zr->jpg_err_shift) & BUZ_MASK_STAT_COM;
		else
			i = ((zr->jpg_dma_tail -
			      zr->jpg_err_shift) & 1) * 2 + 1;

		stat_com = le32_to_cpu(zr->stat_com[i]);

		/* bit 0 clear: the hardware hasn't finished this one yet */
		if ((stat_com & 1) == 0) {
			return;
		}
		frame = zr->jpg_pend[zr->jpg_dma_tail & BUZ_MASK_FRAME];
		buffer = &zr->jpg_buffers.buffer[frame];
		do_gettimeofday(&buffer->bs.timestamp);

		if (zr->codec_mode == BUZ_MODE_MOTION_COMPRESS) {
			buffer->bs.length = (stat_com & 0x7fffff) >> 1;

			/* update sequence number with the help of the counter in stat_com */

			seq = ((stat_com >> 24) + zr->jpg_err_seq) & 0xff;
			dif = (seq - zr->jpg_seq_num) & 0xff;
			zr->jpg_seq_num += dif;
		} else {
			buffer->bs.length = 0;
		}
		buffer->bs.seq =
		    zr->jpg_settings.TmpDcm ==
		    2 ? (zr->jpg_seq_num >> 1) : zr->jpg_seq_num;
		buffer->state = BUZ_STATE_DONE;

		zr->jpg_dma_tail++;
	}
}
/*
 * error_handler - recover from a JPEG engine error.
 * Called from the IRQ handler (spinlock held) on DATERR, too many
 * missed fields, or a pending error.  Stops and resets the codec,
 * logs diagnostics, rotates the stat_com table so the current entry
 * sits at index 0 again, then restarts the codec once the decoder
 * reports a good signal (compression) or immediately (decompression).
 */
static void
error_handler (struct zoran *zr,
	       u32 astat,
	       u32 stat)
{
	/* This is JPEG error handling part */
	if ((zr->codec_mode != BUZ_MODE_MOTION_COMPRESS) &&
	    (zr->codec_mode != BUZ_MODE_MOTION_DECOMPRESS)) {
		//dprintk(1, KERN_ERR "%s: Internal error: error handling request in mode %d\n", ZR_DEVNAME(zr), zr->codec_mode);
		return;
	}

	/* capture stalled only because all buffers are in flight: just
	 * reap/refeed the ring and wake the waiter, no codec reset */
	if ((stat & 1) == 0 &&
	    zr->codec_mode == BUZ_MODE_MOTION_COMPRESS &&
	    zr->jpg_dma_tail - zr->jpg_que_tail >=
	    zr->jpg_buffers.num_buffers) {
		/* No free buffers... */
		zoran_reap_stat_com(zr);
		zoran_feed_stat_com(zr);
		wake_up_interruptible(&zr->jpg_capq);
		zr->JPEG_missed = 0;
		return;
	}

	if (zr->JPEG_error != 1) {
		/*
		 * First entry: error just happened during normal operation
		 *
		 * In BUZ_MODE_MOTION_COMPRESS:
		 *
		 * Possible glitch in TV signal. In this case we should
		 * stop the codec and wait for good quality signal before
		 * restarting it to avoid further problems
		 *
		 * In BUZ_MODE_MOTION_DECOMPRESS:
		 *
		 * Bad JPEG frame: we have to mark it as processed (codec crashed
		 * and was not able to do it itself), and to remove it from queue.
		 */
		btand(~ZR36057_JMC_Go_en, ZR36057_JMC);
		udelay(1);
		/* fold the codec's post-office status bits into stat for the log */
		stat = stat | (post_office_read(zr, 7, 0) & 3) << 8;
		btwrite(0, ZR36057_JPC);
		btor(ZR36057_MCTCR_CFlush, ZR36057_MCTCR);
		jpeg_codec_reset(zr);
		jpeg_codec_sleep(zr, 1);
		zr->JPEG_error = 1;
		zr->num_errors++;

		/* Report error */
		if (*zr_debug > 1 && zr->num_errors <= 8) {
			long frame;
			frame =
			    zr->jpg_pend[zr->jpg_dma_tail & BUZ_MASK_FRAME];
			printk(KERN_ERR
			       "%s: JPEG error stat=0x%08x(0x%08x) queue_state=%ld/%ld/%ld/%ld seq=%ld frame=%ld. Codec stopped. ",
			       ZR_DEVNAME(zr), stat, zr->last_isr,
			       zr->jpg_que_tail, zr->jpg_dma_tail,
			       zr->jpg_dma_head, zr->jpg_que_head,
			       zr->jpg_seq_num, frame);
			printk("stat_com frames:");
			{
				int i, j;

				for (j = 0; j < BUZ_NUM_STAT_COM; j++) {
					for (i = 0;
					     i < zr->jpg_buffers.num_buffers;
					     i++) {
						if (le32_to_cpu(zr->stat_com[j]) ==
						    zr->jpg_buffers.
						    buffer[i].
						    frag_tab_bus) {
							printk("% d->%d",
							       j, i);
						}
					}
				}
				printk("\n");
			}
		}
		/* Find an entry in stat_com and rotate contents */
		{
			int i;

			if (zr->jpg_settings.TmpDcm == 1)
				i = (zr->jpg_dma_tail -
				     zr->jpg_err_shift) & BUZ_MASK_STAT_COM;
			else
				i = ((zr->jpg_dma_tail -
				      zr->jpg_err_shift) & 1) * 2;
			if (zr->codec_mode == BUZ_MODE_MOTION_DECOMPRESS) {
				/* Mimic zr36067 operation */
				zr->stat_com[i] |= cpu_to_le32(1);
				if (zr->jpg_settings.TmpDcm != 1)
					zr->stat_com[i + 1] |= cpu_to_le32(1);

				/* Refill */
				zoran_reap_stat_com(zr);
				zoran_feed_stat_com(zr);
				wake_up_interruptible(&zr->jpg_capq);

				/* Find an entry in stat_com again after refill */
				if (zr->jpg_settings.TmpDcm == 1)
					i = (zr->jpg_dma_tail -
					     zr->jpg_err_shift) &
					    BUZ_MASK_STAT_COM;
				else
					i = ((zr->jpg_dma_tail -
					      zr->jpg_err_shift) & 1) * 2;
			}
			if (i) {
				/* Rotate stat_comm entries to make current entry first */
				int j;
				u32 bus_addr[BUZ_NUM_STAT_COM];

				/* Here we are copying the stat_com array, which
				 * is already in little endian format, so
				 * no endian conversions here
				 */
				memcpy(bus_addr, zr->stat_com,
				       sizeof(bus_addr));
				for (j = 0; j < BUZ_NUM_STAT_COM; j++) {
					zr->stat_com[j] =
					    bus_addr[(i + j) &
						     BUZ_MASK_STAT_COM];
				}
				zr->jpg_err_shift += i;
				zr->jpg_err_shift &= BUZ_MASK_STAT_COM;
			}
			if (zr->codec_mode == BUZ_MODE_MOTION_COMPRESS)
				zr->jpg_err_seq = zr->jpg_seq_num;	/* + 1; */
		}
	}

	/* Now the stat_comm buffer is ready for restart */
	do {
		int status, mode;

		if (zr->codec_mode == BUZ_MODE_MOTION_COMPRESS) {
			decoder_command(zr, DECODER_GET_STATUS, &status);
			mode = CODEC_DO_COMPRESSION;
		} else {
			status = 0;
			mode = CODEC_DO_EXPANSION;
		}
		if (zr->codec_mode == BUZ_MODE_MOTION_DECOMPRESS ||
		    (status & DECODER_STATUS_GOOD)) {
			/********** RESTART code *************/
			jpeg_codec_reset(zr);
			zr->codec->set_mode(zr->codec, mode);
			zr36057_set_jpg(zr, zr->codec_mode);
			jpeg_start(zr);

			if (zr->num_errors <= 8)
				dprintk(2, KERN_INFO "%s: Restart\n",
					ZR_DEVNAME(zr));

			zr->JPEG_missed = 0;
			zr->JPEG_error = 2;
			/********** End RESTART code ***********/
		}
	} while (0);
}
/*
 * zoran_irq - main interrupt handler.
 * Services VSync interrupts (completing and (re)arming v4l memory
 * grabs), the JPEG code-replace interrupts (reaping/refeeding the
 * stat_com ring), triggers error recovery, and protects against IRQ
 * storms by masking the interrupt pin after too many loop iterations.
 * In zr->testing mode it only counts/clears interrupts for the probe.
 */
irqreturn_t
zoran_irq (int irq,
	   void *dev_id)
{
	u32 stat, astat;
	int count;
	struct zoran *zr;
	unsigned long flags;

	zr = dev_id;
	count = 0;

	if (zr->testing) {
		/* Testing interrupts */
		spin_lock_irqsave(&zr->spinlock, flags);
		while ((stat = count_reset_interrupt(zr))) {
			if (count++ > 100) {
				btand(~ZR36057_ICR_IntPinEn, ZR36057_ICR);
				dprintk(1,
					KERN_ERR
					"%s: IRQ lockup while testing, isr=0x%08x, cleared int mask\n",
					ZR_DEVNAME(zr), stat);
				wake_up_interruptible(&zr->test_q);
			}
		}
		zr->last_isr = stat;
		spin_unlock_irqrestore(&zr->spinlock, flags);
		return IRQ_HANDLED;
	}

	spin_lock_irqsave(&zr->spinlock, flags);
	while (1) {
		/* get/clear interrupt status bits */
		stat = count_reset_interrupt(zr);
		astat = stat & IRQ_MASK;
		if (!astat) {
			break;
		}
		dprintk(4,
			KERN_DEBUG
			"zoran_irq: astat: 0x%08x, mask: 0x%08x\n",
			astat, btread(ZR36057_ICR));
		if (astat & zr->card.vsync_int) {	// SW

			if (zr->codec_mode == BUZ_MODE_MOTION_DECOMPRESS ||
			    zr->codec_mode == BUZ_MODE_MOTION_COMPRESS) {
				/* count missed interrupts */
				zr->JPEG_missed++;
			}
			//post_office_read(zr,1,0);
			/* Interrupts may still happen when
			 * zr->v4l_memgrab_active is switched off.
			 * We simply ignore them */
			if (zr->v4l_memgrab_active) {

				/* A lot more checks should be here ... */
				if ((btread(ZR36057_VSSFGR) &
				     ZR36057_VSSFGR_SnapShot) == 0)
					dprintk(1,
						KERN_WARNING
						"%s: BuzIRQ with SnapShot off ???\n",
						ZR_DEVNAME(zr));

				if (zr->v4l_grab_frame != NO_GRAB_ACTIVE) {
					/* There is a grab on a frame going on, check if it has finished */

					if ((btread(ZR36057_VSSFGR) &
					     ZR36057_VSSFGR_FrameGrab) ==
					    0) {
						/* it is finished, notify the user */

						zr->v4l_buffers.buffer[zr->v4l_grab_frame].state = BUZ_STATE_DONE;
						zr->v4l_buffers.buffer[zr->v4l_grab_frame].bs.seq = zr->v4l_grab_seq;
						do_gettimeofday(&zr->v4l_buffers.buffer[zr->v4l_grab_frame].bs.timestamp);
						zr->v4l_grab_frame = NO_GRAB_ACTIVE;
						zr->v4l_pend_tail++;
					}
				}

				if (zr->v4l_grab_frame == NO_GRAB_ACTIVE)
					wake_up_interruptible(&zr->v4l_capq);

				/* Check if there is another grab queued */

				if (zr->v4l_grab_frame == NO_GRAB_ACTIVE &&
				    zr->v4l_pend_tail != zr->v4l_pend_head) {
					int frame = zr->v4l_pend[zr->v4l_pend_tail &
							V4L_MASK_FRAME];
					u32 reg;

					zr->v4l_grab_frame = frame;

					/* Set zr36057 video front end and enable video */

					/* Buffer address */
					reg =
					    zr->v4l_buffers.buffer[frame].
					    fbuffer_bus;
					btwrite(reg, ZR36057_VDTR);
					if (zr->v4l_settings.height >
					    BUZ_MAX_HEIGHT / 2)
						reg +=
						    zr->v4l_settings.
						    bytesperline;
					btwrite(reg, ZR36057_VDBR);

					/* video stride, status, and frame grab register */
					reg = 0;
					if (zr->v4l_settings.height >
					    BUZ_MAX_HEIGHT / 2)
						reg +=
						    zr->v4l_settings.
						    bytesperline;
					reg =
					    (reg <<
					     ZR36057_VSSFGR_DispStride);
					reg |= ZR36057_VSSFGR_VidOvf;
					reg |= ZR36057_VSSFGR_SnapShot;
					reg |= ZR36057_VSSFGR_FrameGrab;
					btwrite(reg, ZR36057_VSSFGR);

					btor(ZR36057_VDCR_VidEn,
					     ZR36057_VDCR);
				}
			}

			/* even if we don't grab, we do want to increment
			 * the sequence counter to see lost frames */
			zr->v4l_grab_seq++;
		}
#if (IRQ_MASK & ZR36057_ISR_CodRepIRQ)
		if (astat & ZR36057_ISR_CodRepIRQ) {
			zr->intr_counter_CodRepIRQ++;
			IDEBUG(printk
			       (KERN_DEBUG "%s: ZR36057_ISR_CodRepIRQ\n",
				ZR_DEVNAME(zr)));
			btand(~ZR36057_ICR_CodRepIRQ, ZR36057_ICR);
		}
#endif				/* (IRQ_MASK & ZR36057_ISR_CodRepIRQ) */
#if (IRQ_MASK & ZR36057_ISR_JPEGRepIRQ)
		if (astat & ZR36057_ISR_JPEGRepIRQ) {

			if (zr->codec_mode == BUZ_MODE_MOTION_DECOMPRESS ||
			    zr->codec_mode == BUZ_MODE_MOTION_COMPRESS) {
				if (*zr_debug > 1 &&
				    (!zr->frame_num || zr->JPEG_error)) {
					printk(KERN_INFO
					       "%s: first frame ready: state=0x%08x odd_even=%d field_per_buff=%d delay=%d\n",
					       ZR_DEVNAME(zr), stat,
					       zr->jpg_settings.odd_even,
					       zr->jpg_settings.
					       field_per_buff,
					       zr->JPEG_missed);
					{
						char sc[] = "0000";
						char sv[5];
						int i;

						strcpy(sv, sc);
						for (i = 0; i < 4; i++) {
							if (le32_to_cpu(zr->stat_com[i]) & 1)
								sv[i] = '1';
						}
						sv[4] = 0;
						printk(KERN_INFO
						       "%s: stat_com=%s queue_state=%ld/%ld/%ld/%ld\n",
						       ZR_DEVNAME(zr), sv,
						       zr->jpg_que_tail,
						       zr->jpg_dma_tail,
						       zr->jpg_dma_head,
						       zr->jpg_que_head);
					}
				} else {
					/* update min/max missed-VSync statistics */
					if (zr->JPEG_missed > zr->JPEG_max_missed)	// Get statistics
						zr->JPEG_max_missed =
						    zr->JPEG_missed;
					if (zr->JPEG_missed <
					    zr->JPEG_min_missed)
						zr->JPEG_min_missed =
						    zr->JPEG_missed;
				}

				if (*zr_debug > 2 && zr->frame_num < 6) {
					int i;

					printk("%s: seq=%ld stat_com:",
					       ZR_DEVNAME(zr), zr->jpg_seq_num);
					for (i = 0; i < 4; i++) {
						printk(" %08x",
						       le32_to_cpu(zr->stat_com[i]));
					}
					printk("\n");
				}
				zr->frame_num++;
				zr->JPEG_missed = 0;
				zr->JPEG_error = 0;
				zoran_reap_stat_com(zr);
				zoran_feed_stat_com(zr);
				wake_up_interruptible(&zr->jpg_capq);
			} /*else {
			      dprintk(1,
					KERN_ERR
					"%s: JPEG interrupt while not in motion (de)compress mode!\n",
					ZR_DEVNAME(zr));
			}*/
		}
#endif				/* (IRQ_MASK & ZR36057_ISR_JPEGRepIRQ) */

		/* DATERR, too many fields missed, error processing */
		if ((astat & zr->card.jpeg_int) ||
		    zr->JPEG_missed > 25 ||
		    zr->JPEG_error == 1 ||
		    ((zr->codec_mode == BUZ_MODE_MOTION_DECOMPRESS) &&
		     (zr->frame_num & (zr->JPEG_missed >
				       zr->jpg_settings.field_per_buff)))) {
			error_handler(zr, astat, stat);
		}

		count++;
		if (count > 10) {
			dprintk(2, KERN_WARNING "%s: irq loop %d\n",
				ZR_DEVNAME(zr), count);
			if (count > 20) {
				btand(~ZR36057_ICR_IntPinEn, ZR36057_ICR);
				dprintk(2,
					KERN_ERR
					"%s: IRQ lockup, cleared int mask\n",
					ZR_DEVNAME(zr));
				break;
			}
		}
		zr->last_isr = stat;
	}
	spin_unlock_irqrestore(&zr->spinlock, flags);

	return IRQ_HANDLED;
}
/*
 * zoran_set_pci_master - enable or disable PCI bus mastering.
 * Enabling goes through the PCI core; disabling strips the bus-master
 * bit from the device's PCI command word directly.
 */
void
zoran_set_pci_master (struct zoran *zr,
		      int set_master)
{
	u16 command;

	if (set_master) {
		pci_set_master(zr->pci_dev);
		return;
	}

	pci_read_config_word(zr->pci_dev, PCI_COMMAND, &command);
	command &= ~PCI_COMMAND_MASTER;
	pci_write_config_word(zr->pci_dev, PCI_COMMAND, command);
}
/*
 * zoran_init_hardware - bring the board to a known state.
 * Enables bus mastering, runs the card-specific init hook, programs
 * decoder/encoder norm and input, toggles codec sleep to resync its
 * PLL, initializes the VFE and parks the JPEG unit in idle.
 */
void
zoran_init_hardware (struct zoran *zr)
{
	int j, zero = 0;

	/* Enable bus-mastering */
	zoran_set_pci_master(zr, 1);

	/* Initialize the board */
	if (zr->card.init) {
		zr->card.init(zr);
	}

	j = zr->card.input[zr->input].muxsel;

	decoder_command(zr, 0, NULL);
	decoder_command(zr, DECODER_SET_NORM, &zr->norm);
	decoder_command(zr, DECODER_SET_INPUT, &j);

	encoder_command(zr, 0, NULL);
	encoder_command(zr, ENCODER_SET_NORM, &zr->norm);
	encoder_command(zr, ENCODER_SET_INPUT, &zero);

	/* toggle JPEG codec sleep to sync PLL */
	jpeg_codec_sleep(zr, 1);
	jpeg_codec_sleep(zr, 0);

	/* set individual interrupt enables (without GIRQ1)
	 * but don't global enable until zoran_open() */

	//btwrite(IRQ_MASK & ~ZR36057_ISR_GIRQ1, ZR36057_ICR); // SW
	// It looks like using only JPEGRepIRQEn is not always reliable,
	// may be when JPEG codec crashes it won't generate IRQ? So,
	/*CP*/	//	btwrite(IRQ_MASK, ZR36057_ICR); // Enable Vsync interrupts too. SM    WHY ? LP
	zr36057_init_vfe(zr);

	zr36057_enable_jpg(zr, BUZ_MODE_IDLE);

	btwrite(IRQ_MASK, ZR36057_ISR);	// Clears interrupts
}
/*
 * zr36057_restart - soft-reset the chip.
 * Pulses SoftReset (with 1 ms settle delays), puts the JPEG unit in
 * P_Reset, sets the GPIO direction to all-output, and reprograms the
 * GPIO pins and guest bus timing.
 */
void
zr36057_restart (struct zoran *zr)
{
	btwrite(0, ZR36057_SPGPPCR);
	mdelay(1);
	btor(ZR36057_SPGPPCR_SoftReset, ZR36057_SPGPPCR);
	mdelay(1);

	/* assert P_Reset */
	btwrite(0, ZR36057_JPC);
	/* set up GPIO direction - all output */
	btwrite(ZR36057_SPGPPCR_SoftReset | 0, ZR36057_SPGPPCR);

	/* set up GPIO pins and guest bus timing */
	btwrite((0x81 << 24) | 0x8888, ZR36057_GPPGCR1);
}
/*
* initialize video front end
*/
static void
zr36057_init_vfe (struct zoran *zr)
{
	u32 reg;

	/* pixel format flags: little endian output, normal VCLK polarity,
	 * external field indicator, top field first */
	reg = btread(ZR36057_VFESPFR);
	reg |= ZR36057_VFESPFR_LittleEndian;
	reg &= ~ZR36057_VFESPFR_VCLKPol;
	reg |= ZR36057_VFESPFR_ExtFl;
	reg |= ZR36057_VFESPFR_TopField;
	btwrite(reg, ZR36057_VFESPFR);

	/* enable the Triton-specific display mode except on host bridges
	 * with known Triton DMA problems */
	reg = btread(ZR36057_VDCR);
	if (pci_pci_problems & PCIPCI_TRITON)
		// || zr->revision < 1) // Revision 1 has also Triton support
		reg &= ~ZR36057_VDCR_Triton;
	else
		reg |= ZR36057_VDCR_Triton;
	btwrite(reg, ZR36057_VDCR);
}
/*
* Interface to decoder and encoder chips using i2c bus
*/
/*
 * decoder_command - forward a v4l command to the video decoder over i2c.
 * Returns the decoder driver's result, or -EIO if no decoder is
 * registered.  On the LML33, norm and input changes are bracketed with
 * a Bt819 FIFO reset via the #FRST pin (wired to GPIO 7).
 *
 * Fix: the original condition read
 *     (cmd == DECODER_SET_NORM || DECODER_SET_INPUT)
 * where the second operand is a non-zero constant, making the test
 * always true on LML33.  Compare cmd on both sides.
 */
int
decoder_command (struct zoran *zr,
		 int cmd,
		 void *data)
{
	if (zr->decoder == NULL)
		return -EIO;

	if (zr->card.type == LML33 &&
	    (cmd == DECODER_SET_NORM || cmd == DECODER_SET_INPUT)) {
		int res;

		// Bt819 needs to reset its FIFO buffer using #FRST pin and
		// LML33 card uses GPIO(7) for that.
		GPIO(zr, 7, 0);
		res = zr->decoder->driver->command(zr->decoder, cmd, data);
		// Pull #FRST high.
		GPIO(zr, 7, 1);
		return res;
	} else
		return zr->decoder->driver->command(zr->decoder, cmd,
						    data);
}
/*
 * encoder_command - forward a v4l command to the video encoder over i2c.
 * Returns -1 when no encoder is registered, otherwise the driver result.
 */
int
encoder_command (struct zoran *zr,
		 int cmd,
		 void *data)
{
	if (!zr->encoder)
		return -1;

	return zr->encoder->driver->command(zr->encoder, cmd, data);
}
| {
"pile_set_name": "Github"
} |
/*
* Catroid: An on-device visual programming system for Android devices
* Copyright (C) 2010-2018 The Catrobat Team
* (<http://developer.catrobat.org/credits>)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* An additional term exception under section 7 of the GNU Affero
* General Public License, version 3, is available at
* http://developer.catrobat.org/license_additional_term
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.catrobat.catroid.test.embroidery;
import android.content.Intent;
import android.net.Uri;
import org.catrobat.catroid.common.Constants;
import org.catrobat.catroid.stage.StageActivity;
import org.catrobat.catroid.ui.ExportEmbroideryFileLauncher;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import java.io.File;
import androidx.core.content.FileProvider;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import static junit.framework.Assert.assertEquals;
@RunWith(AndroidJUnit4.class)
public class EmbroideryFileExporterTest {

	private StageActivity stageActivity;
	private String filename = EmbroideryFileExporterTest.class.getName() + ".dst";

	/** Mocks a StageActivity that delegates package info to the test application context. */
	@Before
	public void setUp() {
		stageActivity = Mockito.mock(StageActivity.class);
		Mockito.when(stageActivity.getPackageName())
				.thenReturn(ApplicationProvider.getApplicationContext().getPackageName());
		Mockito.when(stageActivity.getPackageManager())
				.thenReturn(ApplicationProvider.getApplicationContext().getPackageManager());
	}

	/** Verifies the launcher fires an ACTION_CHOOSER wrapping an ACTION_SEND for the .dst file. */
	@Test
	public void testShareSimpleFile() {
		File embroideryFile = new File(Constants.CACHE_DIR, filename);
		Uri fileUri = FileProvider.getUriForFile(stageActivity,
				stageActivity.getPackageName() + ".fileProvider", embroideryFile);

		new ExportEmbroideryFileLauncher(stageActivity, embroideryFile).startActivity();

		ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
		Mockito.verify(stageActivity, Mockito.times(1)).startActivity(intentCaptor.capture());
		Intent chooserIntent = intentCaptor.getValue();
		Intent shareIntent = chooserIntent.getParcelableExtra(Intent.EXTRA_INTENT);

		Intent expectedShareIntent = new Intent(Intent.ACTION_SEND, fileUri);
		expectedShareIntent.setType("application/octet-stream");
		expectedShareIntent.putExtra(Intent.EXTRA_STREAM, fileUri);
		expectedShareIntent.putExtra(Intent.EXTRA_SUBJECT, embroideryFile.getName());
		assertEquals(expectedShareIntent.toUri(0), shareIntent.toUri(0));

		Intent expectedChooserIntent = new Intent(Intent.ACTION_CHOOSER);
		expectedChooserIntent.putExtra(Intent.EXTRA_INTENT, expectedShareIntent);
		expectedChooserIntent.putExtra(Intent.EXTRA_TITLE, "Share embroidery file");
		assertEquals(expectedChooserIntent.toUri(0), chooserIntent.toUri(0));
	}
}
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Jun 9 2015 22:53:21).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2014 by Steve Nygard.
//
#import "TViewController.h"
@class TLayoutBinderTestBox;
// Reverse-engineered declaration (class-dump output) — member semantics are
// inferred from names only and should be confirmed against the implementation.
@interface TLayoutBinderTestSubview_Test1_Controller : TViewController
{
    TLayoutBinderTestBox *_slaveBox;                  // backing ivar for the slaveBox property
    TLayoutBinderTestBox *_subview_A_MasterBox;       // backing ivar for the subview_A_MasterBox property
    struct CGRect _initialSlaveFrame;                 // presumably captured for -reset — TODO confirm
    struct CGRect _initialSubview_A_MasterFrame;      // presumably captured for -reset — TODO confirm
}

@property(readonly, retain) TLayoutBinderTestBox *subview_A_MasterBox; // @synthesize subview_A_MasterBox=_subview_A_MasterBox;
@property(readonly, retain) TLayoutBinderTestBox *slaveBox; // @synthesize slaveBox=_slaveBox;
- (void)reset;
- (void)setIsFlipped:(_Bool)arg1;
- (void)viewLoaded;

@end
| {
"pile_set_name": "Github"
} |
package com.netflix.conductor.tests.utils;
/**
 * Installs the system properties the integration tests expect and tears
 * them down again afterwards. Not instantiable; use the static methods.
 */
public class TestEnvironment {

    /** key/value pairs applied by {@link #setup()}, in declaration order. */
    private static final String[][] TEST_PROPERTIES = {
            {"EC2_REGION", "us-east-1"},
            {"EC2_AVAILABILITY_ZONE", "us-east-1c"},
            {"workflow.elasticsearch.index.name", "conductor"},
            {"workflow.namespace.prefix", "integration-test"},
            {"db", "memory"}
    };

    private TestEnvironment() {}

    private static void setupSystemProperties() {
        for (String[] property : TEST_PROPERTIES) {
            System.setProperty(property[0], property[1]);
        }
    }

    public static void setup() {
        setupSystemProperties();
    }

    public static void teardown() {
        System.setProperties(null);
    }
}
| {
"pile_set_name": "Github"
} |
// Boost.Assign library
//
// Copyright Thorsten Ottosen 2003-2004. Use, modification and
// distribution is subject to the Boost Software License, Version
// 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// For more information, see http://www.boost.org/libs/assign/
//
#ifndef BOOST_ASSIGN_STD_STACK_HPP
#define BOOST_ASSIGN_STD_STACK_HPP
#if defined(_MSC_VER)
# pragma once
#endif
#include <boost/assign/list_inserter.hpp>
#include <boost/config.hpp>
#include <boost/move/utility.hpp>
#include <stack>
namespace boost
{
namespace assign
{
#if defined(BOOST_NO_CXX11_RVALUE_REFERENCES)

    // C++03 path: `stk += v;` takes the value by copy and pushes it onto the
    // stack through the list_inserter returned by push(), so chained
    // `stk += a,b,c;` list-assignment works.
    template< class V, class C, class V2 >
    inline list_inserter< assign_detail::call_push< std::stack<V,C> >, V >
    operator+=( std::stack<V,C>& c, V2 v )
    {
        return push( c )( v );
    }

#else

    // C++11 path: take the value by forwarding reference and perfect-forward
    // it into push(), avoiding the extra copy made by the C++03 overload.
    template< class V, class C, class V2 >
    inline list_inserter< assign_detail::call_push< std::stack<V, C> >, V >
    operator+=(std::stack<V, C>& c, V2&& v)
    {
        return push(c)(boost::forward<V2>(v));
    }

#endif
}
}
#endif
| {
"pile_set_name": "Github"
} |
/// @ref ext_scalar_packing
/// @file glm/ext/scalar_packing.hpp
///
/// @see core (dependence)
///
/// @defgroup ext_scalar_packing GLM_EXT_scalar_packing
/// @ingroup ext
///
/// Include <glm/ext/scalar_packing.hpp> to use the features of this extension.
///
/// This extension provides a set of function to convert scalar values to packed
/// formats.
#pragma once
// Dependency:
#include "../detail/qualifier.hpp"
#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
# pragma message("GLM: GLM_EXT_scalar_packing extension included")
#endif
namespace glm
{
	/// @addtogroup ext_scalar_packing
	/// @{

	// The actual packing-function declarations/definitions for this
	// extension are supplied by scalar_packing.inl, which this header
	// includes below; this namespace block only opens the doxygen group.

	/// @}
}// namespace glm
#include "scalar_packing.inl"
| {
"pile_set_name": "Github"
} |
/* -*- mode: C++; c-basic-offset: 4; indent-tabs-mode: nil -*- */
// vim: ft=cpp:expandtab:ts=8:sw=4:softtabstop=4:
#ident "$Id$"
/*======
This file is part of PerconaFT.
Copyright (c) 2006, 2015, Percona and/or its affiliates. All rights reserved.
PerconaFT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2,
as published by the Free Software Foundation.
PerconaFT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PerconaFT. If not, see <http://www.gnu.org/licenses/>.
----------------------------------------
PerconaFT is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License, version 3,
as published by the Free Software Foundation.
PerconaFT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with PerconaFT. If not, see <http://www.gnu.org/licenses/>.
======= */
#ident "Copyright (c) 2006, 2015, Percona and/or its affiliates. All rights reserved."
#include <toku_portability.h>
#include <memory.h>
#include <toku_portability.h>
#include <db.h>
#include <errno.h>
#include <sys/stat.h>
#include "test.h"
// TOKU_TEST_FILENAME is defined in the Makefile
#define FNAME "foo.tokudb"           // database file name within the test env
const char *name = NULL;             // sub-database name for DB->open (NULL or "bar"; set in test_main)

#define NUM 3                        // number of distinct descriptors generated
#define MAX_LENGTH (1<<16)           // maximum descriptor payload size, in bytes
int order[NUM+1];                    // random permutation of 0..NUM-1 (only first NUM entries used; see permute_order)
uint32_t length[NUM];                // payload length of each generated descriptor
uint8_t data[NUM][MAX_LENGTH];       // random payload bytes for each descriptor
DBT descriptors[NUM];                // the DBTs handed to DB->change_descriptor
DB_ENV *env;                         // shared test environment
enum {NUM_DBS=2};                    // at most two handles open on the same dictionary
DB *dbs[NUM_DBS];                    // open handles (NULL when closed)
DB_TXN *txn = NULL;                  // long-lived transaction driven by open_db/close_db
DB_TXN *null_txn;                    // convenience NULL parent transaction
int last_open_descriptor = -1;       // index of the descriptor currently installed; -1 if none
int abort_type;                      // 0: commit; 1: abort after close; 2: open inside the txn, then abort
int get_table_lock;                  // nonzero: pre_acquire_table_lock after each open
uint64_t num_called = 0;             // comparator invocation count (see verify_int_cmp)
// Check that every open DB handle carries the descriptor we last installed.
// While no descriptor has been set (last_open_descriptor < 0) the handle's
// descriptor DBT must be empty; otherwise it must match descriptors[i] in
// size and contents but live in a *different* buffer (the engine keeps its
// own copy of the DBT we passed in).
static void
verify_db_matches(void) {
    DB *db;
    int which;
    for (which = 0; which < NUM_DBS; which++) {
        db = dbs[which];
        if (db) {
            const DBT * dbt = &db->descriptor->dbt;
            if (last_open_descriptor<0) {
                // No descriptor installed yet: expect the empty DBT.
                assert(dbt->size == 0 && dbt->data == NULL);
            }
            else {
                assert(last_open_descriptor < NUM);
                // Same length and same bytes as the descriptor we set ...
                assert(dbt->size == descriptors[last_open_descriptor].size);
                assert(!memcmp(dbt->data, descriptors[last_open_descriptor].data, dbt->size));
                // ... but stored in the engine's own copy, not our buffer.
                assert(dbt->data != descriptors[last_open_descriptor].data);
            }
        }
    }
}
// Comparison callback installed on the environment: every key comparison
// first re-verifies that each open handle still carries the expected
// descriptor, then delegates to the ordinary integer DBT comparison.
static int
verify_int_cmp (DB *dbp, const DBT *a, const DBT *b) {
    ++num_called;
    verify_db_matches();
    return int_dbt_cmp(dbp, a, b);
}
// Create/open dbs[which] (the slot must be free) and, when descriptor >= 0,
// install descriptors[descriptor] on the handle.
//
// Transaction handling depends on the global abort_type:
//   abort_type==2: the long-lived txn is begun *before* the open, so the
//                  open (and file creation) happen inside it and a later
//                  abort undoes them;
//   otherwise:     the open happens outside a txn and the long-lived txn is
//                  begun only after the descriptor is installed.
// When get_table_lock is set, a table lock is pre-acquired in the txn.
static void
open_db(int descriptor, int which) {
    /* create the dup database file */
    assert(dbs[which]==NULL);
    DB *db;
    int r = db_create(&db, env, 0);
    CKERR(r);
    dbs[which] = db;
    assert(abort_type >=0 && abort_type <= 2);
    if (abort_type==2 && !txn) {
        r = env->txn_begin(env, null_txn, &txn, 0);
        CKERR(r);
        last_open_descriptor = -1; //DB was destroyed at end of last close, did not hang around.
    }
    r = db->open(db, txn, FNAME, name, DB_BTREE, DB_CREATE, 0666);
    CKERR(r);
    if (descriptor >= 0) {
        assert(descriptor < NUM);
        if (txn) {
            // Already inside the long-lived txn: change the descriptor there.
            { int chk_r = db->change_descriptor(db, txn, &descriptors[descriptor], 0); CKERR(chk_r); }
        }
        else {
            // No txn yet: install the descriptor in its own committed txn.
            IN_TXN_COMMIT(env, NULL, txn_desc, 0, {
                { int chk_r = db->change_descriptor(db, txn_desc, &descriptors[descriptor], 0); CKERR(chk_r); }
            });
        }
        last_open_descriptor = descriptor;
    }
    verify_db_matches();
    if (abort_type!=2 && !txn) {
        r = env->txn_begin(env, null_txn, &txn, 0);
        CKERR(r);
    }
    assert(txn);
    if (get_table_lock) {
        r = db->pre_acquire_table_lock(db, txn);
        CKERR(r);
    }
}
// Remove the database file from the environment. Every handle must already
// be closed. When abort_type==2 the enclosing transaction was aborted, so
// the creation itself was rolled back and dbremove must report ENOENT.
static void
delete_db(void) {
    for (int i = 0; i < NUM_DBS; i++) {
        assert(dbs[i] == NULL);
    }
    int r = env->dbremove(env, NULL, FNAME, name, 0);
    if (abort_type == 2) {
        CKERR2(r, ENOENT); // the abort already deleted it
    } else {
        CKERR(r);
    }
    last_open_descriptor = -1;
}
// Close dbs[which] and resolve the long-lived transaction.
//
// which==1 is the secondary handle: it is simply closed and the txn is left
// untouched. For which==0 the global abort_type decides the order:
//   abort_type>0:  close the handle(s) first, then abort the txn (for
//                  abort_type==2 the secondary handle must be closed first,
//                  since the abort rolls back the file creation itself);
//   abort_type==0: commit the txn, then close the handle.
static void
close_db(int which) {
    assert(dbs[which]!=NULL);
    DB *db = dbs[which];
    dbs[which] = NULL;
    int r;
    if (which==1) {
        // Secondary handle: plain close, transaction unaffected.
        r = db->close(db, 0);
        CKERR(r);
        return;
    }
    if (abort_type>0) {
        if (abort_type==2 && dbs[1]) {
            close_db(1);
        }
        r = db->close(db, 0);
        CKERR(r);
        r = txn->abort(txn);
        CKERR(r);
    }
    else {
        r = txn->commit(txn, 0);
        CKERR(r);
        r = db->close(db, 0);
        CKERR(r);
    }
    txn = NULL;
}
// Create the test environment (with verify_int_cmp as the default key
// comparator) and generate NUM descriptors of evenly spread sizes, from 0
// up to MAX_LENGTH bytes, each filled with random payload.
static void
setup_data(void) {
    int r = db_env_create(&env, 0);
    CKERR(r);
    r = env->set_default_bt_compare(env, verify_int_cmp);
    CKERR(r);
    const int envflags = DB_CREATE | DB_INIT_MPOOL | DB_INIT_TXN | DB_INIT_LOCK | DB_THREAD | DB_PRIVATE;
    r = env->open(env, TOKU_TEST_FILENAME, envflags, S_IRWXU + S_IRWXG + S_IRWXO);
    CKERR(r);
    for (int i = 0; i < NUM; i++) {
        length[i] = i * MAX_LENGTH / (NUM - 1);
        for (uint32_t j = 0; j < length[i]; j++) {
            data[i][j] = (uint8_t)(random() & 0xFF);
        }
        memset(&descriptors[i], 0, sizeof(descriptors[i]));
        descriptors[i].size = length[i];
        descriptors[i].data = &data[i][0];
    }
    last_open_descriptor = -1;
    txn = NULL;
}
// Fill order[0..NUM-1] with the identity permutation and shuffle it with a
// Fisher-Yates pass driven by random().
static void
permute_order(void) {
    for (int i = 0; i < NUM; i++) {
        order[i] = i;
    }
    for (int i = 0; i < NUM; i++) {
        int j = i + (random() % (NUM - i)); // uniform pick from [i, NUM)
        int tmp = order[i];
        order[i] = order[j];
        order[j] = tmp;
    }
}
// Insert n sequential integer keys into dbs[which] inside the current txn.
// which==-1 means "every currently open handle". Keys come from a static
// counter so they keep increasing across calls. Because the environment's
// comparator is verify_int_cmp, each put after the first in a batch must
// have invoked the comparator (checked via num_called), which in turn
// re-verifies the descriptor on every open handle.
static void
test_insert (int n, int which) {
    if (which == -1) {
        // Fan out to every open handle.
        for (which = 0; which < NUM_DBS; which++) {
            if (dbs[which]) {
                test_insert(n, which);
            }
        }
        return;
    }
    assert(dbs[which]!=NULL);
    DB *db = dbs[which];
    int i;
    static int last = 0;  // monotonically increasing key across all calls
    for (i=0; i<n; i++) {
        int k = last++;
        DBT key, val;
        uint64_t called = num_called;
        int r = db->put(db, txn, dbt_init(&key, &k, sizeof k), dbt_init(&val, &i, sizeof i), 0);
        if (i>0) assert(num_called > called);  // the comparator must have run for this put
        CKERR(r);
    }
}
// One full pass of the descriptor test for the current (name, abort_type,
// get_table_lock) combination: recreate the environment, then repeatedly
// open the dictionary while upgrading its descriptor, inserting rows, and
// closing -- first with a single handle, then with a second handle held
// open across the descriptor changes. Note: permute_order()'s result is
// only consumed by the commented-out historical section below.
static void
runtest(void) {
    int r;
    toku_os_recursive_delete(TOKU_TEST_FILENAME);
    r=toku_os_mkdir(TOKU_TEST_FILENAME, S_IRWXU+S_IRWXG+S_IRWXO);
    assert(r==0);
    setup_data();
    permute_order();

    int i;
    /* Subsumed by rest of test.
    for (i=0; i < NUM; i++) {
        open_db(-1, 0);
        test_insert(i, 0);
        close_db(0);
        open_db(-1, 0);
        test_insert(i, 0);
        close_db(0);
        delete_db();
    }
    for (i=0; i < NUM; i++) {
        open_db(order[i], 0);
        test_insert(i, 0);
        close_db(0);
        open_db(-1, 0);
        test_insert(i, 0);
        close_db(0);
        open_db(order[i], 0);
        test_insert(i, 0);
        close_db(0);
        delete_db();
    }
    */
    // Upgrade descriptors along the way. Descriptor versions must increase,
    // so use the sequential index i rather than the shuffled order[i].
    for (i=0; i < NUM; i++) {
        open_db(i, 0);
        test_insert(i, 0);
        close_db(0);
        open_db(-1, 0);
        test_insert(i, 0);
        close_db(0);
        open_db(i, 0);
        test_insert(i, 0);
        close_db(0);
    }
    delete_db();

    // Upgrade descriptors along the way, this time with a second handle
    // (dbs[1]) held open across all the descriptor changes.
    open_db(-1, 1);
    for (i=0; i < NUM; i++) {
        open_db(i, 0);
        test_insert(i, -1);
        close_db(0);
        open_db(-1, 0);
        test_insert(i, -1);
        close_db(0);
        open_db(i, 0);
        test_insert(i, -1);
        close_db(0);
    }
    // The secondary handle may already have been closed by close_db(0)
    // when abort_type==2.
    if (dbs[1]) {
        close_db(1);
    }
    delete_db();

    env->close(env, 0);
}
int
test_main(int argc, char *const argv[]) {
parse_args(argc, argv);
for (abort_type = 0; abort_type < 3; abort_type++) {
for (get_table_lock = 0; get_table_lock < 2; get_table_lock++) {
name = NULL;
runtest();
name = "bar";
runtest();
}
}
return 0;
}
| {
"pile_set_name": "Github"
} |
"use strict";
module.exports = function (t, a) {
a(typeof t, "number");
};
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.samples.cronet_sample.data;
/**
 * Static repository of sample image URLs used by the Cronet sample app.
 */
public class ImageRepository {

    /**
     * The sample image URLs. The reference is {@code final} so it cannot be
     * accidentally repointed at runtime (the previous non-final mutable
     * static was a latent bug source).
     */
    private static final String[] IMAGE_URLS = {
            "https://storage.googleapis.com/cronet/sun.jpg",
            "https://storage.googleapis.com/cronet/flower.jpg",
            "https://storage.googleapis.com/cronet/chair.jpg",
            "https://storage.googleapis.com/cronet/white.jpg",
            "https://storage.googleapis.com/cronet/moka.jpg",
            "https://storage.googleapis.com/cronet/walnut.jpg"
    };

    /** Returns the number of available sample images. */
    public static int numberOfImages() {
        return IMAGE_URLS.length;
    }

    /**
     * Returns the URL of the image at the given position.
     *
     * @param position index in the range [0, {@link #numberOfImages()});
     *                 out-of-range values throw
     *                 {@link ArrayIndexOutOfBoundsException}, as before
     */
    public static String getImage(int position) {
        return IMAGE_URLS[position];
    }
}
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.