content
stringlengths
10
4.9M
// <reponame>iti-luebeck/HANSE2011
#include "form_uid.h"
#include "ui_form_uid.h"

// Settings/diagnostics widget for the UID module: shows the persisted
// "uidId" and "timeout" settings, saves edits back, and can list the I2C
// slave addresses reported by a bus scan.
FormUID::FormUID(Module_UID* module, QWidget *parent) :
    QWidget(parent),
    ui(new Ui::Form_UID)
{
    ui->setupUi(this);
    this->module = module;
    // Pre-fill the editors with the currently persisted settings.
    ui->uidId->setText(module->getSettingsValue("uidId").toString());
    ui->timeout->setText(module->getSettingsValue("timeout").toString());
}

FormUID::~FormUID()
{
    delete ui;
}

// Standard Qt pattern: re-apply translations when the application
// language changes at runtime.
void FormUID::changeEvent(QEvent *e)
{
    QWidget::changeEvent(e);
    switch (e->type()) {
    case QEvent::LanguageChange:
        ui->retranslateUi(this);
        break;
    default:
        break;
    }
}

// "Save" button: persist the edited values via the module's settings store.
void FormUID::on_save_clicked()
{
    module->setSettingsValue("uidId", ui->uidId->text());
    module->setSettingsValue("timeout", ui->timeout->text());
}

// "Scan" button: run an I2C bus scan and list each responding slave
// address in hexadecimal.
void FormUID::on_scan_clicked()
{
    QVector<unsigned char> slaves = module->I2C_Scan();
    ui->slaves->clear();
    foreach (unsigned int slave, slaves) {
        ui->slaves->addItem("0x"+QString::number(slave,16));
    }
}
def _read_levels(path):
    """Parse the puzzle input file into a 2D grid of single-digit ints."""
    with open(path) as f:
        return [[int(ch) for ch in line.strip()] for line in f]


class Octopus:
    """One dumbo octopus: an energy level plus links to its neighbors.

    An octopus flashes at most once per step, when its level reaches 10;
    flashing bumps every neighbor, which may chain into further flashes.
    """

    def __init__(self, level) -> None:
        self.level = level        # current energy level
        self.neighbors = []       # adjacent Octopus objects (incl. diagonals)
        self.flashed = False      # True once this octopus flashed this step
        self.flash_count = 0      # total flashes over the whole simulation

    def add_neighbor(self, neighbor) -> None:
        """Register an adjacent octopus."""
        self.neighbors.append(neighbor)

    def step(self) -> None:
        """Start-of-step energy gain."""
        self.level += 1

    def increment(self) -> None:
        """Energy gain caused by a neighboring flash (kept separate from
        step() for interface compatibility; same effect)."""
        self.level += 1

    def flash(self) -> None:
        """Flash if energized (level >= 10) and not yet flashed this step.

        Recursively propagates: each neighbor is incremented and then given
        the chance to flash in turn. The final state is order-independent
        because each octopus flashes at most once per step.
        """
        if self.level >= 10 and not self.flashed:
            self.flashed = True
            self.flash_count += 1
            for neighbor in self.neighbors:
                neighbor.increment()
                neighbor.flash()

    def reset(self) -> None:
        """End-of-step cleanup: flashed octopuses drop back to level 0."""
        if self.level >= 10:
            self.level = 0
        self.flashed = False


def get_adjacents(row, col, max_rows, max_cols):
    """Return in-bounds (row, col) pairs for all 8 neighbors of a cell."""
    adjacents = []
    for dr in (-1, 0, 1):
        for dc in (-1, 0, 1):
            if dr == 0 and dc == 0:
                continue
            r, c = row + dr, col + dc
            if 0 <= r < max_rows and 0 <= c < max_cols:
                adjacents.append((r, c))
    return adjacents


def create_grid(input):
    """Build the Octopus grid from a 2D list of ints and wire up neighbors."""
    max_rows = len(input)
    max_cols = len(input[0])
    octopi = [[Octopus(input[i][j]) for j in range(max_cols)]
              for i in range(max_rows)]
    for row in range(max_rows):
        for col in range(max_cols):
            for r, c in get_adjacents(row, col, max_rows, max_cols):
                octopi[row][col].add_neighbor(octopi[r][c])
    return octopi


def _run_step(octopi):
    """Advance the grid one step: energize everything, then cascade flashes."""
    for row in octopi:
        for octopus in row:
            octopus.step()
    for row in octopi:
        for octopus in row:
            octopus.flash()


def count_flashes(octopi, n_steps):
    """Run n_steps full steps and return the total number of flashes."""
    for _ in range(n_steps):
        _run_step(octopi)
        for row in octopi:
            for octopus in row:
                octopus.reset()
    return sum(octopus.flash_count for row in octopi for octopus in row)


def get_first_simultaneous(octopi):
    """Return the first step number on which every octopus flashes at once."""
    total_octopi = len(octopi) * len(octopi[0])
    step_count = 0
    while True:
        _run_step(octopi)
        step_count += 1
        flashed = sum(octopus.flashed for row in octopi for octopus in row)
        if flashed == total_octopi:
            return step_count
        for row in octopi:
            for octopus in row:
                octopus.reset()


def solve_p1(input):
    """Part 1: total flashes after 100 steps."""
    return count_flashes(create_grid(input), n_steps=100)


def solve_p2(input):
    """Part 2: first step where all octopuses flash simultaneously."""
    return get_first_simultaneous(create_grid(input))


if __name__ == '__main__':
    # Read the input and print answers only when run as a script: the
    # original did this at import time (and shadowed the builtin `input`),
    # which made the module unimportable without the data file present.
    puzzle = _read_levels('src/day11/input.txt')
    print("Part 1:", solve_p1(puzzle))
    print("Part 2:", solve_p2(puzzle))
def grid_search(dataset, alpha_values, rho_values, beta):
    """Sweep an (alpha, rho) grid of background-subtraction settings.

    Trains one model on the dataset's train split, then scores every
    (rho, alpha) combination on the test split with the F-score for the
    given beta, printing each result as it is computed.

    Returns a float32 array indexed as [rho_index, alpha_index].
    """
    # NOTE(review): cdnet / bg_subtraction / metrics are project modules;
    # their return shapes are assumed from how they are used here.
    train = cdnet.read_sequence('week3', dataset, 'train',
                                colorspace='gray', annotated=False)
    test, gt = cdnet.read_sequence('week3', dataset, 'test',
                                   colorspace='gray', annotated=True)
    model = bg_subtraction.create_model(train)

    f_results = np.zeros((len(rho_values), len(alpha_values)),
                         dtype='float32')
    for row, rho in enumerate(rho_values):
        for col, alpha in enumerate(alpha_values):
            predictions = bg_subtraction.predict(test, model, alpha, rho=rho)
            scores = metrics.eval_from_mask(predictions, gt[:, 0], gt[:, 1])
            f_results[row, col] = metrics.f_score(scores, beta=beta)
            print('- alpha {:0.2f}, rho {:0.2f}: {:0.4f}'.format(
                alpha, rho, f_results[row, col]))
    return f_results
Former FBI Director James Comey's testimony on Thursday backed up some of the anonymous-sourced news reports about the FBI, but Comey took exception to one specific New York Times story from February. "In the main, it was not true," Comey told the Senate Intelligence Committee, disputing a February 14 story titled "Trump Campaign Aides Had Repeated Contacts With Russian Intelligence." Comey's comment was part of a broader media critique. But The Times shot back a few hours after his testimony, saying it has found "no evidence that any prior reporting was inaccurate." Comey never specified what portions of the story were supposedly wrong. In a statement, The Times said, "Neither the F.B.I., nor Mr. Comey would comment or elaborate on what Mr. Comey believes to be incorrect. Should they provide more information, we would review that as well." At issue is the reliability of anonymous sources and the judgment of news organizations who report information from these sources. Media critics, particularly pro-Trump voices on the right, have been skeptical and sometimes downright hostile toward news outlets that have relied on anonymous sources for information about ongoing probes into Russian interference in the 2016 election. The news organizations say they have to protect their sources in some situations. But journalists have occasionally been led astray by sources, resulting in corrections or clarifications to stories. The Times article on February 14 had new details about Trump campaign officials having "repeated contacts with senior Russian intelligence officials in the year before the election." It was attributed to "four current and former American officials, all of whom spoke on the condition of anonymity because the information was classified." The story evidently set off alarm bells in Washington. Sen.
James Risch, Republican of Idaho, said at Thursday's hearing that Comey talked with some lawmakers shortly after the story came out and told them the story was off-base. Risch said: "You sought out both Republican and Democrat senators to tell them that, hey, I don't know where this is coming from, but this is not the -- this is not factual. Do you recall that?" "Yes," Comey confirmed. "In the main, it was not true," Comey added. "And, again, all of you know this, maybe the American people don't. The challenge -- and I'm not picking on reporters about writing stories about classified information -- is that people talking about it often don't really know what's going on. And those of us who actually know what's going on are not talking about it. And we don't call the press to say, 'Hey, you got that thing wrong about this sensitive topic.' We just have to leave it there." Before the hearing even ended, some conservative media commentators were using Comey's comments to disparage The Times story as "fake news." Related: Pro-Trump media claims vindication in Comey hearing Later in the day, the Republican National Committee circulated a message to reporters titled "The New York Times Has Some Explaining To Do." The Times published a detailed followup around 5 p.m. on Thursday. It noted that "multiple news outlets have since published accounts that support the main elements of The Times's article, including information about phone calls and in-person meetings between Mr. Trump's advisers and Russians, some believed to be connected to Russian intelligence." Part of the dispute might be about the definition of Russian "intelligence officers." CNN published a similar report about communication during the campaign between advisers close to Trump and Russians known to U.S. intelligence. CNN stands by the story.
// Created from Raspberry Pi Pico example:
// https://github.com/raspberrypi/pico-examples/tree/master/pio/ws2812
/**
 * Copyright (c) 2020 Raspberry Pi (Trading) Ltd.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
// Modified by <NAME> 2022

#include <stdio.h>
#include <stdlib.h>
#include "ws2812.h"

/* Push one packed pixel word to the PIO state machine driving the ring.
 * The <<8u shifts the 24-bit color into the top bits, as the WS2812 PIO
 * program expects. */
static inline void put_pixel_ring(uint32_t pixel_grb) {
    pio_sm_put_blocking(pio0, RING_SM, pixel_grb << 8u);
}

/* Same as put_pixel_ring, but for the state machine driving the matrix. */
static inline void put_pixel_matrix(uint32_t pixel_grb) {
    pio_sm_put_blocking(pio0, MATRIX_SM, pixel_grb << 8u);
}

/* Pack r/g/b into the GRB wire order used by WS2812 LEDs:
 * green ends up in the most significant byte after the <<8u above. */
static inline uint32_t urgb_u32(uint8_t r, uint8_t g, uint8_t b) {
    return ((uint32_t) (r) << 8) | ((uint32_t) (g) << 16) | (uint32_t) (b);
}

/* Fill the ring with one color. brt acts as an 8-bit brightness scale
 * (value * brt / 256). */
void solidRingColor(Color *color, uint8_t numLEDs) {
    uint32_t red = (color->red * color->brt) >> 8;
    uint32_t grn = (color->grn * color->brt) >> 8;
    uint32_t blu = (color->blu * color->brt) >> 8;
    for (uint8_t i = 0; i < numLEDs; i++) {
        put_pixel_ring(urgb_u32((uint8_t) red, (uint8_t) grn, (uint8_t) blu));
    }
}

/* Fill the matrix with one brightness-scaled color. */
void solidMatrixColor(Color *color, uint8_t numLEDs) {
    uint32_t red = (color->red * color->brt) >> 8;
    uint32_t grn = (color->grn * color->brt) >> 8;
    uint32_t blu = (color->blu * color->brt) >> 8;
    for (uint8_t i = 0; i < numLEDs; i++) {
        put_pixel_matrix(urgb_u32((uint8_t) red, (uint8_t) grn, (uint8_t) blu));
    }
}

/* Paint the top half of the matrix in the given color, then render the
 * bottom half as 4 horizontal bands of decreasing brightness.
 * size[0] = rows, size[1] = columns. */
void matrixMono(Color *color, uint8_t *size) {
    uint32_t red = (color->red * color->brt) >> 8;
    uint32_t grn = (color->grn * color->brt) >> 8;
    uint32_t blu = (color->blu * color->brt) >> 8;
    uint8_t rows = size[0];
    uint8_t cols = size[1];
    uint8_t firstMonoPixel = (rows >> 1)*cols;  /* pixel count of the top half */
    uint8_t monoCols = cols >> 2;               /* quarter-width band segments */
    /* NOTE(review): this also overwrites `rows` with rows/2; `rows` is not
     * read again below, so only monoRows is actually used. */
    uint8_t monoRows = rows = (rows >> 1);
    for (uint8_t i = 0; i < firstMonoPixel; i++) {
        put_pixel_matrix(urgb_u32((uint8_t) red, (uint8_t) grn, (uint8_t) blu));
    }
    for (uint8_t i = 0; i < monoRows; i++) {
        for (uint8_t j = 0; j < 4; j++) {
            for (uint8_t k = 0; k < monoCols; k++ ) {
                /* Band brightness steps down with j: 3*brt, 2*brt, 1*brt, 0.
                 * NOTE(review): brtInit is uint8_t, so color->brt * 3 wraps
                 * mod 256 whenever brt > 85 — confirm this is intended. */
                uint8_t brtInit = color->brt *3;
                uint8_t brtFactor = j*(color->brt);
                red = (color->red * (brtInit - brtFactor)) >> 8;
                grn = (color->grn * (brtInit - brtFactor)) >> 8;
                blu = (color->blu * (brtInit - brtFactor)) >> 8;
                put_pixel_matrix(urgb_u32((uint8_t) red, (uint8_t) grn, (uint8_t) blu));
            }
        }
    }
}
Understanding Members’ Active Participation in Online Question-and-Answer Communities: A Theory and Empirical Analysis Abstract Community-based question-and-answer (Q&A) websites have become increasingly popular in recent years as an alternative to general-purpose Web search engines for open-ended complex questions. Despite their unique contextual characteristics, only a handful of Q&A websites have been successful in sustaining members’ active participation that, unlike lurking, consists of not only posting questions but also answering others’ inquiries. Because the specific design of the information technology artifacts on Q&A websites can influence their level of success, studying leading Q&A communities such as Yahoo! Answers (YA) provides insights into more effective design mechanisms. We tested a goal-oriented action framework using data from 2,920 YA users, and found that active online participation is largely driven by artifacts (e.g., incentives), membership (e.g., levels of membership and tenure), and habit (e.g., past behavior). This study contributes to the information systems literature by showing that active participation can be understood as the setting, pursuit, and automatic activation of goals.
A Discrete Event Simulation Model for Awarding of Works Contract in the Government – A Case Study. Government departments procure a large variety of products and services in order to carry out their normal operational responsibilities and to implement various plan policies. For effective execution of these tasks the government asks for quotations (Request For Quotation) from different contractors. After receiving the quotations from contractors, a sequence of procedures is followed before the award of contract is made, which is a lengthy process that can even take months before the contract is finally awarded. Hence there is a need for analysis of the existing system to know the causes and effects of the high turnaround time of the contract awarding process. This paper gives insight into the contract awarding procedure in government.
package pokecube.core.interfaces.capabilities.impl;

import java.util.UUID;
import java.util.logging.Level;

import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.nbt.NBTTagString;
import pokecube.core.interfaces.PokecubeMod;
import pokecube.core.interfaces.pokemob.ai.CombatStates;
import pokecube.core.interfaces.pokemob.ai.GeneralStates;
import pokecube.core.interfaces.pokemob.ai.LogicStates;
import pokecube.core.utils.TagNames;
import thut.lib.CompatWrapper;

/**
 * NBT persistence layer for a pokemob. Groups the mob's data into named
 * sub-tags (ownership, stats, moves, inventory, breeding, visuals, AI, misc)
 * and provides symmetric read/write of each category.
 */
public abstract class PokemobSaves extends PokemobOwned implements TagNames
{
    /**
     * Drops any AI state flags that are not meant to survive a save/load
     * cycle, keeping only states whose enum says they persist.
     */
    private void cleanLoadedAIStates()
    {
        // First clear out any non-persistant ai states from logic states
        for (LogicStates state : LogicStates.values())
        {
            if (!state.persists()) this.setLogicState(state, false);
        }
        // Then clean up general states
        for (GeneralStates state : GeneralStates.values())
        {
            if (!state.persists()) this.setGeneralState(state, false);
        }
        // Finally cleanup combat states
        for (CombatStates state : CombatStates.values())
        {
            if (!state.persists()) this.setCombatState(state, false);
        }
    }

    /**
     * Restores this pokemob from the compound produced by
     * {@link #writePokemobData()}. Every sub-tag is optional: empty tags are
     * skipped so partial data loads without errors.
     *
     * @param tag root compound holding the per-category sub-tags
     */
    @Override
    public void readPokemobData(NBTTagCompound tag)
    {
        NBTTagCompound ownerShipTag = tag.getCompoundTag(OWNERSHIPTAG);
        NBTTagCompound statsTag = tag.getCompoundTag(STATSTAG);
        NBTTagCompound movesTag = tag.getCompoundTag(MOVESTAG);
        NBTTagCompound inventoryTag = tag.getCompoundTag(INVENTORYTAG);
        NBTTagCompound breedingTag = tag.getCompoundTag(BREEDINGTAG);
        NBTTagCompound visualsTag = tag.getCompoundTag(VISUALSTAG);
        NBTTagCompound aiTag = tag.getCompoundTag(AITAG);
        NBTTagCompound miscTag = tag.getCompoundTag(MISCTAG);
        // Read Ownership Tag
        if (!ownerShipTag.hasNoTags())
        {
            this.setPokemobTeam(ownerShipTag.getString(TEAM));
            this.setPokemonNickname(ownerShipTag.getString(NICKNAME));
            this.players = ownerShipTag.getBoolean(PLAYERS);
            // UUIDs are stored as strings; malformed values are logged and skipped.
            try
            {
                if (ownerShipTag.hasKey(OT))
                    this.setOriginalOwnerUUID(UUID.fromString(ownerShipTag.getString(OT)));
            }
            catch (Exception e)
            {
                e.printStackTrace();
            }
            try
            {
                if (ownerShipTag.hasKey(OWNER))
                    this.setPokemonOwner(UUID.fromString(ownerShipTag.getString(OWNER)));
            }
            catch (Exception e)
            {
                e.printStackTrace();
            }
        }
        // Read stats tag
        if (!statsTag.hasNoTags())
        {
            this.setExp(statsTag.getInteger(EXP), false);
            this.setStatus(statsTag.getByte(STATUS));
            addHappiness(statsTag.getInteger(HAPPY));
        }
        // Read moves tag
        if (!movesTag.hasNoTags())
        {
            getMoveStats().newMoves.clear();
            if (movesTag.hasKey(NEWMOVES))
            {
                try
                {
                    NBTTagList newMoves = (NBTTagList) movesTag.getTag(NEWMOVES);
                    // De-duplicate while restoring the pending-move list.
                    for (int i = 0; i < newMoves.tagCount(); i++)
                        if (!getMoveStats().newMoves.contains(newMoves.getStringTagAt(i)))
                            getMoveStats().newMoves.add(newMoves.getStringTagAt(i));
                }
                catch (Exception e)
                {
                    PokecubeMod.log(Level.WARNING, "Error loading new moves for " + getEntity().getName(), e);
                }
            }
            this.setMoveIndex(movesTag.getInteger(MOVEINDEX));
            this.setAttackCooldown(movesTag.getInteger(COOLDOWN));
            int[] disables = movesTag.getIntArray(DISABLED);
            // A valid disable-timer array always has one entry per move slot;
            // it is only written at all when a timer was active (see write side).
            if (disables.length == 4) for (int i = 0; i < 4; i++)
            {
                setDisableTimer(i, disables[i]);
            }
        }
        // Read Inventory tag
        if (!inventoryTag.hasNoTags())
        {
            NBTTagList nbttaglist = inventoryTag.getTagList(ITEMS, 10);
            for (int i = 0; i < nbttaglist.tagCount(); ++i)
            {
                NBTTagCompound nbttagcompound1 = nbttaglist.getCompoundTagAt(i);
                int j = nbttagcompound1.getByte("Slot") & 255;
                if (j < this.getPokemobInventory().getSizeInventory())
                {
                    this.getPokemobInventory().setInventorySlotContents(j, new ItemStack(nbttagcompound1));
                }
                // NOTE(review): runs once per list entry; slot 1 mirrors the
                // held item (see writePokemobData), so only the final call matters.
                this.setHeldItem(this.getPokemobInventory().getStackInSlot(1));
            }
        }
        // Read Breeding tag
        if (!breedingTag.hasNoTags())
        {
            this.loveTimer = breedingTag.getInteger(SEXETIME);
        }
        // Read visuals tag
        if (!visualsTag.hasNoTags())
        {
            dataSync().set(params.SPECIALINFO, visualsTag.getInteger(SPECIALTAG));
            // Undo the config scale factor applied to the stored size.
            setSize((float) (getSize() / PokecubeMod.core.getConfig().scalefactor));
            int[] flavourAmounts = visualsTag.getIntArray(FLAVOURSTAG);
            if (flavourAmounts.length == 5) for (int i = 0; i < flavourAmounts.length; i++)
            {
                setFlavourAmount(i, flavourAmounts[i]);
            }
            if (visualsTag.hasKey(POKECUBE))
            {
                NBTTagCompound pokecubeTag = visualsTag.getCompoundTag(POKECUBE);
                this.setPokecube(new ItemStack(pokecubeTag));
            }
        }
        // Read AI
        if (!aiTag.hasNoTags())
        {
            setTotalCombatState(aiTag.getInteger(COMBATSTATE));
            setTotalGeneralState(aiTag.getInteger(GENERALSTATE));
            setTotalLogicState(aiTag.getInteger(LOGICSTATE));
            // Strip transient flags that should not survive a reload.
            cleanLoadedAIStates();
            setHungerTime(aiTag.getInteger(HUNGER));
            NBTTagCompound routines = aiTag.getCompoundTag(AIROUTINES);
            for (String s : routines.getKeySet())
            {
                // try/catch block incase addons add more routines to the enum.
                try
                {
                    AIRoutine routine = AIRoutine.valueOf(s);
                    setRoutineState(routine, routines.getBoolean(s));
                }
                catch (Exception e)
                {
                }
            }
        }
        // Read Misc other
        if (!miscTag.hasNoTags())
        {
            this.setRNGValue(miscTag.getInteger(RNGVAL));
            this.uid = miscTag.getInteger(UID);
            this.wasShadow = miscTag.getBoolean(WASSHADOW);
        }
    }

    /**
     * Serializes this pokemob into a fresh compound, grouped into the same
     * sub-tags consumed by {@link #readPokemobData(NBTTagCompound)}.
     *
     * @return the populated root compound (format VERSION 1)
     */
    @Override
    public NBTTagCompound writePokemobData()
    {
        NBTTagCompound pokemobTag = new NBTTagCompound();
        pokemobTag.setInteger(VERSION, 1);
        // Write Ownership tag
        NBTTagCompound ownerShipTag = new NBTTagCompound();
        // This is still written for pokecubes to read from. Actual number is
        // stored in genes.
        ownerShipTag.setInteger(POKEDEXNB, this.getPokedexNb());
        ownerShipTag.setString(NICKNAME, getPokemonNickname());
        ownerShipTag.setBoolean(PLAYERS, isPlayerOwned());
        ownerShipTag.setString(TEAM, getPokemobTeam());
        if (getOriginalOwnerUUID() != null) ownerShipTag.setString(OT, getOriginalOwnerUUID().toString());
        if (getPokemonOwnerID() != null) ownerShipTag.setString(OWNER, getPokemonOwnerID().toString());
        // Write stats tag
        NBTTagCompound statsTag = new NBTTagCompound();
        statsTag.setInteger(EXP, getExp());
        statsTag.setByte(STATUS, getStatus());
        statsTag.setInteger(HAPPY, bonusHappiness);
        // Write moves tag
        NBTTagCompound movesTag = new NBTTagCompound();
        movesTag.setInteger(MOVEINDEX, getMoveIndex());
        if (!getMoveStats().newMoves.isEmpty())
        {
            NBTTagList newMoves = new NBTTagList();
            for (String s : getMoveStats().newMoves)
            {
                newMoves.appendTag(new NBTTagString(s));
            }
            movesTag.setTag(NEWMOVES, newMoves);
        }
        movesTag.setInteger(COOLDOWN, getAttackCooldown());
        int[] disables = new int[4];
        boolean tag = false;
        for (int i = 0; i < 4; i++)
        {
            disables[i] = getDisableTimer(i);
            tag = tag || disables[i] > 0;
        }
        // Only persist disable timers when at least one is active.
        if (tag)
        {
            movesTag.setIntArray(DISABLED, disables);
        }
        // Write Inventory tag
        NBTTagCompound inventoryTag = new NBTTagCompound();
        NBTTagList nbttaglist = new NBTTagList();
        // Slot 1 mirrors the currently held item before saving.
        this.getPokemobInventory().setInventorySlotContents(1, this.getHeldItem());
        for (int i = 0; i < this.getPokemobInventory().getSizeInventory(); ++i)
        {
            ItemStack itemstack = this.getPokemobInventory().getStackInSlot(i);
            if (CompatWrapper.isValid(itemstack))
            {
                NBTTagCompound nbttagcompound1 = new NBTTagCompound();
                nbttagcompound1.setByte("Slot", (byte) i);
                itemstack.writeToNBT(nbttagcompound1);
                nbttaglist.appendTag(nbttagcompound1);
            }
        }
        inventoryTag.setTag(ITEMS, nbttaglist);
        // Write Breeding tag
        NBTTagCompound breedingTag = new NBTTagCompound();
        breedingTag.setInteger(SEXETIME, loveTimer);
        // Write visuals tag
        NBTTagCompound visualsTag = new NBTTagCompound();
        // This is still written for pokecubes to read from. Actual form is
        // stored in genes.
        visualsTag.setString(FORME, getPokedexEntry().getTrimmedName());
        visualsTag.setInteger(SPECIALTAG, dataSync().get(params.SPECIALINFO));
        int[] flavourAmounts = new int[5];
        for (int i = 0; i < flavourAmounts.length; i++)
        {
            flavourAmounts[i] = getFlavourAmount(i);
        }
        visualsTag.setIntArray(FLAVOURSTAG, flavourAmounts);
        if (CompatWrapper.isValid(getPokecube()))
        {
            NBTTagCompound pokecubeTag = getPokecube().writeToNBT(new NBTTagCompound());
            visualsTag.setTag(POKECUBE, pokecubeTag);
        }
        // Misc AI
        NBTTagCompound aiTag = new NBTTagCompound();
        aiTag.setInteger(GENERALSTATE, getTotalGeneralState());
        aiTag.setInteger(LOGICSTATE, getTotalLogicState());
        aiTag.setInteger(COMBATSTATE, getTotalCombatState());
        aiTag.setInteger(HUNGER, getHungerTime());
        NBTTagCompound aiRoutineTag = new NBTTagCompound();
        for (AIRoutine routine : AIRoutine.values())
        {
            aiRoutineTag.setBoolean(routine.toString(), isRoutineEnabled(routine));
        }
        aiTag.setTag(AIROUTINES, aiRoutineTag);
        // Misc other
        NBTTagCompound miscTag = new NBTTagCompound();
        miscTag.setInteger(RNGVAL, getRNGValue());
        miscTag.setInteger(UID, getPokemonUID());
        miscTag.setBoolean(WASSHADOW, wasShadow);
        // Set tags to the pokemob tag.
        pokemobTag.setTag(OWNERSHIPTAG, ownerShipTag);
        pokemobTag.setTag(STATSTAG, statsTag);
        pokemobTag.setTag(MOVESTAG, movesTag);
        pokemobTag.setTag(INVENTORYTAG, inventoryTag);
        pokemobTag.setTag(BREEDINGTAG, breedingTag);
        pokemobTag.setTag(VISUALSTAG, visualsTag);
        pokemobTag.setTag(AITAG, aiTag);
        pokemobTag.setTag(MISCTAG, miscTag);
        return pokemobTag;
    }
}
If It Took Seven Years And An Employee Confession To Reveal Intentional NSA Abuse, How Can NSA Say It Knows All Abuses? from the how-can-anyone-take-them-seriously dept "The assumption is our people are just out there wheeling and dealing. Nothing could be further from the truth. We have tremendous oversight over these programmes. We can audit the actions of our people 100%, and we do that," he said. Addressing the Black Hat convention in Las Vegas, an annual gathering for the information security industry, he gave a personal example: "I have four daughters. Can I go and intercept their emails? No. The technical limitations are in there." Should anyone in the NSA try to circumvent that, in defiance of policy, they would be held accountable, he said: "There is 100% audibility." We partly made this point last week, but I'm kind of in shock that so few people have paid attention to it, it seems worth highlighting again: the NSA revelations last week about the supposed "only" cases of intentional abuse show that there's likely a ton of abuse that went undiscovered. After all, remember that NSA boss Keith Alexander has insisted that its auditing is near perfect. Given that, you'd assume those twelve cases of intentional (and at times flagrant) abuse of the system, often to spy on "love interests," would have been caught by those audits. But no. By our count, only a few were caught by audits. And four of the revelations appear to have been self-reported. And one of the abuses (one of the self-reported ones) happened well before the confession. Given all of this, how can anyone (especially those in charge of the NSA and its oversight) argue that those are the only intentional abuses -- or that their audits can catch everyone? That's clearly untrue. Filed Under: audit, intentional abuses, keith alexander, loveint, nsa, nsa surveillance
Rapid analysis of guanidino compounds in serum from nephritic patients using column-switching with isocratic elution. The rapid method for baseline separation of ten guanidino compounds in serum from nephritic patients was designed using a single eluent with a column switching system. A porous graphitic carbon column and an octadecyl-bonded silica gel column were used (50 mm x 4.6 mm i.d.). Separation was completed within 15 min. The stable baseline permitted highly sensitive detection with excellent reproducibility. The system was applied to analyze guanidino compounds in sera from 175 nephritic patients. The hemodialysis process could not completely eliminate guanidino compounds, and the degree of removal varied between patients. The correlation among metabolites indicated the differences in disease.
/**
 * The <code>ControlFlowInstructions</code> handles instructions related to control flow in the java byte
 * code format. This includes return statements and branching, as well as transformations that need to be
 * applied to re-engineer the syntax tree from the binary code format.
 */
public final class ControlFlowInstructions implements DecompilerDelegation {

    /**
     * Registers a delegate for each return opcode and for goto, plus a
     * high-priority post-goto correction that recognizes try-catch jumps.
     */
    @Override
    public void configure(DecompilerConfigurationBuilder configurationBuilder) {
        assert configurationBuilder != null : "Configuration builder can't be null";

        configurationBuilder.on(ByteCode.return_).then(return_());
        configurationBuilder.on(ByteCode.ireturn).then(ireturn());
        configurationBuilder.on(ByteCode.lreturn).then(lreturn());
        configurationBuilder.on(ByteCode.freturn).then(freturn());
        configurationBuilder.on(ByteCode.dreturn).then(dreturn());
        configurationBuilder.on(ByteCode.areturn).then(areturn());
        configurationBuilder.on(ByteCode.goto_).then(goto_());
        configurationBuilder.after(ByteCode.goto_)
                .withPriority(Priority.HIGH)
                .then(forQuery(lastStatement().as(Goto.class)).apply(tryCatch()));
    }

    /**
     * Post-processes a decompiled goto: if its target matches a catch
     * location in the method's exception table, the goto belongs to a
     * try-catch construct rather than plain branching.
     */
    private static DecompilerElementDelegate<Goto> tryCatch() {
        return new DecompilerElementDelegate<Goto>() {
            @Override
            public void apply(DecompilationContext context, CodeStream codeStream, int byteCode, Goto gotoElement) throws IOException {
                final Optional<ExceptionTableEntry> exceptionTableEntry = Methods.getExceptionTableEntryForCatchLocation(
                        context.getMethod(), gotoElement.getMetaData().getProgramCounter());

                if (exceptionTableEntry.isPresent()) {
                    if (exceptionTableEntry.get().getStartPC() <= context.getStartPC() + 1) {
                        // Decompiling a try-catch body. In this case, the try exists prior to the code being decompiled.
                        // This occurs when decompiling a line surrounded by a try-catch. It's not possible to decompile
                        // past this jump, so we need to escape.
                        context.getStatements().last().remove();
                        context.abort();
                        return;
                    }

                    // throw new UnsupportedOperationException("Try-catch not implemented");
                    // TODO Implement try-catch
                }
            }
        };
    }

    /**
     * Decodes a goto: reads the signed 16-bit branch offset and enlists a
     * {@link GotoImpl} at the resolved absolute target, aborting when the
     * jump goes backwards past the decompilation start.
     */
    public static DecompilerDelegate goto_() {
        return new DecompilerDelegate() {
            @Override
            public void apply(DecompilationContext context, CodeStream codeStream, int byteCode) throws IOException {
                final int programCounter = context.getProgramCounter().get();
                final int relativeOffset = codeStream.nextSignedShort();
                final int absoluteOffset = programCounter + relativeOffset;

                if (absoluteOffset < context.getStartPC()) {
                    // This occurs when decompiling e.g. the body of a loop. The end of the loop will jump back to
                    // the start of the body, which will be before the decompilation when e.g. a particular line
                    // is being decompiled. It is not possible to proceed ahead of the jump back.
                    context.abort();
                    return;
                }

                context.enlist(new GotoImpl(absoluteOffset));
            }
        };
    }

    /** Handles void return: reduces pending expressions, then enlists the return. */
    public static DecompilerDelegate return_() {
        return new DecompilerDelegate() {
            @Override
            public void apply(DecompilationContext context, CodeStream codeStream, int byteCode) throws IOException {
                context.reduceAll();
                context.enlist(new ReturnImpl());
            }
        };
    }

    /**
     * Handles ireturn: the popped value must have one of the int-compatible
     * primitive types (boolean/byte/short/char/int share this opcode).
     */
    public static DecompilerDelegate ireturn() {
        return new DecompilerDelegate() {
            @Override
            public void apply(DecompilationContext context, CodeStream codeStream, int byteCode) throws IOException {
                final Expression returnValue = context.getStack().pop();

                switch (returnValue.getType().getTypeName()) {
                    case "boolean":
                    case "byte":
                    case "short":
                    case "char":
                    case "int":
                        context.enlist(new ReturnValueImpl(returnValue));
                        break;
                    default:
                        throw invalidReturnValue(byteCode, returnValue);
                }
            }
        };
    }

    /** Handles lreturn: popped value must be a long. */
    public static DecompilerDelegate lreturn() {
        return xreturn(long.class);
    }

    /** Handles freturn: popped value must be a float. */
    public static DecompilerDelegate freturn() {
        return xreturn(float.class);
    }

    /** Handles dreturn: popped value must be a double. */
    public static DecompilerDelegate dreturn() {
        return xreturn(double.class);
    }

    /** Handles areturn: the popped value must be a reference (non-primitive). */
    public static DecompilerDelegate areturn() {
        return new DecompilerDelegate() {
            @Override
            public void apply(DecompilationContext context, CodeStream codeStream, int byteCode) throws IOException {
                final Expression returnValue = context.getStack().pop();

                if (Types.isPrimitive(returnValue.getType())) {
                    throw invalidReturnValue(byteCode, returnValue);
                }

                context.enlist(new ReturnValueImpl(returnValue));
            }
        };
    }

    /** Shared handler for the typed return opcodes: popped value must match {@code type} exactly. */
    private static DecompilerDelegate xreturn(Type type) {
        return new DecompilerDelegate() {
            @Override
            public void apply(DecompilationContext context, CodeStream codeStream, int byteCode) throws IOException {
                final Expression returnValue = context.getStack().pop();

                if (!returnValue.getType().equals(type)) {
                    throw invalidReturnValue(byteCode, returnValue);
                }

                context.enlist(new ReturnValueImpl(returnValue));
            }
        };
    }

    /** Builds the error raised when the stack top does not match the return opcode's expected type. */
    private static ClassFileFormatException invalidReturnValue(int byteCode, Expression returnValue) {
        return new ClassFileFormatException("Invalid return value on stack: " + ByteCode.toString(byteCode)
                + " can't return " + returnValue);
    }
}
Prime Minister Narendra Modi addresses during a function for the launch of various developmental projects, in Varanasi. (PTI) So did prime minister Modi kill the economy by following up demonetisation with GST as many seem to be suggesting? Even if the government does not agree with former prime minister Manmohan Singh’s prediction of a DeMo-induced disaster, the fact that it is talking of the need for a stimulus package means it agrees the economy is sluggish and that jobs are not getting created. There are, however, many caveats. The economy was slowing when Modi took over and with inflation at 9.4% and the fiscal deficit at 4.5% in FY14, India’s macros were poor (see graphic). Investment, which was growing at 16% in FY08, fell to 1.6% in FY14, private consumption slowed from 9.4% to 7.3% and government expenditure from 9.6% to 0.6%. Also, India was knee-deep in the twin balance sheet problem—both bank and corporate balance sheets were deeply in red—and getting out of this takes at least 5-6 years as the global experience shows. None of this is to say Modi didn’t make major mistakes, he did. This newspaper has catalogued his wasted three years in fixing telecom, gas-pricing, freeing agriculture—this aggravated the rural crisis, and prolonged the slowdown in a big way —and poor progress in reforming labour laws or genuinely easing business or resolving the Pranab-era retrospective taxes… Indeed, Modi went the other way by imposing more controls in drugs pricing and even cotton-seeds and in creating bigger PSUs instead of privatising them… Even simple solutions such as the apparel package put together by chief economic advisor Arvind Subramanian took forever to put in place. Yet, there has been good progress in fixing subsidies other than food, and even in that case, there is some progress. The pro-poor measures like JanDhan accounts, direct benefit transfers and low-cost life/accident/crop insurance are by all accounts a success.
And at a time when there were few growth drivers, it was prudent budgeting that saw the government raise petroleum taxes and use the money to dramatically hike government capex in roads/rail—though Suresh Prabhu had to resign due to railway accidents, he began major reforms. In FY14-17, thanks to inflation collapsing, nominal GDP has slowed from 13% to 11% but tax-GDP still rose from 10.1 to 11.2, very unusual in low-growth periods. While most macros like inflation or the twin deficits improved under Modi, it goes beyond good luck with lower global inflation—by that logic, the go-go years of the UPA were surely influenced by high global growth. Exports growth that averaged 26.4% in FY05-08 played a big role in the average GDP growth of 9.1% — in FY14-17, exports contracted 3.8% on average. The role of RBI in keeping interest rates high—and thereby the rupee getting stronger—is also a big factor in sluggish GDP growth as it restricted local production and encouraged greater imports — Modi is guilty of accepting inflation-targeting (IT) that made RBI behave this way, but IT was pushed by the UPA. There can be little doubt that demonetisation added to the problem by badly hitting supply chains, especially in the informal sector, and it could be years before they revive, if at all—this is Manmohan Singh’s point. Demonetisation hit the real estate sector the most since that was a large user of black money, and this has large implications for jobs creation as well—along with the new RERA rules, though, reforming real estate will be a big plus if it makes housing affordable in the medium-term. But, at a fundamental level, demonetisation and GST are not too different in their impact on the cash-rich informal sector—in both cases, informal sector units have no option but to get into formal chains and start paying taxes and, as happens with greater formalisation, pay higher wages, etc. 
Bringing in GST so soon after demonetisation surely accelerated the pain but which politician, including Manmohan Singh, has not been pushing for GST for years due to its long-term benefits? And we can quibble about the timing, but GST had to be brought in 2-3 years before the elections so that the pain associated with it was absorbed over time. That is, if DeMo hadn’t hit the informal sector, GST would have, albeit at a more gradual and more manageable pace. Once DeMo or GST were brought in, MSMEs lost their tax-arbitrage-driven USP. The only way to fix this was to raise their efficiency —that requires big reforms in labour rules, tax policies … what are called ease-of-doing-business (EoDB) rules. Modi’s failure is that he felt EoDB had been fixed and was unwilling to accept any criticism of this. Nothing cures hubris better than a slowing economy though Modi’s 2019 plank will be more money in the hands of the poor and much lower inflation.
/**
 * Builds the transpose of the current matrix.
 *
 * <p>The matrix is stored in dual compressed form (CRS and CCS). The row-major
 * (CRS) view of the transpose is exactly the column-major (CCS) view of this
 * matrix — and vice versa — so the internal arrays can be reused verbatim with
 * their roles swapped. The original code copied this matrix's CRS arrays into
 * the transpose's CRS slot, which reproduces the original matrix instead of
 * transposing it; the sources are now swapped.
 *
 * @return the transpose of the current matrix
 */
public SparseStringMatrix transpose() {
    SparseStringMatrix tr = new SparseStringMatrix(numColumns, numRows);
    // transpose's rows are this matrix's columns: CRS(tr) = CCS(this)
    tr.copyCRS(this.colData, this.colPtr, this.rowInd);
    // and symmetrically: CCS(tr) = CRS(this)
    tr.copyCCS(this.rowData, this.rowPtr, this.colInd);
    return tr;
}
<gh_stars>0
//! Data and live trading API

/// Data API.
pub mod data;
/// Transactions API.
pub mod transactions;
The White House made clear yesterday that they oppose any independent investigation of the recent US attack on a Doctors Without Borders hospital in Kunduz, Afghanistan, and the Pentagon seems to be doubling down, ensuring that there’s not much evidence left for any independent investigators when they get through. Doctors Without Borders today reported they were informed that the US smashed into the wreckage of the hospital with a tank, forcing their way in and destroying potential evidence that would be used in a war crimes investigation. US officials claimed that the tank was “carrying investigators” from the official military inquiry into the matter, though they likely could’ve gotten into the hospital, closed after the attack, without using a tank if they’d simply asked Doctors Without Borders to let them in. The latest incident emerged amid reports that the Pentagon not only knew the facility was a protected hospital when they ordered it attacked, but that military analysts are continuing to argue that the attack was “justified” based on speculation a Pakistani spy may have been within. Last 5 posts by Jason Ditz
<reponame>mvhenten/ace-edit
import { createAceManager } from "./component/ace-editor";
import { createLayout } from "./component/layout";
import { createFileSystem as createMockFileSystem } from "./component/file-system/file-system-mock";
import { createFileSystem } from "./component/file-system/file-system-web";
import { createApplicationState } from "./component/application-state";
import { createKeyboardManager } from "./component/keyboard";
import { createTabManager } from "./component/tab-manager";
import { setupTabPane } from "./component/tab-pane";
import { setupBoxResizable } from "./component/box-resizable";
import { setupAceTree } from "./component/file-tree";
import "./style/index";

/**
 * Returns the document body as the host element for the layout.
 * Throws if the DOM is not available (e.g. script loaded in a non-browser context).
 */
const hostElementFactory = () => {
    const el = document.querySelector("body");
    if (!el) throw new Error("Missing dom element!");
    return el;
};

// A `?mock=1` query parameter switches to the in-memory mock file system.
const useMock = /mock=1/.test(document.location.search);

/**
 * Application entry point: builds the shared application state, picks the
 * file-system backend (mock vs. web), then wires the editor, tab and
 * keyboard managers into the layout.
 */
const main = () => {
    const applicationState = createApplicationState();
    const { fileSystemStore, optionsStore } = applicationState;
    const fileSystem = useMock
        ? createMockFileSystem(fileSystemStore)
        : createFileSystem(fileSystemStore);
    const aceEditorManager = createAceManager(optionsStore);
    const tabManager = createTabManager(
        aceEditorManager,
        fileSystem,
        optionsStore
    );
    createKeyboardManager(aceEditorManager, fileSystem);
    createLayout({
        aceEditorManager,
        tabManager,
        hostElementFactory,
        fileSystem,
        fileSystemStore,
        optionsStore,
    });
};

// Register the custom elements/behaviors before the layout is created.
setupTabPane();
setupBoxResizable();
setupAceTree();
main();
// discoverSMTP performs a DNS-based SMTP submission service discovery, as // defined in RFC 6186 section 3.1. RFC 8314 section 5.1 adds a new service for // SMTP submission with implicit TLS. func discoverSMTP(domain string) (*url.URL, error) { smtpsHost, err := discoverTCP("submissions", domain) if err != nil { return nil, err } if smtpsHost != "" { return &url.URL{Scheme: "smtps", Host: smtpsHost}, nil } smtpHost, err := discoverTCP("submission", domain) if err != nil { return nil, err } if smtpHost != "" { return &url.URL{Scheme: "smtp", Host: smtpHost}, nil } return nil, fmt.Errorf("SMTP service discovery not configured for domain %q", domain) }
<reponame>idsec-solutions/signservice-ref
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.aaasec.sigserv.sigauthsp.opensaml;

import java.util.List;
import org.joda.time.DateTime;
import org.opensaml.saml2.core.Audience;
import org.opensaml.saml2.core.AudienceRestriction;
import org.opensaml.saml2.core.Conditions;

/**
 * Wrapper around an OpenSAML {@link Conditions} element that provides a
 * convenience method for setting the assertion validity window and the
 * audience restriction.
 *
 * @author stefan
 */
public class ApConditions extends AbstractOpenSamlObj<Conditions> {

    /** Creates a wrapper around a newly built, empty Conditions element. */
    public ApConditions() {
        super(Conditions.DEFAULT_ELEMENT_NAME);
    }

    /**
     * Wraps an existing Conditions element.
     *
     * @param obj the Conditions element to wrap
     */
    public ApConditions(Conditions obj) {
        super(obj, Conditions.DEFAULT_ELEMENT_NAME);
    }

    /**
     * Sets the validity window and audience restriction of the wrapped
     * Conditions element.
     *
     * @param requesterEntityId entity ID placed in the audience restriction
     * @param timeScewSec allowed clock skew in seconds; NotBefore is backdated by this amount
     * @param validitySec validity period in seconds from now, used for NotOnOrAfter
     * @return this instance, for call chaining
     */
    public ApConditions setConditions(String requesterEntityId, int timeScewSec, int validitySec) {
        // Backdate NotBefore by the allowed clock skew so that recipients with a
        // slightly slow clock still accept the assertion.
        obj.setNotBefore(new DateTime(System.currentTimeMillis() - timeScewSec * 1000));
        obj.setNotOnOrAfter(new DateTime(System.currentTimeMillis() + validitySec * 1000));
        // Build a single audience restriction holding the requester's entity ID.
        List<AudienceRestriction> audienceRestrictions = obj.getAudienceRestrictions();
        AudienceRestriction audRestr = Builder.audienceRestrictionBuilder.buildObject();
        audienceRestrictions.add(audRestr);
        List<Audience> audiences = audRestr.getAudiences();
        Audience audience = Builder.audienceBuilder.buildObject();
        audiences.add(audience);
        audience.setAudienceURI(requesterEntityId);
        return this;
    }
}
#include <llvm/Pass.h>
#include <llvm/IR/Function.h>
#include <llvm/Support/raw_ostream.h>
#include <llvm/IR/LegacyPassManager.h>
#include <llvm/Analysis/CallGraph.h>
#include <llvm/Analysis/LoopInfo.h>
#include <llvm/Analysis/CFG.h>
#include <llvm/IR/Instructions.h>
#include <llvm/IR/ValueSymbolTable.h>
#include <llvm/Support/CommandLine.h>
#include <llvm/Support/Debug.h>
#include <llvm/IR/Module.h>
#include <iostream>
#include <iomanip>
#include <fstream>

using namespace llvm;

namespace APIFuzzErr {
/***
 * Pass that integrates various techniques to detect error
 * guarding branches in the given bitcode file.
 */
struct ModuleLevelPass : public ModulePass {
public:
    static char ID;  // Pass identification; the address, not the value, is used by LLVM.

    ModuleLevelPass() : ModulePass(ID) {
    }

    ~ModuleLevelPass() {
    }

    /// Entry point of the pass. Currently a skeleton: performs no analysis yet
    /// and returns false to signal that the module was not modified.
    bool runOnModule(Module &m) override {
        return false;
    }

    /// No analysis prerequisites are declared (and nothing is preserved
    /// explicitly), since the pass does not yet inspect or modify the IR.
    void getAnalysisUsage(AnalysisUsage &AU) const override {
    }
};

char ModuleLevelPass::ID = 0;

// Registration with the legacy pass manager:
// pass arg, pass desc, cfg_only, analysis only
static RegisterPass<ModuleLevelPass> x("errdetector",
                                       "Pass that tries to detect error guarding branches.",
                                       false,
                                       true);
}
#ifndef _LIBRE_FUNCTIONS_HPP
#define _LIBRE_FUNCTIONS_HPP

// Copyright <NAME> 2015.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file ../LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#include <string>
#include <boost/filesystem.hpp>
#include <com/sun/star/frame/XComponentLoader.hpp>
#include <com/sun/star/lang/XMultiServiceFactory.hpp>

namespace boost { namespace doc { namespace libre_functions {

// Maps a file extension to the LibreOffice PDF export filter name for that format.
std::string convert_extension_to_pdf_filter(const std::string extension);

// Connects to a running LibreOffice (UNO) server and returns a component loader.
//::com::sun::star::uno::Reference<com::sun::star::lang::XMultiServiceFactory> connect_to_libre_server();
::com::sun::star::uno::Reference<com::sun::star::frame::XComponentLoader> connect_to_libre_server();

// Configures the UNO bootstrap/office API environment before connecting.
void set_bootstrap_offapi();

// Converts a filesystem path into the file-URL form expected by UNO.
::rtl::OUString get_url_from_path(const boost::filesystem::path& path);

// Opens the document at `path` in LibreOffice and returns its component handle.
::com::sun::star::uno::Reference< com::sun::star::lang::XComponent> create_libre(const boost::filesystem::path& path);

// Exports an open document to `outPath` in the requested format.
void export_libre(const boost::filesystem::path &inputPath,
    boost::document_file_format::type format,
    ::com::sun::star::uno::Reference< com::sun::star::lang::XComponent > xComponent,
    const boost::filesystem::path &outPath);

// Closes an open document, optionally saving it first.
void close_libre(const boost::filesystem::path &inputPath, bool save,
    ::com::sun::star::uno::Reference< com::sun::star::lang::XComponent > xComponent);

// Saves an open document back to its original location.
void save_libre(const boost::filesystem::path &inputPath,
    ::com::sun::star::uno::Reference< com::sun::star::lang::XComponent > xComponent);

// Returns the component handle for the document at `inputPath`.
::com::sun::star::uno::Reference< com::sun::star::lang::XComponent> get_xComponent_from_path(const boost::filesystem::path& inputPath);

}}}

#endif
<filename>projects/egeo/src/lib/directives/st-click-outside/st-click-outside.directive.ts /* * © 2017 Stratio Big Data Inc., Sucursal en España. All rights reserved. * * This software – including all its source code – contains proprietary * information of Stratio Big Data Inc., Sucursal en España and * may not be revealed, sold, transferred, modified, distributed or * otherwise made available, licensed or sublicensed to third parties; * nor reverse engineered, disassembled or decompiled, without express * written authorization from Stratio Big Data Inc., Sucursal en España. */ import { Directive, EventEmitter, ElementRef, HostListener, Output } from '@angular/core'; @Directive({ selector: '[clickOutside]' }) export class StClickOutside { @Output() clickOutside: EventEmitter<MouseEvent> = new EventEmitter<MouseEvent>(); constructor(private elementRef: ElementRef) {} @HostListener('document:click', ['$event']) public onDocumentClick(event: MouseEvent): void { const targetElement = event.target as HTMLElement; if (targetElement && !this.elementRef.nativeElement.contains(targetElement)) { this.clickOutside.emit(event); } } }
def GetXY_Segment(self, N=100):
    """Sample N points along the tangent line between the segment endpoints.

    The x coordinates are spaced evenly between the x components of
    ``self.XYStart`` and ``self.XYEnd``; the y coordinates are obtained from
    the tangent line ``y = m * x + q`` stored in ``self.Line_Tan``.

    :param N: number of sample points (default 100)
    :return: tuple ``(x, y)`` of numpy arrays of length N
    """
    x_start = self.XYStart[0]
    x_end = self.XYEnd[0]
    slope = self.Line_Tan.m
    intercept = self.Line_Tan.q

    x = np.linspace(x_start, x_end, N)
    y = slope * x + intercept
    return x, y
/**
 * @brief Calculates the potential difference based on the velocity deviation.
 *
 * The score decreases linearly as the absolute difference between the desired
 * and actual longitudinal velocity grows, where the deviation is normalized by
 * one tenth of the desired speed; it is clamped from below at -1 and finally
 * scaled by the episode length.
 *
 * @param desire The desire of the agent.
 * @param vehicle The vehicle of the agent.
 * @return float The difference.
 */
float CostLinearCooperative::featureVelocityDeviation(const Desire& desire, const Vehicle& vehicle) const {
    const float desiredVelocity = desire.m_desiredVelocity;
    const float deviation = std::abs(desiredVelocity - vehicle.m_velocityX);
    // Deviations are measured relative to 10% of the desired speed.
    const float normalization = std::abs(desiredVelocity) / 10.0f;
    const float score = std::max(1.0f - deviation / normalization, -1.0f);
    return score / episodeLength;
}
def merge_winds_and_station_metadata(wind_table, station_metadata_table, station_id):
    """Attach station metadata to every row of a wind table.

    Tags each wind observation with ``station_id``, then inner-joins the
    selected metadata columns from ``station_metadata_table`` on the station-ID
    column.

    :param wind_table: pandas DataFrame of wind observations
    :param station_metadata_table: pandas DataFrame of station metadata
    :param station_id: string ID of the station the observations belong to
    :return: merged pandas DataFrame
    """
    error_checking.assert_is_string(station_id)

    # Every observation in the table comes from the same station.
    station_column = {
        raw_wind_io.STATION_ID_COLUMN: [station_id] * len(wind_table.index)
    }
    tagged_wind_table = wind_table.assign(**station_column)

    return tagged_wind_table.merge(
        station_metadata_table[METADATA_COLUMNS_TO_MERGE],
        on=raw_wind_io.STATION_ID_COLUMN, how='inner')
#![no_main] #![no_std] #![feature(abi_efiapi)] #![feature(int_roundings)] #![feature(maybe_uninit_uninit_array)] #![feature(maybe_uninit_slice)] use anyhow::{anyhow, bail, Context as _, Error, Result}; use core::{arch::asm, fmt::Write, mem::MaybeUninit}; use object::{elf, read::elf::ProgramHeader as _, Endianness}; use pomelo_common::{ graphics::{GraphicConfig, PixelFormat}, memory_mapping::{MemoryDescriptor, MemoryMapping}, BootInfo, KernelMain, }; use uefi::{ prelude::*, proto::{ console::gop::GraphicsOutput, media::file::{Directory, File, FileAttribute, FileInfo, FileMode, FileType, RegularFile}, }, table::boot, }; const MAX_MEMORY_MAP_ENTRY_COUNT: usize = 128; const MEMORY_MAP_BUF_SIZE: usize = 16 * 1024; const FILE_INFO_BUF_SIZE: usize = 8 * 1024; #[entry] fn main(handle: Handle, st: SystemTable<Boot>) -> Status { actual_main(handle, st).expect("Failed!"); Status::SUCCESS } fn actual_main(handle: Handle, mut st: SystemTable<Boot>) -> Result<()> { uefi_services::init(&mut st).expect_success("Failed to initialize utilities"); st.stdout() .reset(false) .expect_success("Failed to reset stdout"); writeln!(st.stdout(), "Hello, world!!!!").expect("Failed to write to stdout"); let mut root = open_root_dir(handle, st.boot_services()) .warning_as_error() .map_err(|_| anyhow!("Failed to open a file to write the memory mapping"))?; write_memory_map_file(st.boot_services(), &mut root, "\\memmap")?; writeln!(st.stdout(), "Wrote memory map file").expect("Failed to write to stdout"); let kernel_main = prepare_kernel::<elf::FileHeader64<Endianness>>(st.boot_services(), &mut root, "\\kernel")?; writeln!(st.stdout(), "Loaded kernel").expect("Failed to write to stdout"); let graphic_config = read_graphic_config(&mut st)?; static mut MEMORY_MAP: [u8; MEMORY_MAP_BUF_SIZE] = [0; MEMORY_MAP_BUF_SIZE]; let (st, memory_descriptor_iter) = st .exit_boot_services(handle, unsafe { &mut MEMORY_MAP }) .expect_success("Failed to exit boot services"); static mut DESCRIPTORS: 
[MaybeUninit<MemoryDescriptor>; MAX_MEMORY_MAP_ENTRY_COUNT] = MaybeUninit::uninit_array(); let mut initialized_count = 0; for descriptor in memory_descriptor_iter { assert!( initialized_count < unsafe { DESCRIPTORS }.len(), "Reached to the max count of memory mapping descriptors." ); unsafe { &mut DESCRIPTORS[initialized_count] }.write(*descriptor); initialized_count += 1; } let initialized_descriptors = unsafe { MaybeUninit::slice_assume_init_ref(&DESCRIPTORS[0..initialized_count]) }; // We'd like to store the arguments to the kernel main in the heap instead of the stack. static mut BOOT_INFO: MaybeUninit<BootInfo> = MaybeUninit::uninit(); let acpi2_rsdp = st .config_table() .iter() .filter(|e| e.guid == uefi::table::cfg::ACPI2_GUID) .map(|e| e.address) .next(); unsafe { &mut BOOT_INFO }.write(BootInfo::new( graphic_config, MemoryMapping::new(initialized_descriptors), acpi2_rsdp, )); let boot_info = unsafe { BOOT_INFO.assume_init_ref() }; kernel_main(boot_info); #[allow(clippy::empty_loop)] loop { unsafe { asm!("hlt") } } } fn open_root_dir(handle: Handle, bs: &BootServices) -> uefi::Result<Directory> { let fs = bs.get_image_file_system(handle).warning_as_error()?; let fs = unsafe { &mut *fs.interface.get() }; fs.open_volume() } fn read_graphic_config(st: &mut SystemTable<Boot>) -> Result<GraphicConfig> { let go = st .boot_services() .locate_protocol::<GraphicsOutput>() .warning_as_error() .map_err(|_| anyhow!("Unable to get graphics output"))?; let go = unsafe { &mut *go.get() }; let mode = go .modes() .filter_map(|mode| { let mode = mode.expect("Unable to get mode"); let format = mode.info().pixel_format(); match format { uefi::proto::console::gop::PixelFormat::Rgb => { Option::Some((mode, PixelFormat::Rgb)) } uefi::proto::console::gop::PixelFormat::Bgr => { Option::Some((mode, PixelFormat::Bgr)) } _ => Option::None, } }) .min_by_key(|(mode, _)| (mode.info().resolution().0 as isize - 1440).abs()); let (mode, pixel_format) = mode.ok_or_else(|| anyhow!("Unable to 
find supported pixel format (RGB | BGR)"))?; go.set_mode(&mode) .warning_as_error() .map_err(|_| anyhow!("Unable to set mode"))?; let info = mode.info(); let (horisontal_resolution, vertical_resolution) = info.resolution(); let pixels_per_row = info.stride(); let mut fb = go.frame_buffer(); let config = GraphicConfig { frame_buffer_base: fb.as_mut_ptr(), frame_buffer_size: fb.size(), pixel_format, horisontal_resolution, vertical_resolution, pixels_per_row, }; Ok(config) } fn prepare_kernel<Elf: object::read::elf::FileHeader<Endian = Endianness>>( bs: &BootServices, root: &mut Directory, filename: &str, ) -> Result<KernelMain> { let kernel_file = root .open(filename, FileMode::Read, FileAttribute::empty()) .warning_as_error() .map_err(|_| anyhow!("Failed to open the kernel file"))?; let mut kernel_file = match kernel_file .into_type() .expect_success("Failed to get type of a file") { FileType::Regular(f) => f, _ => bail!("kernel file exists as non-regular-file"), }; let mut file_info_buffer = [0; FILE_INFO_BUF_SIZE]; let kernel_file_info = kernel_file .get_info::<FileInfo>(&mut file_info_buffer) .expect_success("Failed to get file info"); let kernel_file_size = kernel_file_info.file_size() as usize; struct AllocatedMemory<'a>(&'a BootServices, &'a mut [u8]); impl<'a> Drop for AllocatedMemory<'a> { fn drop(&mut self) { self.0 .free_pool(self.1.as_mut_ptr()) .expect_success("Failed to free an allocated pool"); } } let kernel_content = { let ptr = bs .allocate_pool(boot::MemoryType::LOADER_DATA, kernel_file_size) .warning_as_error() .map_err(|_| anyhow!("Unable to allocate temporary memory to read the kernel"))?; AllocatedMemory(bs, unsafe { core::slice::from_raw_parts_mut(ptr, kernel_file_size) }) }; kernel_file .read(kernel_content.1) .expect_success("Unable to read kernel file content"); let elf = Elf::parse(&kernel_content.1[..]) .map_err(|_| anyhow!("Unable to parse the kernel file as elf"))?; let endian = elf .endian() .map_err(|_| anyhow!("Unable to determin 
endian of the kernel file"))?; let entry_point = elf.e_entry(endian).into() as usize; let (kernel_base_address, kernel_length) = { let mut start = u64::MAX; let mut end = u64::MIN; for segment in elf .program_headers(endian, &kernel_content.1[..]) .map_err(|_| anyhow!("Unable to parse program headers of the kernel"))? { if segment.p_type(endian) == elf::PT_LOAD { let start_pos = segment.p_vaddr(endian).into(); let end_pos = start_pos + segment.p_memsz(endian).into(); start = start.min(start_pos); end = end.max(end_pos); } } (start as usize, (end - start) as usize) }; const PAGE_SIZE: usize = 0x1000; let allocate_page_count = kernel_length.div_ceil(PAGE_SIZE); bs.allocate_pages( boot::AllocateType::Address(kernel_base_address), boot::MemoryType::LOADER_DATA, allocate_page_count, ) .expect_success("Failed to allocate pages"); let allocated_slice = unsafe { core::slice::from_raw_parts_mut( kernel_base_address as *mut u8, allocate_page_count * PAGE_SIZE, ) }; for segment in elf .program_headers(endian, &kernel_content.1[..]) .map_err(|_| anyhow!("Unable to parse program headers of the kernel"))? 
{ if segment.p_type(endian) == elf::PT_LOAD { let start_pos = segment.p_vaddr(endian).into() as usize - kernel_base_address; let end_pos = start_pos + segment.p_memsz(endian).into() as usize; let data = segment .data(endian, &kernel_content.1[..]) .map_err(|_| anyhow!("Unable to read segment from kernel"))?; let copy_from_file_end_pos = start_pos + data.len(); allocated_slice[start_pos..copy_from_file_end_pos].copy_from_slice(data); allocated_slice[copy_from_file_end_pos..end_pos].fill(0); } } drop(kernel_content); let entry_point: KernelMain = unsafe { core::mem::transmute(entry_point) }; Ok(entry_point) } fn write_memory_map_file(bs: &BootServices, root: &mut Directory, filename: &str) -> Result<()> { let mut memory_map = [0; MEMORY_MAP_BUF_SIZE]; let (_map_key, desc_iter) = bs .memory_map(&mut memory_map) .warning_as_error() .map_err(|_| anyhow!("Failed to get memory mapping"))?; let memory_map_file = root .open(filename, FileMode::CreateReadWrite, FileAttribute::empty()) .warning_as_error() .map_err(|_| anyhow!("Failed to open a file to write the memory mapping"))?; struct FileWrapper(RegularFile); impl Write for FileWrapper { fn write_str(&mut self, s: &str) -> core::fmt::Result { self.0 .write(s.as_bytes()) .warning_as_error() .map_err(|_| core::fmt::Error) } } let mut file = match memory_map_file .into_type() .expect_success("Failed to get type of a file") { FileType::Regular(f) => FileWrapper(f), _ => bail!("memmap file exists as non-regular-file"), }; writeln!( file, "Index, TYpe, Type(name), PhysicalStart, NumberOfPages, Attribute" ) .map_err(Error::msg) .with_context(|| "Failed to write to the memory mapping file")?; for (i, desc) in desc_iter.enumerate() { writeln!( file, "{}, {:x}, {:?}, {:08x}, {}, {:x}", i, desc.ty.0, desc.ty, desc.phys_start, desc.page_count, desc.att ) .map_err(Error::msg) .with_context(|| "Failed to write to the memory mapping file")?; } Ok(()) }
def add(self, origin, rel, target, attrs=None, index=None):
    """Add a relationship (origin, rel, target, attrs) to the collection.

    :param origin: origin node of the relationship; must be truthy
    :param rel: relationship ID; must be truthy
    :param target: target node of the relationship
    :param attrs: optional attributes; coerced to ``self._attr_cls`` if not
        already an instance of it
    :param index: optional position to insert at; appended when omitted
    :return: the relationship ID (its index in the collection)
    :raises ValueError: if ``origin`` or ``rel`` is falsy
    """
    if not origin:
        raise ValueError('Relationship origin cannot be null')
    if not rel:
        raise ValueError('Relationship ID cannot be null')
    # Normalize attrs to an instance of the configured attribute class.
    # The previous check compared ``type(attrs)`` with ``type(self._attr_cls)``
    # (i.e. with the metaclass), which held for virtually every value and
    # re-wrapped even proper attribute instances; isinstance expresses the
    # intended "coerce only if not already the right type".
    if not isinstance(attrs, self._attr_cls):
        attrs = self._attr_cls(attrs or {})
    item = (origin, rel, target, attrs)
    if index is not None:
        rid = index
        self._relationships.insert(index, item)
    else:
        rid = self.size()
        self._relationships.append(item)
    return rid
/* Update VSAN properties associated with a VSAN ID */
int bcm_petra_fcoe_vsan_set(
    int unit,
    uint32 vsan_id,
    bcm_fcoe_vsan_t *vsan)
{
    /* FCoE VSAN configuration is not implemented for this device family;
     * callers always receive "feature unavailable". */
    return BCM_E_UNAVAIL;
}
India Bans Film About Infamous 2012 Gang Rape Enlarge this image toggle caption Altaf Qadri/AP Altaf Qadri/AP India is banning a documentary about the deadly gang rape of a young woman in 2012 amid concerns over remarks made by one of her convicted rapists. The government also says it will investigate how the film crew gained access to him on death row. At issue is British filmmaker Leslee Udwin's film India's Daughter, made for the BBC and India's NDTV, about the rape and killing of a 23-year-old student aboard a bus in New Delhi, the Indian capital. The case horrified the world and put a global spotlight on attitudes toward and violence against women in India. In excerpts of the film, scheduled to be broadcast in full on March 8, International Women's Day, Mukesh Singh, one of four men sentenced to death for the young woman's rape and murder, places the onus of rape on women. Singh says: "A girl is far more responsible for rape than a boy." And, he adds: "A decent girl won't roam around at 9 o'clock at night. ... Housework and housekeeping is for girls, not roaming in discos and bars at night doing wrong things, wearing wrong clothes." Those comments prompted outrage in India, though as The Associated Press notes, Singh was "repeating something community and religious leaders in this nation of 1.2 billion routinely say." The film also interviewed the other convicted rapists, all of whom are appealing their death sentences. India's government chimed in. Home Minister Rajnath Singh — no relation to the convicted rapist — said the film will not be aired in India and he promised an inquiry into how the filmmakers gained access to the rapists on death row. "It was noticed the documentary film depicts the comments of the convict which are highly derogatory and are an affront to the dignity of women," he told Parliament. "How was permission given to interview a rapist? It is shocking. I will get this investigated."
And he said the filmmakers had violated part of the agreement by not showing officials all of the unedited interviews. NDTV pointed out that the filmmakers had taken all the required permission before conducting the interview. And, it said, it "plans to broadcast the documentary on March 8." Police officials in New Delhi said Mukesh Singh's comments in the film created "fear and tension" and risked public anger. But some lawmakers in India's upper house of Parliament criticized the blocking of the film. "[T]he reality is what the man spoke reflects the view of many men in India and why are we shying away from that? In glorifying India and (saying) we are perfect we are not confronting the issues that need to be confronted," said businesswoman Anu Aga, a member of the chamber. Writing on NDTV's website, Udwin said she was "deeply saddened" by the controversy, and she urged Indian Prime Minister Narendra Modi "to deal with this unceremonious silencing of the film." She added:
//
//  SectionSViewController.h
//  TableViewControllerDemo
//
//  Created by kuroky on 2017/7/19.
//  Copyright © 2017年 kuroky. All rights reserved.
//

#import "MXBaseTableViewController.h"

/// Demo table view controller built on MXBaseTableViewController.
/// Declares no additional public API; all behavior is inherited from
/// (or overridden in) the implementation file.
@interface SectionSViewController : MXBaseTableViewController

@end
<reponame>ndouglas/ReactiveNetService<gh_stars>1-10
//
//  RNSDefinitions.h
//  ReactiveNetService
//
//  Created by <NAME> on 02/04/15.
//  Released into the public domain.
//  See LICENSE for details.
//

#import <Foundation/Foundation.h>
#import <ReactiveCocoa/ReactiveCocoa.h>
#import "YOLO.h"

/**
 Returns an error constructed from an error dictionary.

 NOTE(review): presumably the dictionary follows the NSNetService
 errorDict convention (NSNetServicesErrorCode/NSNetServicesErrorDomain
 keys) — confirm against the call sites in the implementation.

 @param _errorDictionary The error dictionary.
 @return An error constructed from an error dictionary.
 */
NSError *RNSErrorForErrorDictionary(NSDictionary *errorDictionary);
# Coin denominations available, largest first. For this canonical system the
# greedy strategy is optimal, which is exactly what the original hand-rolled
# per-digit logic computed (it also left this list unused).
DENOMINATIONS = [100, 20, 10, 5, 1]


def min_coins(amount):
    """Return the minimum number of coins needed to pay ``amount``.

    Greedily takes as many of each denomination as possible, largest first.

    :param amount: non-negative integer amount to make change for
    :return: minimum coin count as an int
    """
    coins = 0
    for denom in DENOMINATIONS:
        coins += amount // denom
        amount %= denom
    return coins


if __name__ == '__main__':
    # Same observable behavior as the original script: read one integer
    # from stdin and print the coin count.
    print(min_coins(int(input())))
/*	$NetBSD: dnssec.c,v 1.4 2006/09/09 16:22:09 manu Exp $	*/

/*	$KAME: dnssec.c,v 1.2 2001/08/05 18:46:07 itojun Exp $	*/

/*
 * Copyright (C) 1995, 1996, 1997, and 1998 WIDE Project.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the project nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE PROJECT OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include "config.h"

#include <sys/types.h>
#include <sys/param.h>

#include <stdlib.h>
#include <string.h>

#include "var.h"
#include "vmbuf.h"
#include "misc.h"
#include "plog.h"
#include "debug.h"

#include "isakmp_var.h"
#include "isakmp.h"
#include "ipsec_doi.h"
#include "oakley.h"
#include "netdb_dnssec.h"
#include "strnames.h"
#include "dnssec.h"
#include "gcmalloc.h"

extern int h_errno;

/*
 * Look up a certificate for the given ISAKMP identification payload via
 * DNSSEC (CERT RRs). Only FQDN identifiers are supported. Returns a newly
 * allocated cert holder on success, NULL on failure.
 *
 * Fix: the temporary "name" buffer was previously released only on the
 * error path, leaking it on every successful lookup; it is now freed on
 * the shared exit path.
 */
cert_t *
dnssec_getcert(id)
	vchar_t *id;
{
	cert_t *cert = NULL;
	struct certinfo *res = NULL;
	struct ipsecdoi_id_b *id_b;
	int type;
	char *name = NULL;
	int namelen;
	int error;

	/* Extract the NUL-terminated identifier string that follows the
	 * fixed ID header in the payload. */
	id_b = (struct ipsecdoi_id_b *)id->v;
	namelen = id->l - sizeof(*id_b);
	name = racoon_malloc(namelen + 1);
	if (!name) {
		plog(LLV_ERROR, LOCATION, NULL,
			"failed to get buffer.\n");
		return NULL;
	}
	memcpy(name, id_b + 1, namelen);
	name[namelen] = '\0';

	switch (id_b->type) {
	case IPSECDOI_ID_FQDN:
		error = getcertsbyname(name, &res);
		if (error != 0) {
			plog(LLV_ERROR, LOCATION, NULL,
				"getcertsbyname(\"%s\") failed.\n", name);
			goto err;
		}
		break;
	case IPSECDOI_ID_IPV4_ADDR:
	case IPSECDOI_ID_IPV6_ADDR:
		/* XXX should be processed to query PTR ? */
	default:
		plog(LLV_ERROR, LOCATION, NULL,
			"inpropper ID type passed %s "
			"though getcert method is dnssec.\n",
			s_ipsecdoi_ident(id_b->type));
		goto err;
	}

	/* check response */
	if (res->ci_next != NULL) {
		plog(LLV_WARNING, LOCATION, NULL,
			"not supported multiple CERT RR.\n");
	}
	switch (res->ci_type) {
	case DNSSEC_TYPE_PKIX:
		/* XXX is it enough condition to set this type ? */
		type = ISAKMP_CERT_X509SIGN;
		break;
	default:
		plog(LLV_ERROR, LOCATION, NULL,
			"not supported CERT RR type %d.\n", res->ci_type);
		goto err;
	}

	/* create cert holder */
	cert = oakley_newcert();
	if (cert == NULL) {
		plog(LLV_ERROR, LOCATION, NULL,
			"failed to get cert buffer.\n");
		goto err;
	}
	/* first byte of the payload carries the certificate encoding type,
	 * the DER data follows it */
	cert->pl = vmalloc(res->ci_certlen + 1);
	if (cert->pl == NULL) {
		plog(LLV_ERROR, LOCATION, NULL,
			"failed to get cert buffer.\n");
		goto err;
	}
	memcpy(cert->pl->v + 1, res->ci_cert, res->ci_certlen);
	cert->pl->v[0] = type;
	cert->cert.v = cert->pl->v + 1;
	cert->cert.l = cert->pl->l - 1;

	plog(LLV_DEBUG, LOCATION, NULL, "created CERT payload:\n");
	plogdump(LLV_DEBUG, cert->pl->v, cert->pl->l);

end:
	/* shared exit path: free temporaries on both success and failure */
	if (name)
		racoon_free(name);
	if (res)
		freecertinfo(res);
	return cert;

err:
	if (cert) {
		oakley_delcert(cert);
		cert = NULL;
	}
	goto end;
}
def vacuum_analyze_checkpoint(self, full=False):
    """Run routine database maintenance.

    Always issues ``VACUUM ANALYZE`` followed by ``CHECKPOINT``; when
    ``full`` is True, additionally runs the (much slower, exclusive-lock)
    ``VACUUM FULL ANALYZE`` afterwards.

    :param full: request a full vacuum pass after the regular one.
    """
    logging.info("Vacuum analyzing db now")
    statements = ["VACUUM ANALYZE;", "CHECKPOINT;"]
    if full:
        statements.append("VACUUM FULL ANALYZE;")
    for stmt in statements:
        self.execute(stmt)
def intersection_profile(H, order=None, index=False):
    """Return the intersection profile of hypergraph ``H``.

    The profile is the Gram matrix ``I.T * I`` of the incidence matrix
    ``I``: entry (i, j) counts the vertices shared by edges i and j.

    :param H: hypergraph whose incidence matrix is taken.
    :param order: optional ordering forwarded to ``incidence_matrix``.
    :param index: when True, also return the column index mapping.
    :return: the profile matrix, or ``(profile, coldict)`` when
        ``index`` is True.
    """
    if index:
        I, _, coldict = incidence_matrix(H, order=order, index=True)
        return I.T.dot(I), coldict
    I = incidence_matrix(H, order=order, index=False)
    return I.T.dot(I)
/** * Packrat parser for grammar <code>xtc.parser.PGrammar</code>. * * <p />This class has been generated by the <i>Rats!</i> parser * generator, version 1.14.3, (C) 2004-2008 Robert Grimm. */ public final class PParser extends ParserBase { // ========================================================================= /** Chunk 1 of memoized results. */ static final class Chunk1 { Result fModuleList; Result fModuleList$$Star1; Result fModuleTarget; Result fModuleName; Result fProductionAttributes; Result fChoice; Result fChoice$$Star1; Result fSequence; Result fSequence$$Star1; Result fSequenceName; } /** Chunk 2 of memoized results. */ static final class Chunk2 { Result fVoided; Result fSuffix; Result fPrimary; Result fUnqualifiedNonTerminal; Result fAction; Result fAttribute; Result fTypeName; Result fTypeNameCharacters; Result fTypeNameCharacters$$Star1; Result fName; } /** Chunk 3 of memoized results. */ static final class Chunk3 { Result fWord; Result fNameCharacters; Result fNameCharacters$$Star1; Result fWordCharacters; Result fWordCharacters$$Plus1; Result fWordCharacters$$Star1; Result fCharacterLiteral; Result fStringLiteral; Result fStringLiteral$$Star1; Result fClassChar; } /** Chunk 4 of memoized results. */ static final class Chunk4 { Result fSymbol; } // ========================================================================= /** Memoization table column. */ static final class PParserColumn extends Column { Chunk1 chunk1; Chunk2 chunk2; Chunk3 chunk3; Chunk4 chunk4; } // ========================================================================= /** The global state object. */ protected final PParserState yyState; // ========================================================================= /** * Create a new packrat parser. * * @param reader The reader. * @param file The file name. */ public PParser(final Reader reader, final String file) { super(reader, file); yyState = new PParserState(); } /** * Create a new packrat parser. 
* * @param reader The file reader. * @param file The file name. * @param size The file size. */ public PParser(final Reader reader, final String file, final int size) { super(reader, file, size); yyState = new PParserState(); } // ========================================================================= protected Column newColumn() { return new PParserColumn(); } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Module. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ public Result pModule(final int yyStart) throws IOException { Result yyResult; int yyBase; int yyRepetition1; Pair<Object> yyRepValue1; int yyOption1; Object yyOpValue1; Module yyValue; ParseError yyError = ParseError.DUMMY; // Reset the global state object. yyState.reset(column(yyStart).file); // Alternative 1. yyResult = pSpacing(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pCodeComment(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final String v$el$1 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$1; } { // Start scope for documentation. final String documentation = cast(yyOpValue1); yyResult = pSpacing(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyBase = yyResult.index; yyResult = pWord(yyBase); if (yyResult.hasValue("module")) { yyResult = pModuleName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final ModuleName name = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pModuleList(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final ModuleList v$el$2 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$2; } { // Start scope for params. 
final ModuleList params = cast(yyOpValue1); yyBase = yyOption1; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { yyRepetition1 = yyResult.index; yyRepValue1 = Pair.empty(); while (true) { yyResult = pModuleDependency(yyRepetition1); yyError = yyResult.select(yyError, yyRepetition1); if (yyResult.hasValue()) { final ModuleDependency v$el$3 = yyResult.semanticValue(); yyRepetition1 = yyResult.index; yyRepValue1 = new Pair<Object>(v$el$3, yyRepValue1); continue; } break; } { // Start scope for deps. final Pair<ModuleDependency> deps = cast(yyRepValue1.reverse()); yyOption1 = yyRepetition1; yyOpValue1 = null; yyResult = pHeader(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final Action v$el$4 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$4; } { // Start scope for header. final Action header = cast(yyOpValue1); yyOpValue1 = null; yyResult = pBody(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final Action v$el$5 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$5; } { // Start scope for body. final Action body = cast(yyOpValue1); yyOpValue1 = null; yyResult = pFooter(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final Action v$el$6 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$6; } { // Start scope for footer. final Action footer = cast(yyOpValue1); yyOpValue1 = null; yyResult = pOptions(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final List<Attribute> v$el$7 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$7; } { // Start scope for options. 
final List<Attribute> options = cast(yyOpValue1); yyRepetition1 = yyOption1; yyRepValue1 = Pair.empty(); while (true) { yyResult = pProduction(yyRepetition1); yyError = yyResult.select(yyError, yyRepetition1); if (yyResult.hasValue()) { final Production v$el$8 = yyResult.semanticValue(); yyRepetition1 = yyResult.index; yyRepValue1 = new Pair<Object>(v$el$8, yyRepValue1); continue; } break; } { // Start scope for productions. final Pair<Production> productions = cast(yyRepValue1.reverse()); yyResult = pEndOfFile(yyRepetition1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = new Module(Comment.documentation(documentation), name, params, deps.list(), header, body, footer, options, productions.list()); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } // End scope for productions. } // End scope for options. } // End scope for footer. } // End scope for body. } // End scope for header. } // End scope for deps. } else { yyError = yyError.select("';' expected", yyBase); } } // End scope for params. } } else { yyError = yyError.select("'module' expected", yyBase); } } } // End scope for documentation. } // Done. return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.ModuleDependency. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pModuleDependency(final int yyStart) throws IOException { Result yyResult; ModuleDependency yyValue; ParseError yyError = ParseError.DUMMY; // Alternative <Modification>. yyResult = pModuleModification(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } // Alternative <Instantiation>. 
yyResult = pModuleInstantiation(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } // Alternative <Import>. yyResult = pModuleImport(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } // Done. return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.ModuleModification. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pModuleModification(final int yyStart) throws IOException { Result yyResult; int yyBase; int yyOption1; Object yyOpValue1; ModuleModification yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWord(yyStart); if (yyResult.hasValue("modify")) { yyResult = pModuleName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final ModuleName name = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pModuleList(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final ModuleList v$el$1 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$1; } { // Start scope for args. final ModuleList args = cast(yyOpValue1); yyOpValue1 = null; yyResult = pModuleTarget(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final ModuleName v$el$2 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$2; } { // Start scope for target. 
final ModuleName target = cast(yyOpValue1); yyBase = yyOption1; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { yyValue = new ModuleModification(name, args, target); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("';' expected", yyBase); } } // End scope for target. } // End scope for args. } } // Done. yyError = yyError.select("module modification expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.ModuleInstantiation. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pModuleInstantiation(final int yyStart) throws IOException { Result yyResult; int yyBase; int yyOption1; Object yyOpValue1; ModuleInstantiation yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWord(yyStart); if (yyResult.hasValue("instantiate")) { yyResult = pModuleName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final ModuleName name = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pModuleList(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final ModuleList v$el$1 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$1; } { // Start scope for args. final ModuleList args = cast(yyOpValue1); yyOpValue1 = null; yyResult = pModuleTarget(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final ModuleName v$el$2 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$2; } { // Start scope for target. 
final ModuleName target = cast(yyOpValue1); yyBase = yyOption1; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { yyValue = new ModuleInstantiation(name, args, target); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("';' expected", yyBase); } } // End scope for target. } // End scope for args. } } // Done. yyError = yyError.select("module instantiation expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.ModuleImport. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pModuleImport(final int yyStart) throws IOException { Result yyResult; int yyBase; int yyOption1; Object yyOpValue1; ModuleImport yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWord(yyStart); if (yyResult.hasValue("import")) { yyResult = pModuleName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final ModuleName name = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pModuleList(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final ModuleList v$el$1 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$1; } { // Start scope for args. final ModuleList args = cast(yyOpValue1); yyOpValue1 = null; yyResult = pModuleTarget(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { final ModuleName v$el$2 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$2; } { // Start scope for target. 
final ModuleName target = cast(yyOpValue1); yyBase = yyOption1; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { yyValue = new ModuleImport(name, args, target); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("';' expected", yyBase); } } // End scope for target. } // End scope for args. } } // Done. yyError = yyError.select("module import expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.ModuleList. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pModuleList(final int yyStart) throws IOException { PParserColumn yyColumn = (PParserColumn)column(yyStart); if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1(); if (null == yyColumn.chunk1.fModuleList) yyColumn.chunk1.fModuleList = pModuleList$1(yyStart); return yyColumn.chunk1.fModuleList; } /** Actually parse xtc.parser.PGrammar.ModuleList. */ private Result pModuleList$1(final int yyStart) throws IOException { Result yyResult; int yyBase; ModuleList yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pSymbol(yyStart); if (yyResult.hasValue("(")) { final int yyChoice1 = yyResult.index; // Nested alternative 1. yyResult = pModuleName(yyChoice1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final ModuleName name = yyResult.semanticValue(); yyResult = pModuleList$$Star1(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final Pair<ModuleName> names = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue(")")) { yyValue = new ModuleList(new Pair<ModuleName>(name, names).list()); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("')' expected", yyBase); } } } // Nested alternative 2. 
yyBase = yyChoice1; yyResult = pSymbol(yyBase); if (yyResult.hasValue(")")) { yyValue = new ModuleList(new ArrayList<ModuleName>(0)); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("')' expected", yyBase); } } // Done. yyError = yyError.select("module list expected", yyStart); return yyError; } // ========================================================================= /** * Parse synthetic nonterminal xtc.parser.PGrammar.ModuleList$$Star1. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pModuleList$$Star1(final int yyStart) throws IOException { PParserColumn yyColumn = (PParserColumn)column(yyStart); if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1(); if (null == yyColumn.chunk1.fModuleList$$Star1) yyColumn.chunk1.fModuleList$$Star1 = pModuleList$$Star1$1(yyStart); return yyColumn.chunk1.fModuleList$$Star1; } /** Actually parse xtc.parser.PGrammar.ModuleList$$Star1. */ private Result pModuleList$$Star1$1(final int yyStart) throws IOException { Result yyResult; Pair<ModuleName> yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pSymbol(yyStart); if (yyResult.hasValue(",")) { yyResult = pModuleName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final ModuleName v$el$1 = yyResult.semanticValue(); yyResult = pModuleList$$Star1(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final Pair<ModuleName> v$2 = yyResult.semanticValue(); yyValue = new Pair<ModuleName>(v$el$1, v$2); return yyResult.createValue(yyValue, yyError); } } } // Alternative 2. yyValue = Pair.empty(); return new SemanticValue(yyValue, yyStart, yyError); } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.ModuleTarget. * * @param yyStart The index. * @return The result. 
* @throws IOException Signals an I/O error. */ private Result pModuleTarget(final int yyStart) throws IOException { PParserColumn yyColumn = (PParserColumn)column(yyStart); if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1(); if (null == yyColumn.chunk1.fModuleTarget) yyColumn.chunk1.fModuleTarget = pModuleTarget$1(yyStart); return yyColumn.chunk1.fModuleTarget; } /** Actually parse xtc.parser.PGrammar.ModuleTarget. */ private Result pModuleTarget$1(final int yyStart) throws IOException { Result yyResult; ModuleName yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWord(yyStart); if (yyResult.hasValue("as")) { yyResult = pModuleName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } } // Done. yyError = yyError.select("module target expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.ModuleName. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pModuleName(final int yyStart) throws IOException { PParserColumn yyColumn = (PParserColumn)column(yyStart); if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1(); if (null == yyColumn.chunk1.fModuleName) yyColumn.chunk1.fModuleName = pModuleName$1(yyStart); return yyColumn.chunk1.fModuleName; } /** Actually parse xtc.parser.PGrammar.ModuleName. */ private Result pModuleName$1(final int yyStart) throws IOException { Result yyResult; ModuleName yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pName(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String name = yyResult.semanticValue(); yyValue = new ModuleName(name); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } // Done. 
return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Header. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pHeader(final int yyStart) throws IOException { Result yyResult; Action yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWord(yyStart); if (yyResult.hasValue("header")) { yyResult = pAction(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } } // Done. yyError = yyError.select("header expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Body. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pBody(final int yyStart) throws IOException { Result yyResult; Action yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWord(yyStart); if (yyResult.hasValue("body")) { yyResult = pAction(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } } // Done. yyError = yyError.select("body expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Footer. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pFooter(final int yyStart) throws IOException { Result yyResult; Action yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. 
yyResult = pWord(yyStart); if (yyResult.hasValue("footer")) { yyResult = pAction(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } } // Done. yyError = yyError.select("footer expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Options. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pOptions(final int yyStart) throws IOException { Result yyResult; int yyBase; int yyRepetition1; Pair<Attribute> yyRepValue1; List<Attribute> yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWord(yyStart); if (yyResult.hasValue("option")) { yyResult = pAttribute(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final Attribute att = yyResult.semanticValue(); yyRepetition1 = yyResult.index; yyRepValue1 = Pair.empty(); while (true) { yyBase = yyRepetition1; yyResult = pSymbol(yyBase); if (yyResult.hasValue(",")) { yyResult = pAttribute(yyResult.index); yyError = yyResult.select(yyError, yyRepetition1); if (yyResult.hasValue()) { final Attribute v$el$1 = yyResult.semanticValue(); yyRepetition1 = yyResult.index; yyRepValue1 = new Pair<Attribute>(v$el$1, yyRepValue1); continue; } } else { yyError = yyError.select("',' expected", yyBase); } break; } { // Start scope for atts. final Pair<Attribute> atts = yyRepValue1.reverse(); yyBase = yyRepetition1; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { yyValue = new ArrayList<Attribute>(atts.size() + 1); new Pair<Attribute>(att, atts).addTo(yyValue); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("';' expected", yyBase); } } // End scope for atts. } } // Done. 
yyError = yyError.select("options expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Production. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pProduction(final int yyStart) throws IOException { Result yyResult; Production yyValue; ParseError yyError = ParseError.DUMMY; // Alternative <Full>. yyResult = pFullProduction(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } // Alternative <Addition>. yyResult = pAlternativeAddition(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } // Alternative <Removal>. yyResult = pAlternativeRemoval(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } // Alternative <Override>. yyResult = pProductionOverride(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); return yyResult.createValue(yyValue, yyError); } // Done. return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.FullProduction. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pFullProduction(final int yyStart) throws IOException { Result yyResult; int yyBase; FullProduction yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. 
yyResult = pProductionAttributes(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final Pair<Attribute> attributes = yyResult.semanticValue(); yyResult = pTypeName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String type = yyResult.semanticValue(); yyResult = pUnqualifiedNonTerminal(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final NonTerminal nt = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue("=")) { yyResult = pChoice(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final OrderedChoice choice = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { List<Attribute> list = new ArrayList<Attribute>(attributes.size()); attributes.addTo(list); yyValue = new FullProduction(list, type, nt, choice); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("';' expected", yyBase); } } } else { yyError = yyError.select("'=' expected", yyBase); } } } } // Done. return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.AlternativeAddition. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pAlternativeAddition(final int yyStart) throws IOException { Result yyResult; int yyBase; AlternativeAddition yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. 
yyResult = pTypeName(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String type = yyResult.semanticValue(); yyResult = pUnqualifiedNonTerminal(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final NonTerminal nt = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue("+=")) { final int yyChoice1 = yyResult.index; // Nested alternative 1. yyResult = pSequenceName(yyChoice1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final SequenceName s = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue("...")) { yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue("/")) { yyResult = pChoice(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final OrderedChoice choice = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { yyValue = new AlternativeAddition(type, nt, choice, s, false); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("';' expected", yyBase); } } } else { yyError = yyError.select("'/' expected", yyBase); } } else { yyError = yyError.select("'...' expected", yyBase); } } // Nested alternative 2. 
yyResult = pChoice(yyChoice1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final OrderedChoice choice = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue("/")) { yyResult = pSequenceName(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final SequenceName s = yyResult.semanticValue(); yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue("...")) { yyBase = yyResult.index; yyResult = pSymbol(yyBase); if (yyResult.hasValue(";")) { yyValue = new AlternativeAddition(type, nt, choice, s, true); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } else { yyError = yyError.select("';' expected", yyBase); } } else { yyError = yyError.select("'...' expected", yyBase); } } } else { yyError = yyError.select("'/' expected", yyBase); } } } else { yyError = yyError.select("'+=' expected", yyBase); } } } // Done. return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.AlternativeRemoval. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pAlternativeRemoval(final int yyStart) throws IOException { Result yyResult; int yyBase; int yyRepetition1; Pair<SequenceName> yyRepValue1; AlternativeRemoval yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. 
    // NOTE(review): This region is machine-generated recursive-descent parser
    // code (xtc "Rats!" style; it parses nonterminals of xtc.parser.PGrammar).
    // Prefer regenerating from the grammar over hand edits.
    yyResult = pTypeName(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final String type = yyResult.semanticValue();

      yyResult = pUnqualifiedNonTerminal(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final NonTerminal nt = yyResult.semanticValue();

        yyBase = yyResult.index;
        yyResult = pSymbol(yyBase);
        if (yyResult.hasValue("-=")) {

          yyResult = pSequenceName(yyResult.index);
          yyError = yyResult.select(yyError);
          if (yyResult.hasValue()) {
            final SequenceName s = yyResult.semanticValue();

            // Collect ("," SequenceName)* into a reversed pair list.
            yyRepetition1 = yyResult.index;
            yyRepValue1 = Pair.empty();
            while (true) {

              yyBase = yyRepetition1;
              yyResult = pSymbol(yyBase);
              if (yyResult.hasValue(",")) {

                yyResult = pSequenceName(yyResult.index);
                yyError = yyResult.select(yyError, yyRepetition1);
                if (yyResult.hasValue()) {
                  final SequenceName v$el$1 = yyResult.semanticValue();

                  yyRepetition1 = yyResult.index;
                  yyRepValue1 = new Pair<SequenceName>(v$el$1, yyRepValue1);
                  continue;
                }
              } else {
                yyError = yyError.select("',' expected", yyBase);
              }
              break;
            }
            { // Start scope for ss.
              final Pair<SequenceName> ss = yyRepValue1.reverse();

              yyBase = yyRepetition1;
              yyResult = pSymbol(yyBase);
              if (yyResult.hasValue(";")) {

                yyValue = new AlternativeRemoval(type, nt, new Pair<SequenceName>(s, ss).list());
                setLocation(yyValue, yyStart);
                return yyResult.createValue(yyValue, yyError);
              } else {
                yyError = yyError.select("';' expected", yyBase);
              }
            } // End scope for ss.
          }
        } else {
          yyError = yyError.select("'-=' expected", yyBase);
        }
      }
    }

    // Done.
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.ProductionOverride.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pProductionOverride(final int yyStart) throws IOException {
    Result yyResult;
    int yyBase;
    ProductionOverride yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1.
    yyResult = pTypeName(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final String type = yyResult.semanticValue();

      yyResult = pUnqualifiedNonTerminal(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final NonTerminal nt = yyResult.semanticValue();

        yyBase = yyResult.index;
        yyResult = pSymbol(yyBase);
        if (yyResult.hasValue(":=")) {
          final int yyChoice1 = yyResult.index;

          // Nested alternative 1: Choice ";" — complete override.
          yyResult = pChoice(yyChoice1);
          yyError = yyResult.select(yyError);
          if (yyResult.hasValue()) {
            final OrderedChoice choice = yyResult.semanticValue();

            yyBase = yyResult.index;
            yyResult = pSymbol(yyBase);
            if (yyResult.hasValue(";")) {

              yyValue = new ProductionOverride(type, nt, choice, true);
              setLocation(yyValue, yyStart);
              return yyResult.createValue(yyValue, yyError);
            } else {
              yyError = yyError.select("';' expected", yyBase);
            }
          }

          // Nested alternative 2: "..." "/" Choice ";" — partial override.
          yyBase = yyChoice1;
          yyResult = pSymbol(yyBase);
          if (yyResult.hasValue("...")) {

            yyBase = yyResult.index;
            yyResult = pSymbol(yyBase);
            if (yyResult.hasValue("/")) {

              yyResult = pChoice(yyResult.index);
              yyError = yyResult.select(yyError);
              if (yyResult.hasValue()) {
                final OrderedChoice choice = yyResult.semanticValue();

                yyBase = yyResult.index;
                yyResult = pSymbol(yyBase);
                if (yyResult.hasValue(";")) {

                  yyValue = new ProductionOverride(type, nt, choice, false);
                  setLocation(yyValue, yyStart);
                  return yyResult.createValue(yyValue, yyError);
                } else {
                  yyError = yyError.select("';' expected", yyBase);
                }
              }
            } else {
              yyError = yyError.select("'/' expected", yyBase);
            }
          } else {
            yyError = yyError.select("'...' expected", yyBase);
          }

          // Nested alternative 3: Choice "/" "..." ";" — partial override.
          yyResult = pChoice(yyChoice1);
          yyError = yyResult.select(yyError);
          if (yyResult.hasValue()) {
            final OrderedChoice choice = yyResult.semanticValue();

            yyBase = yyResult.index;
            yyResult = pSymbol(yyBase);
            if (yyResult.hasValue("/")) {

              yyBase = yyResult.index;
              yyResult = pSymbol(yyBase);
              if (yyResult.hasValue("...")) {

                yyBase = yyResult.index;
                yyResult = pSymbol(yyBase);
                if (yyResult.hasValue(";")) {

                  yyValue = new ProductionOverride(type, nt, choice, false);
                  setLocation(yyValue, yyStart);
                  return yyResult.createValue(yyValue, yyError);
                } else {
                  yyError = yyError.select("';' expected", yyBase);
                }
              } else {
                yyError = yyError.select("'...' expected", yyBase);
              }
            } else {
              yyError = yyError.select("'/' expected", yyBase);
            }
          }
        } else {
          yyError = yyError.select("':=' expected", yyBase);
        }
      }
    }

    // Alternative 2: attributes-only override — Attributes Type NT ":=" "..." ";".
    yyResult = pProductionAttributes(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final Pair<Attribute> attributes = yyResult.semanticValue();

      yyResult = pTypeName(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final String type = yyResult.semanticValue();

        yyResult = pUnqualifiedNonTerminal(yyResult.index);
        yyError = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          final NonTerminal nt = yyResult.semanticValue();

          yyBase = yyResult.index;
          yyResult = pSymbol(yyBase);
          if (yyResult.hasValue(":=")) {

            yyBase = yyResult.index;
            yyResult = pSymbol(yyBase);
            if (yyResult.hasValue("...")) {

              yyBase = yyResult.index;
              yyResult = pSymbol(yyBase);
              if (yyResult.hasValue(";")) {

                List<Attribute> list = new ArrayList<Attribute>(attributes.size());
                attributes.addTo(list);
                yyValue = new ProductionOverride(list, type, nt);
                setLocation(yyValue, yyStart);
                return yyResult.createValue(yyValue, yyError);
              } else {
                yyError = yyError.select("';' expected", yyBase);
              }
            } else {
              yyError = yyError.select("'...' expected", yyBase);
            }
          } else {
            yyError = yyError.select("':=' expected", yyBase);
          }
        }
      }
    }

    // Done.
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.ProductionAttributes.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pProductionAttributes(final int yyStart) throws IOException {
    // Memoized entry point: the first parse at this index is cached in the
    // per-column chunk and reused on later calls.
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1();
    if (null == yyColumn.chunk1.fProductionAttributes)
      yyColumn.chunk1.fProductionAttributes = pProductionAttributes$1(yyStart);
    return yyColumn.chunk1.fProductionAttributes;
  }

  /** Actually parse xtc.parser.PGrammar.ProductionAttributes. */
  private Result pProductionAttributes$1(final int yyStart) throws IOException {
    Result yyResult;
    Result yyPredResult;
    Pair<Attribute> yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: lookahead — if a TypeName NT ("="|":=") follows directly,
    // the attribute list is empty.
    yyPredResult = pTypeName(yyStart);
    yyError = yyPredResult.select(yyError);
    if (yyPredResult.hasValue()) {

      yyPredResult = pUnqualifiedNonTerminal(yyPredResult.index);
      yyError = yyPredResult.select(yyError);
      if (yyPredResult.hasValue()) {

        yyPredResult = pSymbol(yyPredResult.index);
        yyError = yyPredResult.select(yyError);
        if (yyPredResult.hasValue()) {
          final String s = yyPredResult.semanticValue();

          if ("=".equals(s) || ":=".equals(s)) {

            yyValue = Pair.empty();

            return new SemanticValue(yyValue, yyStart, yyError);
          }
        }
      }
    }

    // Alternative 2: Attribute ProductionAttributes — cons onto the rest.
    yyResult = pAttribute(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final Attribute att = yyResult.semanticValue();

      yyResult = pProductionAttributes(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final Pair<Attribute> atts = yyResult.semanticValue();

        yyValue = new Pair<Attribute>(att, atts);

        return yyResult.createValue(yyValue, yyError);
      }
    }

    // Done.
    // NOTE(review): machine-generated parser code (xtc "Rats!" style) — prefer
    // regenerating from the grammar over hand edits.
    yyError = yyError.select("production attributes expected", yyStart);
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.Choice.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pChoice(final int yyStart) throws IOException {
    // Memoized entry point: result cached in this column's chunk.
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1();
    if (null == yyColumn.chunk1.fChoice)
      yyColumn.chunk1.fChoice = pChoice$1(yyStart);
    return yyColumn.chunk1.fChoice;
  }

  /** Actually parse xtc.parser.PGrammar.Choice. */
  private Result pChoice$1(final int yyStart) throws IOException {
    Result yyResult;
    OrderedChoice yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: Sequence ("/" Sequence)* — an ordered choice.
    yyResult = pSequence(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final Sequence s = yyResult.semanticValue();

      yyResult = pChoice$$Star1(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final Pair<Sequence> ss = yyResult.semanticValue();

        yyValue = new OrderedChoice(new Pair<Sequence>(s, ss).list());
        setLocation(yyValue, yyStart);
        return yyResult.createValue(yyValue, yyError);
      }
    }

    // Done.
    return yyError;
  }

  // =========================================================================

  /**
   * Parse synthetic nonterminal xtc.parser.PGrammar.Choice$$Star1.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pChoice$$Star1(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1();
    if (null == yyColumn.chunk1.fChoice$$Star1)
      yyColumn.chunk1.fChoice$$Star1 = pChoice$$Star1$1(yyStart);
    return yyColumn.chunk1.fChoice$$Star1;
  }

  /** Actually parse xtc.parser.PGrammar.Choice$$Star1.
   */
  private Result pChoice$$Star1$1(final int yyStart) throws IOException {
    Result yyResult;
    Pair<Sequence> yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: "/" Sequence followed by the rest of the repetition.
    yyResult = pSymbol(yyStart);
    if (yyResult.hasValue("/")) {

      yyResult = pSequence(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final Sequence v$el$1 = yyResult.semanticValue();

        yyResult = pChoice$$Star1(yyResult.index);
        yyError = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          final Pair<Sequence> v$2 = yyResult.semanticValue();

          yyValue = new Pair<Sequence>(v$el$1, v$2);

          return yyResult.createValue(yyValue, yyError);
        }
      }
    }

    // Alternative 2: empty repetition.
    yyValue = Pair.empty();

    return new SemanticValue(yyValue, yyStart, yyError);
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.Sequence.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pSequence(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1();
    if (null == yyColumn.chunk1.fSequence)
      yyColumn.chunk1.fSequence = pSequence$1(yyStart);
    return yyColumn.chunk1.fSequence;
  }

  /** Actually parse xtc.parser.PGrammar.Sequence. */
  private Result pSequence$1(final int yyStart) throws IOException {
    Result yyResult;
    Result yyPredResult;
    boolean yyPredMatched;
    int yyOption1;
    SequenceName yyOpValue1;
    Sequence yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: only parse a sequence if an Ellipsis does NOT match here
    // (negative lookahead), then an optional SequenceName and Voided*.
    yyPredMatched = false;
    yyPredResult = pEllipsis(yyStart);
    if (yyPredResult.hasValue()) {
      yyPredMatched = true;
    }
    if (! yyPredMatched) {

      yyOption1 = yyStart;
      yyOpValue1 = null;

      yyResult = pSequenceName(yyOption1);
      yyError = yyResult.select(yyError, yyOption1);
      if (yyResult.hasValue()) {
        final SequenceName v$el$1 = yyResult.semanticValue();

        yyOption1 = yyResult.index;
        yyOpValue1 = v$el$1;
      }
      { // Start scope for n.
        final SequenceName n = yyOpValue1;

        yyResult = pSequence$$Star1(yyOption1);
        yyError = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          final Pair<Element> l = yyResult.semanticValue();

          yyValue = new Sequence(n, l.list());
          setLocation(yyValue, yyStart);
          return yyResult.createValue(yyValue, yyError);
        }
      } // End scope for n.
    } else {
      yyError = yyError.select("sequence expected", yyStart);
    }

    // Done.
    return yyError;
  }

  // =========================================================================

  /**
   * Parse synthetic nonterminal xtc.parser.PGrammar.Sequence$$Star1.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pSequence$$Star1(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1();
    if (null == yyColumn.chunk1.fSequence$$Star1)
      yyColumn.chunk1.fSequence$$Star1 = pSequence$$Star1$1(yyStart);
    return yyColumn.chunk1.fSequence$$Star1;
  }

  /** Actually parse xtc.parser.PGrammar.Sequence$$Star1. */
  private Result pSequence$$Star1$1(final int yyStart) throws IOException {
    Result yyResult;
    Pair<Element> yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: Voided followed by the rest of the repetition.
    yyResult = pVoided(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final Element v$el$2 = yyResult.semanticValue();

      yyResult = pSequence$$Star1(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final Pair<Element> v$3 = yyResult.semanticValue();

        yyValue = new Pair<Element>(v$el$2, v$3);

        return yyResult.createValue(yyValue, yyError);
      }
    }

    // Alternative 2: empty repetition.
    yyValue = Pair.empty();

    return new SemanticValue(yyValue, yyStart, yyError);
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.SequenceName.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pSequenceName(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk1) yyColumn.chunk1 = new Chunk1();
    if (null == yyColumn.chunk1.fSequenceName)
      yyColumn.chunk1.fSequenceName = pSequenceName$1(yyStart);
    return yyColumn.chunk1.fSequenceName;
  }

  /** Actually parse xtc.parser.PGrammar.SequenceName. */
  private Result pSequenceName$1(final int yyStart) throws IOException {
    int yyC;
    int yyIndex;
    Result yyResult;
    int yyBase;
    SequenceName yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: "<" WordCharacters ">".
    yyC = character(yyStart);
    if ('<' == yyC) {
      yyIndex = yyStart + 1;

      yyResult = pWordCharacters(yyIndex);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final String name = yyResult.semanticValue();

        yyBase = yyResult.index;
        yyResult = pSymbol(yyBase);
        if (yyResult.hasValue(">")) {

          yyValue = new SequenceName(name);
          setLocation(yyValue, yyStart);
          return yyResult.createValue(yyValue, yyError);
        } else {
          yyError = yyError.select("'>' expected", yyBase);
        }
      }
    }

    // Done.
    yyError = yyError.select("sequence name expected", yyStart);
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.Ellipsis.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pEllipsis(final int yyStart) throws IOException {
    Result yyResult;
    int yyBase;
    Void yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: SequenceName "...".
    yyResult = pSequenceName(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {

      yyBase = yyResult.index;
      yyResult = pSymbol(yyBase);
      if (yyResult.hasValue("...")) {

        yyValue = null;

        return yyResult.createValue(yyValue, yyError);
      } else {
        yyError = yyError.select("'...' expected", yyBase);
      }
    }

    // Alternative 2: a bare "...".
    yyResult = pSymbol(yyStart);
    if (yyResult.hasValue("...")) {

      yyValue = null;

      return yyResult.createValue(yyValue, yyError);
    }

    // Done.
    yyError = yyError.select("ellipsis expected", yyStart);
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.Voided.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pVoided(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
    if (null == yyColumn.chunk2.fVoided)
      yyColumn.chunk2.fVoided = pVoided$1(yyStart);
    return yyColumn.chunk2.fVoided;
  }

  /** Actually parse xtc.parser.PGrammar.Voided. */
  private Result pVoided$1(final int yyStart) throws IOException {
    Result yyResult;
    int yyBase;
    Element yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: "void" ":" Voided — wraps the element as voided.
    yyResult = pWord(yyStart);
    if (yyResult.hasValue("void")) {

      yyBase = yyResult.index;
      yyResult = pSymbol(yyBase);
      if (yyResult.hasValue(":")) {

        yyResult = pVoided(yyResult.index);
        yyError = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          final Element p = yyResult.semanticValue();

          yyValue = new VoidedElement(p);
          setLocation(yyValue, yyStart);
          return yyResult.createValue(yyValue, yyError);
        }
      } else {
        yyError = yyError.select("':' expected", yyBase);
      }
    }

    // Alternative 2: plain Prefix passthrough.
    yyResult = pPrefix(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Done.
    yyError = yyError.select("voided expected", yyStart);
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.Prefix.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pPrefix(final int yyStart) throws IOException {
    // NOTE(review): machine-generated parser code (xtc "Rats!" style) — prefer
    // regenerating from the grammar over hand edits.
    Result yyResult;
    int yyBase;
    Element yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: "&" Suffix — semantic predicate for actions, otherwise
    // positive lookahead.
    yyResult = pSymbol(yyStart);
    if (yyResult.hasValue("&")) {

      yyResult = pSuffix(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final Element s = yyResult.semanticValue();

        if (s instanceof Action) {
          yyValue = new SemanticPredicate((Action)s);
        } else {
          yyValue = new FollowedBy(s);
        }
        setLocation(yyValue, yyStart);
        return yyResult.createValue(yyValue, yyError);
      }
    }

    // Alternative 2: "!" Suffix — negative lookahead.
    yyResult = pSymbol(yyStart);
    if (yyResult.hasValue("!")) {

      yyResult = pSuffix(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final Element s = yyResult.semanticValue();

        yyValue = new NotFollowedBy(s);
        setLocation(yyValue, yyStart);
        return yyResult.createValue(yyValue, yyError);
      }
    }

    // Alternative 3: "^" Suffix — parser action (only if the suffix is an
    // Action).
    yyResult = pSymbol(yyStart);
    if (yyResult.hasValue("^")) {

      yyResult = pSuffix(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        final Element s = yyResult.semanticValue();

        if (s instanceof Action) {

          yyValue = new ParserAction((Action)s);
          setLocation(yyValue, yyStart);
          return yyResult.createValue(yyValue, yyError);
        }
      }
    }

    // Alternative 4: Word ":" Suffix — a binding.
    yyResult = pWord(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final String id = yyResult.semanticValue();

      yyBase = yyResult.index;
      yyResult = pSymbol(yyBase);
      if (yyResult.hasValue(":")) {

        yyResult = pSuffix(yyResult.index);
        yyError = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          final Element s = yyResult.semanticValue();

          yyValue = new Binding(id, s);
          setLocation(yyValue, yyStart);
          return yyResult.createValue(yyValue, yyError);
        }
      } else {
        yyError = yyError.select("':' expected", yyBase);
      }
    }

    // Alternative 5: StringLiteral ":" Suffix — a string match (the quotes
    // are stripped before unescaping).
    yyResult = pStringLiteral(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final String t1 = yyResult.semanticValue();

      yyResult = pSpacing(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {

        yyBase = yyResult.index;
        yyResult = pSymbol(yyBase);
        if (yyResult.hasValue(":")) {

          yyResult = pSuffix(yyResult.index);
          yyError = yyResult.select(yyError);
          if (yyResult.hasValue()) {
            final Element s = yyResult.semanticValue();

            final String t2 = Utilities.unescape(t1.substring(1, t1.length()-1));
            yyValue = new StringMatch(t2, s);
            setLocation(yyValue, yyStart);
            return yyResult.createValue(yyValue, yyError);
          }
        } else {
          yyError = yyError.select("':' expected", yyBase);
        }
      }
    }

    // Alternative 6: plain Suffix passthrough.
    yyResult = pSuffix(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Done.
    yyError = yyError.select("prefix expected", yyStart);
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.Suffix.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pSuffix(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
    if (null == yyColumn.chunk2.fSuffix)
      yyColumn.chunk2.fSuffix = pSuffix$1(yyStart);
    return yyColumn.chunk2.fSuffix;
  }

  /** Actually parse xtc.parser.PGrammar.Suffix. */
  private Result pSuffix$1(final int yyStart) throws IOException {
    Result yyResult;
    int yyBase;
    Element yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: Primary followed by "?" (option), "*" (zero-or-more) or
    // "+" (one-or-more).
    yyResult = pPrimary(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final Element p = yyResult.semanticValue();
      final int yyChoice1 = yyResult.index;

      // Nested alternative 1.
      yyBase = yyChoice1;
      yyResult = pSymbol(yyBase);
      if (yyResult.hasValue("?")) {

        yyValue = new Option(p);
        setLocation(yyValue, yyStart);
        return yyResult.createValue(yyValue, yyError);
      } else {
        yyError = yyError.select("'?' expected", yyBase);
      }

      // Nested alternative 2.
      yyBase = yyChoice1;
      yyResult = pSymbol(yyBase);
      if (yyResult.hasValue("*")) {

        yyValue = new Repetition(false, p);
        setLocation(yyValue, yyStart);
        return yyResult.createValue(yyValue, yyError);
      } else {
        yyError = yyError.select("'*' expected", yyBase);
      }

      // Nested alternative 3.
      yyBase = yyChoice1;
      yyResult = pSymbol(yyBase);
      if (yyResult.hasValue("+")) {

        yyValue = new Repetition(true, p);
        setLocation(yyValue, yyStart);
        return yyResult.createValue(yyValue, yyError);
      } else {
        yyError = yyError.select("'+' expected", yyBase);
      }
    }

    // Alternative 2: a bare Primary.
    yyResult = pPrimary(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Done.
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.Primary.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pPrimary(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
    if (null == yyColumn.chunk2.fPrimary)
      yyColumn.chunk2.fPrimary = pPrimary$1(yyStart);
    return yyColumn.chunk2.fPrimary;
  }

  /** Actually parse xtc.parser.PGrammar.Primary. */
  private Result pPrimary$1(final int yyStart) throws IOException {
    Result yyResult;
    int yyBase;
    Element yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1.
    yyResult = pNullLiteral(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Alternative 2.
    yyResult = pNonTerminal(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Alternative 3.
    yyResult = pTerminal(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Alternative 4.
    yyResult = pNodeMarker(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Alternative 5.
    yyResult = pAction(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = yyResult.semanticValue();

      return yyResult.createValue(yyValue, yyError);
    }

    // Alternative 6: "(" Choice ")" — a parenthesized choice.
    yyResult = pSymbol(yyStart);
    if (yyResult.hasValue("(")) {

      yyResult = pChoice(yyResult.index);
      yyError = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        yyValue = yyResult.semanticValue();

        yyBase = yyResult.index;
        yyResult = pSymbol(yyBase);
        if (yyResult.hasValue(")")) {

          return yyResult.createValue(yyValue, yyError);
        } else {
          yyError = yyError.select("')' expected", yyBase);
        }
      }
    }

    // Done.
    yyError = yyError.select("primary expected", yyStart);
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.NullLiteral.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pNullLiteral(final int yyStart) throws IOException {
    Result yyResult;
    NullLiteral yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: the name "null".
    yyResult = pName(yyStart);
    if (yyResult.hasValue("null")) {

      yyValue = new NullLiteral();
      setLocation(yyValue, yyStart);
      return yyResult.createValue(yyValue, yyError);
    }

    // Done.
    yyError = yyError.select("null literal expected", yyStart);
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.NonTerminal.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pNonTerminal(final int yyStart) throws IOException {
    Result yyResult;
    NonTerminal yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: a (possibly qualified) Name.
    yyResult = pName(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final String name = yyResult.semanticValue();

      yyValue = new NonTerminal(name);
      setLocation(yyValue, yyStart);
      return yyResult.createValue(yyValue, yyError);
    }

    // Done.
    return yyError;
  }

  // =========================================================================

  /**
   * Parse nonterminal xtc.parser.PGrammar.UnqualifiedNonTerminal.
   *
   * @param yyStart The index.
   * @return The result.
   * @throws IOException Signals an I/O error.
   */
  private Result pUnqualifiedNonTerminal(final int yyStart) throws IOException {
    PParserColumn yyColumn = (PParserColumn)column(yyStart);
    if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
    if (null == yyColumn.chunk2.fUnqualifiedNonTerminal)
      yyColumn.chunk2.fUnqualifiedNonTerminal = pUnqualifiedNonTerminal$1(yyStart);
    return yyColumn.chunk2.fUnqualifiedNonTerminal;
  }

  /** Actually parse xtc.parser.PGrammar.UnqualifiedNonTerminal. */
  private Result pUnqualifiedNonTerminal$1(final int yyStart) throws IOException {
    Result yyResult;
    NonTerminal yyValue;
    ParseError yyError = ParseError.DUMMY;

    // Alternative 1: a single Word.
    yyResult = pWord(yyStart);
    yyError = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      final String name = yyResult.semanticValue();

      yyValue = new NonTerminal(name);
      setLocation(yyValue, yyStart);
      return yyResult.createValue(yyValue, yyError);
    }

    // Done.
return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Terminal. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pTerminal(final int yyStart) throws IOException { int yyC; int yyIndex; Result yyResult; int yyRepetition1; Pair<CharRange> yyRepValue1; Terminal yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyC = character(yyStart); if ('_' == yyC) { yyIndex = yyStart + 1; yyResult = pSpacing(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = new AnyChar(); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } // Alternative 2. yyResult = pCharacterLiteral(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String c = yyResult.semanticValue(); yyResult = pSpacing(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = new CharLiteral(Utilities.unescape(c).charAt(1)); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } // Alternative 3. yyC = character(yyStart); if ('[' == yyC) { yyIndex = yyStart + 1; yyRepetition1 = yyIndex; yyRepValue1 = Pair.empty(); while (true) { yyResult = pRange(yyRepetition1); yyError = yyResult.select(yyError, yyRepetition1); if (yyResult.hasValue()) { final CharRange v$el$1 = yyResult.semanticValue(); yyRepetition1 = yyResult.index; yyRepValue1 = new Pair<CharRange>(v$el$1, yyRepValue1); continue; } break; } { // Start scope for l. final Pair<CharRange> l = yyRepValue1.reverse(); yyC = character(yyRepetition1); if (']' == yyC) { yyIndex = yyRepetition1 + 1; yyResult = pSpacing(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = new CharClass(l.list()); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } } // End scope for l. } // Alternative 4. 
yyResult = pStringLiteral(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String s1 = yyResult.semanticValue(); yyResult = pSpacing(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String s2 = Utilities.unescape(s1); yyValue = new StringLiteral(s2.substring(1, s2.length()-1)); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } // Done. yyError = yyError.select("terminal expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Range. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pRange(final int yyStart) throws IOException { int yyC; int yyIndex; Result yyResult; CharRange yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pClassChar(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String c1 = yyResult.semanticValue(); final int yyChoice1 = yyResult.index; // Nested alternative 1. yyC = character(yyChoice1); if ('-' == yyC) { yyIndex = yyChoice1 + 1; yyResult = pClassChar(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String c2 = yyResult.semanticValue(); yyValue = new CharRange(Utilities.unescape(c1).charAt(0), Utilities.unescape(c2).charAt(0)); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } // Nested alternative 2. yyValue = new CharRange(Utilities.unescape(c1).charAt(0)); setLocation(yyValue, yyStart); return new SemanticValue(yyValue, yyChoice1, yyError); } // Done. yyError = yyError.select("range expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.NodeMarker. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. 
*/ private Result pNodeMarker(final int yyStart) throws IOException { int yyC; int yyIndex; Result yyResult; NodeMarker yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyC = character(yyStart); if ('@' == yyC) { yyIndex = yyStart + 1; yyResult = pWord(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String name = yyResult.semanticValue(); yyValue = new NodeMarker(name); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } // Done. yyError = yyError.select("node marker expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Action. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pAction(final int yyStart) throws IOException { PParserColumn yyColumn = (PParserColumn)column(yyStart); if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2(); if (null == yyColumn.chunk2.fAction) yyColumn.chunk2.fAction = pAction$1(yyStart); return yyColumn.chunk2.fAction; } /** Actually parse xtc.parser.PGrammar.Action. */ private Result pAction$1(final int yyStart) throws IOException { int yyC; int yyIndex; Result yyResult; Action yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyC = character(yyStart); if ('{' == yyC) { yyIndex = yyStart + 1; yyResult = pActionBody(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { final String s = yyResult.semanticValue(); yyC = character(yyResult.index); if ('}' == yyC) { yyIndex = yyResult.index + 1; yyResult = pSpacing(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = new Action(s, yyState.indentations()); setLocation(yyValue, yyStart); return yyResult.createValue(yyValue, yyError); } } } } // Done. 
yyError = yyError.select("action expected", yyStart);
return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.ActionBody.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// NOTE(review): this file appears to be Rats!/xtc generated parser code;
// prefer regenerating from the grammar over hand-editing.
private Result pActionBody(final int yyStart) throws IOException {
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Start a state modification.
  yyState.start();

  // Alternative 1.
  yyResult = pActionText(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    // The semantic value is the raw text matched by ActionText.
    yyValue = difference(yyStart, yyResult.index);

    // Commit the state modification.
    yyState.commit();

    return yyResult.createValue(yyValue, yyError);
  }

  // Abort the state modification.
  yyState.abort();

  // Done.
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.ActionText.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Scans the body of an action: balanced braces, literals, comments, and
// whitespace, looping until no nested alternative matches.
private Result pActionText(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  Result     yyResult;
  int        yyRepetition1;
  int        yyRepetition2;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyRepetition1 = yyStart;
  while (true) {
    final int yyChoice1 = yyRepetition1;

    // Nested alternative 1.
    // A '{' opens a nested action scope; recurse and require the matching '}'.
    yyC = character(yyChoice1);
    if ('{' == yyC) {
      yyIndex = yyChoice1 + 1;
      yyState.open();
      yyResult = pActionText(yyIndex);
      yyError  = yyResult.select(yyError, yyRepetition1);
      if (yyResult.hasValue()) {
        yyC = character(yyResult.index);
        if ('}' == yyC) {
          yyIndex = yyResult.index + 1;
          yyState.close();
          yyRepetition1 = yyIndex;
          continue;
        }
      }
    }

    // Nested alternative 2.
    yyResult = pCharacterLiteral(yyChoice1);
    yyError  = yyResult.select(yyError, yyRepetition1);
    if (yyResult.hasValue()) {
      yyState.content();
      yyRepetition1 = yyResult.index;
      continue;
    }

    // Nested alternative 3.
    yyResult = pStringLiteral(yyChoice1);
    yyError  = yyResult.select(yyError, yyRepetition1);
    if (yyResult.hasValue()) {
      yyState.content();
      yyRepetition1 = yyResult.index;
      continue;
    }

    // Nested alternative 4.
    yyResult = pCodeComment(yyChoice1);
    yyError  = yyResult.select(yyError, yyRepetition1);
    if (yyResult.hasValue()) {
      yyState.content();
      yyRepetition1 = yyResult.index;
      continue;
    }

    // Nested alternative 5.
    yyResult = pTraditionalComment(yyChoice1);
    yyError  = yyResult.select(yyError, yyRepetition1);
    if (yyResult.hasValue()) {
      yyState.content();
      yyRepetition1 = yyResult.index;
      continue;
    }

    // Nested alternative 6.
    // Line comments, line terminators, and plain whitespace.
    yyC = character(yyChoice1);
    if (-1 != yyC) {
      yyIndex = yyChoice1 + 1;
      switch (yyC) {
      case '/':
        {
          yyC = character(yyIndex);
          if (-1 != yyC) {
            yyIndex = yyIndex + 1;
            if ('/' == yyC) {
              // "//" comment: consume to end of line, then the terminator.
              yyRepetition2 = yyIndex;
              while (true) {
                yyC = character(yyRepetition2);
                if (-1 != yyC) {
                  yyIndex = yyRepetition2 + 1;
                  switch (yyC) {
                  case '\n':
                  case '\r':
                    /* No match. */
                    break;
                  default:
                    {
                      yyRepetition2 = yyIndex;
                      continue;
                    }
                  }
                }
                break;
              }
              yyResult = pLineTerminator(yyRepetition2);
              yyError  = yyResult.select(yyError, yyRepetition1);
              if (yyResult.hasValue()) {
                yyRepetition1 = yyResult.index;
                continue;
              }
            }
          }
        }
        break;
      case '\r':
        {
          final int yyChoice2 = yyIndex;

          // Nested alternative 1.
          // "\r\n" counts as a single newline.
          yyC = character(yyChoice2);
          if ('\n' == yyC) {
            yyIndex = yyChoice2 + 1;
            yyState.newline();
            yyRepetition1 = yyIndex;
            continue;
          }

          // Nested alternative 2.
          yyState.newline();
          yyRepetition1 = yyChoice2;
          continue;
        }
      case '\n':
        {
          yyState.newline();
          yyRepetition1 = yyIndex;
          continue;
        }
      case ' ':
        {
          yyRepetition1 = yyIndex;
          continue;
        }
      case '\t':
        {
          yyRepetition1 = yyIndex;
          continue;
        }
      case '\f':
        {
          yyRepetition1 = yyIndex;
          continue;
        }
      default:
        /* No match. */
      }
    }

    // Nested alternative 7.
    // Any other character except whitespace and '}' is plain content.
    yyC = character(yyChoice1);
    if (-1 != yyC) {
      yyIndex = yyChoice1 + 1;
      switch (yyC) {
      case '\t':
      case '\n':
      case '\f':
      case '\r':
      case ' ':
      case '}':
        /* No match. */
        break;
      default:
        {
          yyState.content();
          yyRepetition1 = yyIndex;
          continue;
        }
      }
    }
    break;
  }

  yyValue = null;
  return new SemanticValue(yyValue, yyRepetition1, yyError);
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.Attribute.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk2.
public Result pAttribute(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
  if (null == yyColumn.chunk2.fAttribute)
    yyColumn.chunk2.fAttribute = pAttribute$1(yyStart);
  return yyColumn.chunk2.fAttribute;
}

/** Actually parse xtc.parser.PGrammar.Attribute. */
private Result pAttribute$1(final int yyStart) throws IOException {
  Result     yyResult;
  int        yyBase;
  int        yyOption1;
  Object     yyOpValue1;
  Attribute  yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  // The visibility keywords are recognized before the generic
  // name(value) form below.
  yyResult = pWord(yyStart);
  if (yyResult.hasValue("public")) {
    yyValue = new Attribute(Constants.NAME_VISIBILITY, Constants.VALUE_PUBLIC);
    setLocation(yyValue, yyStart);
    return yyResult.createValue(yyValue, yyError);
  }

  // Alternative 2.
  yyResult = pWord(yyStart);
  if (yyResult.hasValue("protected")) {
    yyValue = new Attribute(Constants.NAME_VISIBILITY, Constants.VALUE_PROTECTED);
    setLocation(yyValue, yyStart);
    return yyResult.createValue(yyValue, yyError);
  }

  // Alternative 3.
  yyResult = pWord(yyStart);
  if (yyResult.hasValue("private")) {
    yyValue = new Attribute(Constants.NAME_VISIBILITY, Constants.VALUE_PRIVATE);
    setLocation(yyValue, yyStart);
    return yyResult.createValue(yyValue, yyError);
  }

  // Alternative 4.
  // Generic attribute: a word, optionally followed by "(" AttributeValue ")".
  yyResult = pWord(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    final String name = yyResult.semanticValue();

    // The parenthesized value is optional; yyOpValue1 stays null if absent.
    yyOption1  = yyResult.index;
    yyOpValue1 = null;

    yyBase   = yyOption1;
    yyResult = pSymbol(yyBase);
    if (yyResult.hasValue("(")) {
      yyResult = pAttributeValue(yyResult.index);
      yyError  = yyResult.select(yyError, yyOption1);
      if (yyResult.hasValue()) {
        final Object v$el$1 = yyResult.semanticValue();

        yyBase   = yyResult.index;
        yyResult = pSymbol(yyBase);
        if (yyResult.hasValue(")")) {
          // Only commit the optional part once the ")" is seen.
          yyOption1  = yyResult.index;
          yyOpValue1 = v$el$1;
        } else {
          yyError = yyError.select("')' expected", yyBase);
        }
      }
    } else {
      yyError = yyError.select("'(' expected", yyBase);
    }
    { // Start scope for value.
      final Object value = yyOpValue1;

      yyValue = new Attribute(name, value);
      setLocation(yyValue, yyStart);
      return new SemanticValue(yyValue, yyOption1, yyError);
    } // End scope for value.
  }

  // Done.
  yyError = yyError.select("attribute expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.AttributeValue.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// An attribute value is an integer literal, a name, or a string literal.
private Result pAttributeValue(final int yyStart) throws IOException {
  Result     yyResult;
  Object     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyResult = pIntegerLiteral(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    final String lit = yyResult.semanticValue();

    yyResult = pSpacing(yyResult.index);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      try {
        // IntegerLiteral only matches decimal/octal/hex forms that
        // Integer.decode accepts, hence the "cannot happen" below.
        yyValue = Integer.decode(lit);
      } catch (NumberFormatException x) {
        yyValue = null; // Cannot happen.
      }
      if (yyValue instanceof Locatable) {
        setLocation((Locatable)yyValue, yyStart);
      }
      return yyResult.createValue(yyValue, yyError);
    }
  }

  // Alternative 2.
  yyResult = pName(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyValue = yyResult.semanticValue();

    return yyResult.createValue(yyValue, yyError);
  }

  // Alternative 3.
  yyResult = pStringLiteral(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyValue = yyResult.semanticValue();

    yyResult = pSpacing(yyResult.index);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      return yyResult.createValue(yyValue, yyError);
    }
  }

  // Done.
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.TypeName.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk2.
private Result pTypeName(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
  if (null == yyColumn.chunk2.fTypeName)
    yyColumn.chunk2.fTypeName = pTypeName$1(yyStart);
  return yyColumn.chunk2.fTypeName;
}

/** Actually parse xtc.parser.PGrammar.TypeName. */
private Result pTypeName$1(final int yyStart) throws IOException {
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyResult = pTypeNameCharacters(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyValue = yyResult.semanticValue();

    yyResult = pSpacing(yyResult.index);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      return yyResult.createValue(yyValue, yyError);
    }
  }

  // Done.
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.TypeNameCharacters.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk2.
private Result pTypeNameCharacters(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
  if (null == yyColumn.chunk2.fTypeNameCharacters)
    yyColumn.chunk2.fTypeNameCharacters = pTypeNameCharacters$1(yyStart);
  return yyColumn.chunk2.fTypeNameCharacters;
}

/** Actually parse xtc.parser.PGrammar.TypeNameCharacters. */
// Matches a (possibly qualified) name with an optional generic argument
// list "<T, U, ...>"; the value is the raw matched text.
private Result pTypeNameCharacters$1(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  Result     yyResult;
  int        yyOption1;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyResult = pNameCharacters(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    // The generic argument list is optional; yyOption1 is only advanced
    // past it once the closing '>' has been seen.
    yyOption1 = yyResult.index;

    yyResult = pSimpleSpacing(yyOption1);
    yyError  = yyResult.select(yyError, yyOption1);
    if (yyResult.hasValue()) {
      yyC = character(yyResult.index);
      if ('<' == yyC) {
        yyIndex = yyResult.index + 1;

        yyResult = pSimpleSpacing(yyIndex);
        yyError  = yyResult.select(yyError, yyOption1);
        if (yyResult.hasValue()) {
          yyResult = pTypeNameCharacters(yyResult.index);
          yyError  = yyResult.select(yyError, yyOption1);
          if (yyResult.hasValue()) {
            yyResult = pSimpleSpacing(yyResult.index);
            yyError  = yyResult.select(yyError, yyOption1);
            if (yyResult.hasValue()) {
              yyResult = pTypeNameCharacters$$Star1(yyResult.index);
              yyError  = yyResult.select(yyError, yyOption1);
              if (yyResult.hasValue()) {
                yyC = character(yyResult.index);
                if ('>' == yyC) {
                  yyIndex = yyResult.index + 1;
                  yyOption1 = yyIndex;
                }
              }
            }
          }
        }
      }
    }

    yyValue = difference(yyStart, yyOption1);
    return new SemanticValue(yyValue, yyOption1, yyError);
  }

  // Done.
  yyError = yyError.select("type name characters expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse synthetic nonterminal
 * xtc.parser.PGrammar.TypeNameCharacters$$Star1.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk2.
private Result pTypeNameCharacters$$Star1(final int yyStart)
  throws IOException {

  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
  if (null == yyColumn.chunk2.fTypeNameCharacters$$Star1)
    yyColumn.chunk2.fTypeNameCharacters$$Star1 =
      pTypeNameCharacters$$Star1$1(yyStart);
  return yyColumn.chunk2.fTypeNameCharacters$$Star1;
}

/** Actually parse xtc.parser.PGrammar.TypeNameCharacters$$Star1. */
// Zero or more ", TypeName" continuations inside a generic argument list,
// implemented by right recursion; always succeeds (possibly matching nothing).
private Result pTypeNameCharacters$$Star1$1(final int yyStart)
  throws IOException {

  int        yyC;
  int        yyIndex;
  Result     yyResult;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if (',' == yyC) {
    yyIndex = yyStart + 1;

    yyResult = pSimpleSpacing(yyIndex);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyResult = pTypeNameCharacters(yyResult.index);
      yyError  = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        yyResult = pSimpleSpacing(yyResult.index);
        yyError  = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          yyResult = pTypeNameCharacters$$Star1(yyResult.index);
          yyError  = yyResult.select(yyError);
          if (yyResult.hasValue()) {
            yyValue = null;

            return yyResult.createValue(yyValue, yyError);
          }
        }
      }
    }
  }

  // Alternative 2.
  yyValue = null;

  return new SemanticValue(yyValue, yyStart, yyError);
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.Name.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk2.
private Result pName(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk2) yyColumn.chunk2 = new Chunk2();
  if (null == yyColumn.chunk2.fName)
    yyColumn.chunk2.fName = pName$1(yyStart);
  return yyColumn.chunk2.fName;
}

/** Actually parse xtc.parser.PGrammar.Name.
 */
private Result pName$1(final int yyStart) throws IOException {
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  // A name is its characters followed by optional spacing.
  yyResult = pNameCharacters(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyValue = yyResult.semanticValue();

    yyResult = pSpacing(yyResult.index);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      return yyResult.createValue(yyValue, yyError);
    }
  }

  // Done.
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.Word.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pWord(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fWord)
    yyColumn.chunk3.fWord = pWord$1(yyStart);
  return yyColumn.chunk3.fWord;
}

/** Actually parse xtc.parser.PGrammar.Word. */
private Result pWord$1(final int yyStart) throws IOException {
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyResult = pWordCharacters(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyValue = yyResult.semanticValue();

    yyResult = pSpacing(yyResult.index);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      return yyResult.createValue(yyValue, yyError);
    }
  }

  // Done.
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.NameCharacters.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pNameCharacters(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fNameCharacters)
    yyColumn.chunk3.fNameCharacters = pNameCharacters$1(yyStart);
  return yyColumn.chunk3.fNameCharacters;
}

/** Actually parse xtc.parser.PGrammar.NameCharacters. */
// A qualified name: word characters followed by zero or more ".word"
// segments; the value is the raw matched text.
private Result pNameCharacters$1(final int yyStart) throws IOException {
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyResult = pWordCharacters(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyResult = pNameCharacters$$Star1(yyResult.index);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = difference(yyStart, yyResult.index);

      return yyResult.createValue(yyValue, yyError);
    }
  }

  // Done.
  return yyError;
}

// =========================================================================

/**
 * Parse synthetic nonterminal xtc.parser.PGrammar.NameCharacters$$Star1.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pNameCharacters$$Star1(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fNameCharacters$$Star1)
    yyColumn.chunk3.fNameCharacters$$Star1 =
      pNameCharacters$$Star1$1(yyStart);
  return yyColumn.chunk3.fNameCharacters$$Star1;
}

/** Actually parse xtc.parser.PGrammar.NameCharacters$$Star1. */
// Zero or more ".word" qualifier segments via right recursion; always
// succeeds (possibly matching nothing).
private Result pNameCharacters$$Star1$1(final int yyStart)
  throws IOException {

  int        yyC;
  int        yyIndex;
  Result     yyResult;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if ('.' == yyC) {
    yyIndex = yyStart + 1;

    yyResult = pWordCharacters(yyIndex);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyResult = pNameCharacters$$Star1(yyResult.index);
      yyError  = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        yyValue = null;

        return yyResult.createValue(yyValue, yyError);
      }
    }
  }

  // Alternative 2.
  yyValue = null;

  return new SemanticValue(yyValue, yyStart, yyError);
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.WordCharacters.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pWordCharacters(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fWordCharacters)
    yyColumn.chunk3.fWordCharacters = pWordCharacters$1(yyStart);
  return yyColumn.chunk3.fWordCharacters;
}

/** Actually parse xtc.parser.PGrammar.WordCharacters. */
// A word starts with an ASCII letter, then continues per the synthetic
// Star1/Plus1 rules below; the value is the raw matched text.
private Result pWordCharacters$1(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    if ((('A' <= yyC) && (yyC <= 'Z')) || (('a' <= yyC) && (yyC <= 'z'))) {
      yyResult = pWordCharacters$$Star1(yyIndex);
      yyError  = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        yyValue = difference(yyStart, yyResult.index);

        return yyResult.createValue(yyValue, yyError);
      }
    }
  }

  // Done.
  yyError = yyError.select("word characters expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse synthetic nonterminal xtc.parser.PGrammar.WordCharacters$$Plus1.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pWordCharacters$$Plus1(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fWordCharacters$$Plus1)
    yyColumn.chunk3.fWordCharacters$$Plus1 =
      pWordCharacters$$Plus1$1(yyStart);
  return yyColumn.chunk3.fWordCharacters$$Plus1;
}

/** Actually parse xtc.parser.PGrammar.WordCharacters$$Plus1. */
// One or more '_' characters via right recursion; fails if the first
// character is not '_'.
private Result pWordCharacters$$Plus1$1(final int yyStart)
  throws IOException {

  int        yyC;
  int        yyIndex;
  Result     yyResult;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    if ('_' == yyC) {
      final int yyChoice1 = yyIndex;

      // Nested alternative 1.
      yyResult = pWordCharacters$$Plus1(yyChoice1);
      yyError  = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        yyValue = null;

        return yyResult.createValue(yyValue, yyError);
      }

      // Nested alternative 2.
      yyValue = null;

      return new SemanticValue(yyValue, yyChoice1, yyError);
    }
  }

  // Done.
  yyError = yyError.select("word characters expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse synthetic nonterminal xtc.parser.PGrammar.WordCharacters$$Star1.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pWordCharacters$$Star1(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fWordCharacters$$Star1)
    yyColumn.chunk3.fWordCharacters$$Star1 =
      pWordCharacters$$Star1$1(yyStart);
  return yyColumn.chunk3.fWordCharacters$$Star1;
}

/** Actually parse xtc.parser.PGrammar.WordCharacters$$Star1.
 */
// Zero or more word-continuation characters: either underscores that must
// be followed by an alphanumeric, or a plain alphanumeric; always succeeds.
private Result pWordCharacters$$Star1$1(final int yyStart)
  throws IOException {

  int        yyC;
  int        yyIndex;
  Result     yyResult;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  // Underscore run must be terminated by an alphanumeric character
  // (a trailing underscore is not part of the word).
  yyResult = pWordCharacters$$Plus1(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyC = character(yyResult.index);
    if (-1 != yyC) {
      yyIndex = yyResult.index + 1;

      if ((('0' <= yyC) && (yyC <= '9')) ||
          (('A' <= yyC) && (yyC <= 'Z')) ||
          (('a' <= yyC) && (yyC <= 'z'))) {
        yyResult = pWordCharacters$$Star1(yyIndex);
        yyError  = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          yyValue = null;

          return yyResult.createValue(yyValue, yyError);
        }
      }
    }
  }

  // Alternative 2.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    if ((('0' <= yyC) && (yyC <= '9')) ||
        (('A' <= yyC) && (yyC <= 'Z')) ||
        (('a' <= yyC) && (yyC <= 'z'))) {
      yyResult = pWordCharacters$$Star1(yyIndex);
      yyError  = yyResult.select(yyError);
      if (yyResult.hasValue()) {
        yyValue = null;

        return yyResult.createValue(yyValue, yyError);
      }
    }
  }

  // Alternative 3.
  yyValue = null;

  return new SemanticValue(yyValue, yyStart, yyError);
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.CharacterLiteral.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pCharacterLiteral(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fCharacterLiteral)
    yyColumn.chunk3.fCharacterLiteral = pCharacterLiteral$1(yyStart);
  return yyColumn.chunk3.fCharacterLiteral;
}

/** Actually parse xtc.parser.PGrammar.CharacterLiteral. */
private Result pCharacterLiteral$1(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  // A character literal: "'" body "'"; the value is the raw matched text
  // including the quotes.
  yyC = character(yyStart);
  if ('\'' == yyC) {
    yyIndex = yyStart + 1;

    yyResult = pCharacterLiteral$$Choice1(yyIndex);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyC = character(yyResult.index);
      if ('\'' == yyC) {
        yyIndex = yyResult.index + 1;

        yyValue = difference(yyStart, yyIndex);

        return new SemanticValue(yyValue, yyIndex, yyError);
      }
    }
  }

  // Done.
  yyError = yyError.select("character literal expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse synthetic nonterminal
 * xtc.parser.PGrammar.CharacterLiteral$$Choice1.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// The body of a character literal: an escape sequence (named, unicode, or
// octal) or any single character except "'" and backslash.
private Result pCharacterLiteral$$Choice1(final int yyStart)
  throws IOException {

  int        yyC;
  int        yyIndex;
  Result     yyResult;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    switch (yyC) {
    case '\\':
      {
        final int yyChoice1 = yyIndex;

        // Nested alternative 1.
        // Named escapes and unicode escapes (\\uXXXX).
        yyC = character(yyChoice1);
        if (-1 != yyC) {
          yyIndex = yyChoice1 + 1;

          switch (yyC) {
          case '\"':
          case '\'':
          case '-':
          case '[':
          case '\\':
          case ']':
          case 'b':
          case 'f':
          case 'n':
          case 'r':
          case 't':
            {
              yyValue = null;

              return new SemanticValue(yyValue, yyIndex, yyError);
            }
          case 'u':
            {
              yyResult = pHexQuad(yyIndex);
              yyError  = yyResult.select(yyError);
              if (yyResult.hasValue()) {
                yyValue = null;

                return yyResult.createValue(yyValue, yyError);
              }
            }
            break;
          default:
            /* No match. */
          }
        }

        // Nested alternative 2.
        { // Start scope for nested choice.
          final int yyChoice2 = yyChoice1;

          // Nested alternative 1.
          // Three-digit octal escape: first digit restricted to 0-3.
          yyC = character(yyChoice2);
          if (-1 != yyC) {
            yyIndex = yyChoice2 + 1;

            switch (yyC) {
            case '0':
            case '1':
            case '2':
            case '3':
              {
                yyC = character(yyIndex);
                if (-1 != yyC) {
                  yyIndex = yyIndex + 1;

                  switch (yyC) {
                  case '0':
                  case '1':
                  case '2':
                  case '3':
                  case '4':
                  case '5':
                  case '6':
                  case '7':
                    {
                      yyC = character(yyIndex);
                      if (-1 != yyC) {
                        yyIndex = yyIndex + 1;

                        switch (yyC) {
                        case '0':
                        case '1':
                        case '2':
                        case '3':
                        case '4':
                        case '5':
                        case '6':
                        case '7':
                          {
                            yyValue = null;

                            return new SemanticValue(yyValue, yyIndex, yyError);
                          }
                        default:
                          /* No match. */
                        }
                      }
                    }
                    break;
                  default:
                    /* No match. */
                  }
                }
              }
              break;
            default:
              /* No match. */
            }
          }

          // Nested alternative 2.
          // One- or two-digit octal escape.
          yyC = character(yyChoice2);
          if (-1 != yyC) {
            yyIndex = yyChoice2 + 1;

            switch (yyC) {
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
              {
                final int yyChoice3 = yyIndex;

                // Nested alternative 1.
                yyC = character(yyChoice3);
                if (-1 != yyC) {
                  yyIndex = yyChoice3 + 1;

                  switch (yyC) {
                  case '0':
                  case '1':
                  case '2':
                  case '3':
                  case '4':
                  case '5':
                  case '6':
                  case '7':
                    {
                      yyValue = null;

                      return new SemanticValue(yyValue, yyIndex, yyError);
                    }
                  default:
                    /* No match. */
                  }
                }

                // Nested alternative 2.
                yyValue = null;

                return new SemanticValue(yyValue, yyChoice3, yyError);
              }
            default:
              /* No match. */
            }
          }
        } // End scope for nested choice.
      }
      break;
    default:
      /* No match. */
    }
  }

  // Alternative 2.
  // Any single character except the closing quote or a backslash.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    switch (yyC) {
    case '\'':
    case '\\':
      /* No match. */
      break;
    default:
      {
        yyValue = null;

        return new SemanticValue(yyValue, yyIndex, yyError);
      }
    }
  }

  // Done.
  yyError = yyError.select("character literal expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.StringLiteral.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pStringLiteral(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fStringLiteral)
    yyColumn.chunk3.fStringLiteral = pStringLiteral$1(yyStart);
  return yyColumn.chunk3.fStringLiteral;
}

/** Actually parse xtc.parser.PGrammar.StringLiteral. */
// A string literal: '"' contents '"'; the value is the raw matched text
// including the quotes.
private Result pStringLiteral$1(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if ('\"' == yyC) {
    yyIndex = yyStart + 1;

    yyResult = pStringLiteral$$Star1(yyIndex);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyC = character(yyResult.index);
      if ('\"' == yyC) {
        yyIndex = yyResult.index + 1;

        yyValue = difference(yyStart, yyIndex);

        return new SemanticValue(yyValue, yyIndex, yyError);
      }
    }
  }

  // Done.
  yyError = yyError.select("string literal expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse synthetic nonterminal xtc.parser.PGrammar.StringLiteral$$Star1.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pStringLiteral$$Star1(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fStringLiteral$$Star1)
    yyColumn.chunk3.fStringLiteral$$Star1 = pStringLiteral$$Star1$1(yyStart);
  return yyColumn.chunk3.fStringLiteral$$Star1;
}

/** Actually parse xtc.parser.PGrammar.StringLiteral$$Star1. */
// Zero or more string-literal body elements (escape sequences or plain
// characters other than '"' and backslash) via right recursion; always
// succeeds.
private Result pStringLiteral$$Star1$1(final int yyStart)
  throws IOException {

  int        yyC;
  int        yyIndex;
  Result     yyResult;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyResult = pEscapeSequence(yyStart);
  yyError  = yyResult.select(yyError);
  if (yyResult.hasValue()) {
    yyResult = pStringLiteral$$Star1(yyResult.index);
    yyError  = yyResult.select(yyError);
    if (yyResult.hasValue()) {
      yyValue = null;

      return yyResult.createValue(yyValue, yyError);
    }
  }

  // Alternative 2.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    switch (yyC) {
    case '\"':
    case '\\':
      /* No match. */
      break;
    default:
      {
        yyResult = pStringLiteral$$Star1(yyIndex);
        yyError  = yyResult.select(yyError);
        if (yyResult.hasValue()) {
          yyValue = null;

          return yyResult.createValue(yyValue, yyError);
        }
      }
    }
  }

  // Alternative 3.
  yyValue = null;

  return new SemanticValue(yyValue, yyStart, yyError);
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.ClassChar.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Memoizing wrapper: caches the result in this column's chunk3.
private Result pClassChar(final int yyStart) throws IOException {
  PParserColumn yyColumn = (PParserColumn)column(yyStart);
  if (null == yyColumn.chunk3) yyColumn.chunk3 = new Chunk3();
  if (null == yyColumn.chunk3.fClassChar)
    yyColumn.chunk3.fClassChar = pClassChar$1(yyStart);
  return yyColumn.chunk3.fClassChar;
}

/** Actually parse xtc.parser.PGrammar.ClassChar. */
// A single character inside a character class: an escape sequence or any
// character except '-', '\\', and ']'; the value is the raw matched text.
private Result pClassChar$1(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  Result     yyResult;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    switch (yyC) {
    case '\\':
      {
        final int yyChoice1 = yyIndex;

        // Nested alternative 1.
        // Named escapes and unicode escapes (\\uXXXX); same escape table as
        // CharacterLiteral$$Choice1, but the value is the matched text.
        yyC = character(yyChoice1);
        if (-1 != yyC) {
          yyIndex = yyChoice1 + 1;

          switch (yyC) {
          case '\"':
          case '\'':
          case '-':
          case '[':
          case '\\':
          case ']':
          case 'b':
          case 'f':
          case 'n':
          case 'r':
          case 't':
            {
              yyValue = difference(yyStart, yyIndex);

              return new SemanticValue(yyValue, yyIndex, yyError);
            }
          case 'u':
            {
              yyResult = pHexQuad(yyIndex);
              yyError  = yyResult.select(yyError);
              if (yyResult.hasValue()) {
                yyValue = difference(yyStart, yyResult.index);

                return yyResult.createValue(yyValue, yyError);
              }
            }
            break;
          default:
            /* No match. */
          }
        }

        // Nested alternative 2.
        { // Start scope for nested choice.
          final int yyChoice2 = yyChoice1;

          // Nested alternative 1.
          // Three-digit octal escape: first digit restricted to 0-3.
          yyC = character(yyChoice2);
          if (-1 != yyC) {
            yyIndex = yyChoice2 + 1;

            switch (yyC) {
            case '0':
            case '1':
            case '2':
            case '3':
              {
                yyC = character(yyIndex);
                if (-1 != yyC) {
                  yyIndex = yyIndex + 1;

                  switch (yyC) {
                  case '0':
                  case '1':
                  case '2':
                  case '3':
                  case '4':
                  case '5':
                  case '6':
                  case '7':
                    {
                      yyC = character(yyIndex);
                      if (-1 != yyC) {
                        yyIndex = yyIndex + 1;

                        switch (yyC) {
                        case '0':
                        case '1':
                        case '2':
                        case '3':
                        case '4':
                        case '5':
                        case '6':
                        case '7':
                          {
                            yyValue = difference(yyStart, yyIndex);

                            return new SemanticValue(yyValue, yyIndex, yyError);
                          }
                        default:
                          /* No match. */
                        }
                      }
                    }
                    break;
                  default:
                    /* No match. */
                  }
                }
              }
              break;
            default:
              /* No match. */
            }
          }

          // Nested alternative 2.
          // One- or two-digit octal escape.
          yyC = character(yyChoice2);
          if (-1 != yyC) {
            yyIndex = yyChoice2 + 1;

            switch (yyC) {
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
              {
                final int yyChoice3 = yyIndex;

                // Nested alternative 1.
                yyC = character(yyChoice3);
                if (-1 != yyC) {
                  yyIndex = yyChoice3 + 1;

                  switch (yyC) {
                  case '0':
                  case '1':
                  case '2':
                  case '3':
                  case '4':
                  case '5':
                  case '6':
                  case '7':
                    {
                      yyValue = difference(yyStart, yyIndex);

                      return new SemanticValue(yyValue, yyIndex, yyError);
                    }
                  default:
                    /* No match. */
                  }
                }

                // Nested alternative 2.
                yyValue = difference(yyStart, yyChoice3);

                return new SemanticValue(yyValue, yyChoice3, yyError);
              }
            default:
              /* No match. */
            }
          }
        } // End scope for nested choice.
      }
      break;
    default:
      /* No match. */
    }
  }

  // Alternative 2.
  // Any character except the class metacharacters.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    switch (yyC) {
    case '-':
    case '\\':
    case ']':
      /* No match. */
      break;
    default:
      {
        yyValue = difference(yyStart, yyIndex);

        return new SemanticValue(yyValue, yyIndex, yyError);
      }
    }
  }

  // Done.
  yyError = yyError.select("class char expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.EscapeSequence.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Not memoized (no chunk wrapper). Matches a backslash followed by a named
// escape, a unicode escape, or an octal escape.
private Result pEscapeSequence(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  Result     yyResult;
  Void       yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    if ('\\' == yyC) {
      final int yyChoice1 = yyIndex;

      // Nested alternative 1.
      yyC = character(yyChoice1);
      if (-1 != yyC) {
        yyIndex = yyChoice1 + 1;

        switch (yyC) {
        case '\"':
        case '\'':
        case '-':
        case '[':
        case '\\':
        case ']':
        case 'b':
        case 'f':
        case 'n':
        case 'r':
        case 't':
          {
            yyValue = null;

            return new SemanticValue(yyValue, yyIndex, yyError);
          }
        case 'u':
          {
            yyResult = pHexQuad(yyIndex);
            yyError  = yyResult.select(yyError);
            if (yyResult.hasValue()) {
              yyValue = null;

              return yyResult.createValue(yyValue, yyError);
            }
          }
          break;
        default:
          /* No match. */
        }
      }

      // Nested alternative 2.
      { // Start scope for nested choice.
        final int yyChoice2 = yyChoice1;

        // Nested alternative 1.
        // Three-digit octal escape: first digit restricted to 0-3.
        yyC = character(yyChoice2);
        if (-1 != yyC) {
          yyIndex = yyChoice2 + 1;

          switch (yyC) {
          case '0':
          case '1':
          case '2':
          case '3':
            {
              yyC = character(yyIndex);
              if (-1 != yyC) {
                yyIndex = yyIndex + 1;

                switch (yyC) {
                case '0':
                case '1':
                case '2':
                case '3':
                case '4':
                case '5':
                case '6':
                case '7':
                  {
                    yyC = character(yyIndex);
                    if (-1 != yyC) {
                      yyIndex = yyIndex + 1;

                      switch (yyC) {
                      case '0':
                      case '1':
                      case '2':
                      case '3':
                      case '4':
                      case '5':
                      case '6':
                      case '7':
                        {
                          yyValue = null;

                          return new SemanticValue(yyValue, yyIndex, yyError);
                        }
                      default:
                        /* No match. */
                      }
                    }
                  }
                  break;
                default:
                  /* No match. */
                }
              }
            }
            break;
          default:
            /* No match. */
          }
        }

        // Nested alternative 2.
        // One- or two-digit octal escape.
        yyC = character(yyChoice2);
        if (-1 != yyC) {
          yyIndex = yyChoice2 + 1;

          switch (yyC) {
          case '0':
          case '1':
          case '2':
          case '3':
          case '4':
          case '5':
          case '6':
          case '7':
            {
              final int yyChoice3 = yyIndex;

              // Nested alternative 1.
              yyC = character(yyChoice3);
              if (-1 != yyC) {
                yyIndex = yyChoice3 + 1;

                switch (yyC) {
                case '0':
                case '1':
                case '2':
                case '3':
                case '4':
                case '5':
                case '6':
                case '7':
                  {
                    yyValue = null;

                    return new SemanticValue(yyValue, yyIndex, yyError);
                  }
                default:
                  /* No match. */
                }
              }

              // Nested alternative 2.
              yyValue = null;

              return new SemanticValue(yyValue, yyChoice3, yyError);
            }
          default:
            /* No match. */
          }
        }
      } // End scope for nested choice.
    }
  }

  // Done.
  yyError = yyError.select("escape sequence expected", yyStart);
  return yyError;
}

// =========================================================================

/**
 * Parse nonterminal xtc.parser.PGrammar.IntegerLiteral.
 *
 * @param yyStart The index.
 * @return The result.
 * @throws IOException Signals an I/O error.
 */
// Not memoized. Recognizes hex ("0x"/"0X" digits), octal ("0" digits),
// the single digit "0", or a decimal literal; the value is the raw text.
private Result pIntegerLiteral(final int yyStart) throws IOException {
  int        yyC;
  int        yyIndex;
  int        yyRepetition1;
  boolean    yyRepeated1;
  String     yyValue;
  ParseError yyError = ParseError.DUMMY;

  // Alternative 1.
  yyC = character(yyStart);
  if (-1 != yyC) {
    yyIndex = yyStart + 1;

    switch (yyC) {
    case '0':
      {
        final int yyChoice1 = yyIndex;

        // Nested alternative 1.
        // Hexadecimal literal: requires at least one hex digit.
        yyC = character(yyChoice1);
        if (-1 != yyC) {
          yyIndex = yyChoice1 + 1;

          switch (yyC) {
          case 'X':
          case 'x':
            {
              yyRepetition1 = yyIndex;
              yyRepeated1   = false;
              while (true) {
                yyC = character(yyRepetition1);
                if (-1 != yyC) {
                  yyIndex = yyRepetition1 + 1;

                  switch (yyC) {
                  case '0':
                  case '1':
                  case '2':
                  case '3':
                  case '4':
                  case '5':
                  case '6':
                  case '7':
                  case '8':
                  case '9':
                  case 'A':
                  case 'B':
                  case 'C':
                  case 'D':
                  case 'E':
                  case 'F':
                  case 'a':
                  case 'b':
                  case 'c':
                  case 'd':
                  case 'e':
                  case 'f':
                    {
                      yyRepetition1 = yyIndex;
                      yyRepeated1   = true;
                      continue;
                    }
                  default:
                    /* No match. */
                  }
                }
                break;
              }

              if (yyRepeated1) {
                yyValue = difference(yyStart, yyRepetition1);

                return new SemanticValue(yyValue, yyRepetition1, yyError);
              }
            }
            break;
          default:
            /* No match. */
          }
        }

        // Nested alternative 2.
        // Octal literal: "0" followed by at least one octal digit.
        yyRepetition1 = yyChoice1;
        yyRepeated1   = false;
        while (true) {
          yyC = character(yyRepetition1);
          if (-1 != yyC) {
            yyIndex = yyRepetition1 + 1;

            switch (yyC) {
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
              {
                yyRepetition1 = yyIndex;
                yyRepeated1   = true;
                continue;
              }
            default:
              /* No match. */
            }
          }
          break;
        }

        if (yyRepeated1) {
          yyValue = difference(yyStart, yyRepetition1);

          return new SemanticValue(yyValue, yyRepetition1, yyError);
        }

        // Nested alternative 3.
        // A lone "0".
        yyValue = "0";

        return new SemanticValue(yyValue, yyChoice1, yyError);
      }
    case '1':
    case '2':
    case '3':
    case '4':
    case '5':
    case '6':
    case '7':
    case '8':
    case '9':
      {
        // Decimal literal: leading non-zero digit, then any digits.
        yyRepetition1 = yyIndex;
        while (true) {
          yyC = character(yyRepetition1);
          if (-1 != yyC) {
            yyIndex = yyRepetition1 + 1;

            switch (yyC) {
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
              {
                yyRepetition1 = yyIndex;
                continue;
              }
            default:
              /* No match. */
            }
          }
          break;
        }

        yyValue = difference(yyStart, yyRepetition1);

        return new SemanticValue(yyValue, yyRepetition1, yyError);
      }
    default:
      /* No match. */
    }
  }

  // Done.
yyError = yyError.select("integer literal expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.HexQuad. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pHexQuad(final int yyStart) throws IOException { int yyC; int yyIndex; Void yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyC = character(yyStart); if (-1 != yyC) { yyIndex = yyStart + 1; switch (yyC) { case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': { yyC = character(yyIndex); if (-1 != yyC) { yyIndex = yyIndex + 1; switch (yyC) { case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': { yyC = character(yyIndex); if (-1 != yyC) { yyIndex = yyIndex + 1; switch (yyC) { case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': { yyC = character(yyIndex); if (-1 != yyC) { yyIndex = yyIndex + 1; switch (yyC) { case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': { yyValue = null; return new SemanticValue(yyValue, yyIndex, yyError); } default: /* No match. */ } } } break; default: /* No match. */ } } } break; default: /* No match. */ } } } break; default: /* No match. */ } } // Done. 
yyError = yyError.select("hex quad expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Symbol. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pSymbol(final int yyStart) throws IOException { PParserColumn yyColumn = (PParserColumn)column(yyStart); if (null == yyColumn.chunk4) yyColumn.chunk4 = new Chunk4(); if (null == yyColumn.chunk4.fSymbol) yyColumn.chunk4.fSymbol = pSymbol$1(yyStart); return yyColumn.chunk4.fSymbol; } /** Actually parse xtc.parser.PGrammar.Symbol. */ private Result pSymbol$1(final int yyStart) throws IOException { Result yyResult; String yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pSymbolCharacters(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyValue = yyResult.semanticValue(); yyResult = pSpacing(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { return yyResult.createValue(yyValue, yyError); } } // Done. return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.SymbolCharacters. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pSymbolCharacters(final int yyStart) throws IOException { int yyC; int yyIndex; boolean yyPredMatched; String yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyC = character(yyStart); if (-1 != yyC) { yyIndex = yyStart + 1; switch (yyC) { case '.': { yyC = character(yyIndex); if (-1 != yyC) { yyIndex = yyIndex + 1; if ('.' == yyC) { yyC = character(yyIndex); if (-1 != yyC) { yyIndex = yyIndex + 1; if ('.' == yyC) { yyValue = "..."; return new SemanticValue(yyValue, yyIndex, yyError); } } } } } break; case '+': { final int yyChoice1 = yyIndex; // Nested alternative 1. 
yyC = character(yyChoice1); if (-1 != yyC) { yyIndex = yyChoice1 + 1; if ('=' == yyC) { yyValue = "+="; return new SemanticValue(yyValue, yyIndex, yyError); } } // Nested alternative 2. yyValue = "+"; return new SemanticValue(yyValue, yyChoice1, yyError); } case '-': { yyC = character(yyIndex); if (-1 != yyC) { yyIndex = yyIndex + 1; if ('=' == yyC) { yyValue = "-="; return new SemanticValue(yyValue, yyIndex, yyError); } } } break; case ':': { final int yyChoice1 = yyIndex; // Nested alternative 1. yyC = character(yyChoice1); if (-1 != yyC) { yyIndex = yyChoice1 + 1; if ('=' == yyC) { yyValue = ":="; return new SemanticValue(yyValue, yyIndex, yyError); } } // Nested alternative 2. yyValue = ":"; return new SemanticValue(yyValue, yyChoice1, yyError); } case ',': { yyValue = ","; return new SemanticValue(yyValue, yyIndex, yyError); } case '=': { yyValue = "="; return new SemanticValue(yyValue, yyIndex, yyError); } case '/': { yyPredMatched = false; yyC = character(yyIndex); if (-1 != yyC) { if (('*' == yyC) || ('/' == yyC)) { yyPredMatched = true; } } if (! 
yyPredMatched) { yyValue = "/"; return new SemanticValue(yyValue, yyIndex, yyError); } else { yyError = yyError.select("symbol characters expected", yyStart); } } break; case '&': { yyValue = "&"; return new SemanticValue(yyValue, yyIndex, yyError); } case '!': { yyValue = "!"; return new SemanticValue(yyValue, yyIndex, yyError); } case '^': { yyValue = "^"; return new SemanticValue(yyValue, yyIndex, yyError); } case '?': { yyValue = "?"; return new SemanticValue(yyValue, yyIndex, yyError); } case '*': { yyValue = "*"; return new SemanticValue(yyValue, yyIndex, yyError); } case '(': { yyValue = "("; return new SemanticValue(yyValue, yyIndex, yyError); } case ')': { yyValue = ")"; return new SemanticValue(yyValue, yyIndex, yyError); } case ';': { yyValue = ";"; return new SemanticValue(yyValue, yyIndex, yyError); } case '<': { yyValue = "<"; return new SemanticValue(yyValue, yyIndex, yyError); } case '>': { yyValue = ">"; return new SemanticValue(yyValue, yyIndex, yyError); } default: /* No match. */ } } // Done. yyError = yyError.select("symbol characters expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.Spacing. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pSpacing(final int yyStart) throws IOException { int yyC; int yyIndex; Result yyResult; boolean yyPredMatched; int yyBase; int yyRepetition1; int yyRepetition2; Void yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyRepetition1 = yyStart; while (true) { final int yyChoice1 = yyRepetition1; // Nested alternative 1. yyC = character(yyChoice1); if (-1 != yyC) { yyIndex = yyChoice1 + 1; switch (yyC) { case ' ': { yyRepetition1 = yyIndex; continue; } case '\t': { yyRepetition1 = yyIndex; continue; } case '\f': { yyRepetition1 = yyIndex; continue; } case '\r': { final int yyChoice2 = yyIndex; // Nested alternative 1. 
yyC = character(yyChoice2); if ('\n' == yyC) { yyIndex = yyChoice2 + 1; yyState.newline(); yyRepetition1 = yyIndex; continue; } // Nested alternative 2. yyState.newline(); yyRepetition1 = yyChoice2; continue; } case '\n': { yyState.newline(); yyRepetition1 = yyIndex; continue; } case '/': { yyC = character(yyIndex); if (-1 != yyC) { yyIndex = yyIndex + 1; switch (yyC) { case '*': { yyPredMatched = false; yyC = character(yyIndex); if ('*' == yyC) { yyPredMatched = true; } if (! yyPredMatched) { yyResult = pCommentBody(yyIndex); yyError = yyResult.select(yyError, yyRepetition1); if (yyResult.hasValue()) { yyBase = yyResult.index; yyC = character(yyBase); if ('*' == yyC) { yyIndex = yyResult.index + 1; yyC = character(yyIndex); if ('/' == yyC) { yyIndex = yyIndex + 1; yyRepetition1 = yyIndex; continue; } else { yyError = yyError.select("'*/' expected", yyBase); } } else { yyError = yyError.select("'*/' expected", yyBase); } } } else { yyError = yyError.select("spacing expected", yyStart); } } break; case '/': { yyRepetition2 = yyIndex; while (true) { yyC = character(yyRepetition2); if (-1 != yyC) { yyIndex = yyRepetition2 + 1; switch (yyC) { case '\n': case '\r': /* No match. */ break; default: { yyRepetition2 = yyIndex; continue; } } } break; } yyResult = pLineTerminator(yyRepetition2); yyError = yyResult.select(yyError, yyRepetition1); if (yyResult.hasValue()) { yyRepetition1 = yyResult.index; continue; } } break; default: /* No match. */ } } } break; default: /* No match. */ } } break; } yyValue = null; return new SemanticValue(yyValue, yyRepetition1, yyError); } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.SimpleSpacing. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. 
*/ private Result pSimpleSpacing(final int yyStart) throws IOException { int yyC; int yyIndex; int yyRepetition1; Void yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyRepetition1 = yyStart; while (true) { final int yyChoice1 = yyRepetition1; // Nested alternative 1. yyC = character(yyChoice1); if (-1 != yyC) { yyIndex = yyChoice1 + 1; switch (yyC) { case '\r': { final int yyChoice2 = yyIndex; // Nested alternative 1. yyC = character(yyChoice2); if ('\n' == yyC) { yyIndex = yyChoice2 + 1; yyState.newline(); yyRepetition1 = yyIndex; continue; } // Nested alternative 2. yyState.newline(); yyRepetition1 = yyChoice2; continue; } case '\n': { yyState.newline(); yyRepetition1 = yyIndex; continue; } case ' ': { yyRepetition1 = yyIndex; continue; } case '\t': { yyRepetition1 = yyIndex; continue; } case '\f': { yyRepetition1 = yyIndex; continue; } default: /* No match. */ } } break; } yyValue = null; return new SemanticValue(yyValue, yyRepetition1, yyError); } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.CodeComment. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pCodeComment(final int yyStart) throws IOException { int yyC; int yyIndex; Result yyResult; int yyBase; String yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. 
yyC = character(yyStart); if ('/' == yyC) { yyIndex = yyStart + 1; yyC = character(yyIndex); if ('*' == yyC) { yyIndex = yyIndex + 1; yyC = character(yyIndex); if ('*' == yyC) { yyIndex = yyIndex + 1; yyResult = pCommentBody(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyBase = yyResult.index; yyC = character(yyBase); if ('*' == yyC) { yyIndex = yyResult.index + 1; yyC = character(yyIndex); if ('/' == yyC) { yyIndex = yyIndex + 1; yyValue = difference(yyStart, yyIndex); return new SemanticValue(yyValue, yyIndex, yyError); } else { yyError = yyError.select("'*/' expected", yyBase); } } else { yyError = yyError.select("'*/' expected", yyBase); } } } } } // Done. yyError = yyError.select("code comment expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.TraditionalComment. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pTraditionalComment(final int yyStart) throws IOException { int yyC; int yyIndex; Result yyResult; boolean yyPredMatched; int yyBase; Void yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyC = character(yyStart); if ('/' == yyC) { yyIndex = yyStart + 1; yyC = character(yyIndex); if ('*' == yyC) { yyIndex = yyIndex + 1; yyPredMatched = false; yyC = character(yyIndex); if ('*' == yyC) { yyPredMatched = true; } if (! 
yyPredMatched) { yyResult = pCommentBody(yyIndex); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { yyBase = yyResult.index; yyC = character(yyBase); if ('*' == yyC) { yyIndex = yyResult.index + 1; yyC = character(yyIndex); if ('/' == yyC) { yyIndex = yyIndex + 1; yyValue = null; return new SemanticValue(yyValue, yyIndex, yyError); } else { yyError = yyError.select("'*/' expected", yyBase); } } else { yyError = yyError.select("'*/' expected", yyBase); } } } else { yyError = yyError.select("traditional comment expected", yyStart); } } } // Done. yyError = yyError.select("traditional comment expected", yyStart); return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.EndOfFile. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pEndOfFile(final int yyStart) throws IOException { int yyC; boolean yyPredMatched; Void yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyPredMatched = false; yyC = character(yyStart); if (-1 != yyC) { yyPredMatched = true; } if (! yyPredMatched) { yyValue = null; return new SemanticValue(yyValue, yyStart, yyError); } else { yyError = yyError.select("end of file expected", yyStart); } // Done. return yyError; } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.CommentBody. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pCommentBody(final int yyStart) throws IOException { int yyC; int yyIndex; boolean yyPredMatched; int yyRepetition1; Void yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyRepetition1 = yyStart; while (true) { final int yyChoice1 = yyRepetition1; // Nested alternative 1. 
yyC = character(yyChoice1); if (-1 != yyC) { yyIndex = yyChoice1 + 1; switch (yyC) { case '*': { yyPredMatched = false; yyC = character(yyIndex); if ('/' == yyC) { yyPredMatched = true; } if (! yyPredMatched) { yyRepetition1 = yyIndex; continue; } else { yyError = yyError.select("comment body expected", yyStart); } } break; case '\r': { final int yyChoice2 = yyIndex; // Nested alternative 1. yyC = character(yyChoice2); if ('\n' == yyC) { yyIndex = yyChoice2 + 1; yyState.newline(); yyRepetition1 = yyIndex; continue; } // Nested alternative 2. yyState.newline(); yyRepetition1 = yyChoice2; continue; } case '\n': { yyState.newline(); yyRepetition1 = yyIndex; continue; } default: /* No match. */ } } // Nested alternative 2. yyC = character(yyChoice1); if (-1 != yyC) { yyIndex = yyChoice1 + 1; switch (yyC) { case '\n': case '\r': case '*': /* No match. */ break; default: { yyRepetition1 = yyIndex; continue; } } } break; } yyValue = null; return new SemanticValue(yyValue, yyRepetition1, yyError); } // ========================================================================= /** * Parse nonterminal xtc.parser.PGrammar.LineTerminator. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */ private Result pLineTerminator(final int yyStart) throws IOException { int yyC; int yyIndex; Void yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyC = character(yyStart); if (-1 != yyC) { yyIndex = yyStart + 1; switch (yyC) { case '\r': { final int yyChoice1 = yyIndex; // Nested alternative 1. yyC = character(yyChoice1); if ('\n' == yyC) { yyIndex = yyChoice1 + 1; yyState.newline(); yyValue = null; return new SemanticValue(yyValue, yyIndex, yyError); } // Nested alternative 2. yyState.newline(); yyValue = null; return new SemanticValue(yyValue, yyChoice1, yyError); } case '\n': { yyState.newline(); yyValue = null; return new SemanticValue(yyValue, yyIndex, yyError); } default: /* No match. */ } } // Done. 
yyError = yyError.select("line terminator expected", yyStart); return yyError; } }
/**
 * Demo/test class exercising two corner cases of Java local-variable
 * declaration, initialization and definite assignment.
 */
class Test14 {
    public static void main(String[] args) {
        // Legal: the declared variable is in scope inside its own initializer.
        // '(x=2)' assigns x before the surrounding multiplication reads it, so
        // the definite-assignment rules are satisfied; x becomes 2*2 = 4.
        int x = (x=2)*2;
        System.out.println(x);
        // A later declarator in the same statement may reference an earlier
        // one: 'two' is fully initialized before 'three' is evaluated, so
        // three == 3.
        int two = 2, three = two + 1;
        System.out.println(three);
    }
}
/** * PTP 2018 BeepBoop - the RobotGame * * Grants access to all Tiles used in BeepBoop. Use getInstance() to get access * to the singleton instance of TileFactory. TileFactory holds each TileId as a * public static int. * * @author ptp18-d06(Pawel Rasch, Tim Runge) * */ public class TileFactory { // ground tile ids, declared as public to facilitate development // Therefore Javadoc is not included for these public static final int GRASS_0 = 1; public static final int GRASS_1 = 2; public static final int GRASS_2 = 3; public static final int EARTH_0 = 4; public static final int EARTH_1 = 5; public static final int EARTH_2 = 6; public static final int ROCK_0 = 7; public static final int ROCK_1 = 8; public static final int ROCK_2 = 9; // ground tile type offsets public static final int GRASS_OFFSET = 1; public static final int EARTH_OFFSET = 4; public static final int ROCK_OFFSET = 7; // ressource tile ids public static final int COPPER = 10; public static final int GOLD = 11; public static final int IRON = 12; public static final int PLATINUM = 13; public static final int SILICON = 14; // player and terminal tile ids public static final int PLAYER = 15; public static final int TERMINAL = 16; // robot tile ids public static final int ROBOT_0 = 17; // imageless tile id public static final int NULL_TILE = 0; private static HashMap<Integer, Tile> tiles = new HashMap<Integer, Tile>(); private static TileFactory tileFactory = null; // private constructor, prevents instantiation, loads ground tiles private TileFactory() { loadTiles(); } /** * Returns the instance of the TileFactory singleton, creates it if * necessary. 
* * @return the singleton TileFactory instance */ public static TileFactory getInstance() { if (TileFactory.tileFactory == null) { TileFactory.tileFactory = new TileFactory(); } return TileFactory.tileFactory; } // preloads all tiles for later use private void loadTiles() { // #######GRASS###### tiles.put(GRASS_0, new BmpTile("grass0", true)); tiles.put(GRASS_1, new BmpTile("grass1", true)); tiles.put(GRASS_2, new BmpTile("grass2", true)); // #######EARTH###### tiles.put(EARTH_0, new BmpTile("earth0", true)); tiles.put(EARTH_1, new BmpTile("earth1", true)); tiles.put(EARTH_2, new BmpTile("earth2", true)); // #######ROCK###### tiles.put(ROCK_0, new BmpTile("rock0", false)); tiles.put(ROCK_1, new BmpTile("rock1", false)); tiles.put(ROCK_2, new BmpTile("rock2", false)); // ######RESOURCES###### tiles.put(COPPER, new BmpTile("copper", false)); tiles.put(GOLD, new BmpTile("gold", false)); tiles.put(IRON, new BmpTile("iron", false)); tiles.put(PLATINUM, new BmpTile("platinum", false)); tiles.put(SILICON, new BmpTile("silicon", false)); // ######misc###### tiles.put(PLAYER, new BmpTile("player", false)); tiles.put(ROBOT_0, new BmpTile("robot0", false)); tiles.put(TERMINAL, new BmpTile("terminal", false)); tiles.put(NULL_TILE, null); } /** * Returns the tile with the specified Id. Tile Ids can be accessed via the * static fields of TileFactory. 
* * @param tileId * the Id of the desired tile * @return the desired tile */ public Tile get(int tileId) { return tiles.get(tileId); } /** * Maps a Resource name to the corresponding Tile * * @param name * an element of {"copper", "gold", "iron", "platinum", * "silicon"} * @return the Tile ID of the Resource, or the ID of the null Tile, if the * argument was invalid */ public static int getTileIdForResource(String name) { int id = NULL_TILE; switch (name.toLowerCase()) { case "copper": id = COPPER; break; case "gold": id = GOLD; break; case "iron": id = IRON; break; case "platinum": id = PLATINUM; break; case "silicon": id = SILICON; break; } return id; } }
<reponame>NetheriteServer/Netherite declare module "secure-json-parse-deno/mod.js" { function secureJsonParse(data: string): any; // @ts-ignore export const parse = secureJsonParse; }
import path from 'path'
import Umzug from 'umzug'
import { loggingFactory } from '../logger'
import { Sequelize } from 'sequelize'

// Dedicated logger namespace for migration output.
const logger = loggingFactory('db:migration')

/**
 * Thin wrapper around Umzug that wires it to the application's Sequelize
 * connection and logger and points it at the bundled migration scripts.
 */
export class Migration {
  // Underlying Umzug instance that performs the actual migration work.
  private umzugIns: Umzug.Umzug

  constructor (sequelize: Sequelize) {
    this.umzugIns = new Umzug({
      // Record executed migrations in a Sequelize-backed storage table.
      storage: 'sequelize',
      logging: logger.info,
      storageOptions: { sequelize },
      migrations: {
        // Migration scripts live next to this module in ./scripts.
        path: path.resolve(__dirname, './scripts'),
        // Each migration script receives (queryInterface, sequelize).
        params: [sequelize.getQueryInterface(), sequelize],
        // Script file names must start with digits and end in .ts or .js.
        pattern: /^\d+[\w-]+\.(ts|js)$/
      }
    })
  }

  /** Applies migrations (delegates to Umzug.up with the given options). */
  // eslint-disable-next-line require-await
  async up (options?: string | string[] | Umzug.UpToOptions | Umzug.UpDownMigrationsOptions): Promise<Umzug.Migration[]> {
    return this.umzugIns.up(options as any)
  }

  /** Reverts migrations (delegates to Umzug.down with the given options). */
  // eslint-disable-next-line require-await
  async down (options?: string | string[] | Umzug.DownToOptions | Umzug.UpDownMigrationsOptions): Promise<Umzug.Migration[]> {
    return this.umzugIns.down(options as any)
  }

  /**
   * Exposes Umzug's event subscription function.
   * NOTE(review): this returns the UNBOUND `on` method of the Umzug
   * instance; if Umzug's `on` relies on `this`, invoking the returned
   * function standalone may misbehave -- confirm (a bound version would be
   * `this.umzugIns.on.bind(this.umzugIns)`).
   */
  get on (): (eventName: ('migrating' | 'reverting' | 'migrated' | 'reverted'), cb?: (name: string, migration: Umzug.Migration) => void) => Umzug.Umzug {
    return this.umzugIns.on
  }

  /**
   * Lists migrations that have not been executed yet.
   * An ENOENT error (presumably the migrations directory does not exist
   * yet -- confirm) is treated as "no pending migrations"; any other error
   * is re-thrown.
   */
  // eslint-disable-next-line require-await
  async pending (): Promise<Umzug.Migration[]> {
    return this.umzugIns.pending().catch(e => {
      if (e.code === 'ENOENT') return []
      return Promise.reject(e)
    })
  }

  /** Lists migrations that have already been executed. */
  // eslint-disable-next-line require-await
  async executed (): Promise<Umzug.Migration[]> {
    return this.umzugIns.executed()
  }
}
/** Tell card driver to stop this queue. */
static void idc_terminate_queue(void)
{
    errval_t r;

    INITDEBUG("idc_terminate_queue()\n");

    // Terminating a queue is only supported when this code runs as a
    // standalone queue driver; in the monolithic build the queue shares the
    // card driver's lifetime, so tearing it down here is refused outright.
    if (!standalone) {
        USER_PANIC("Terminating monolithic driver is not a good idea");
    }

    // Send the terminate request for queue index 'qi' over the e10k
    // interface binding; NOP_CONT requests no send-completion continuation.
    r = e10k_terminate_queue__tx(binding, NOP_CONT, qi);
    assert(err_is_ok(r));
}
Share. You shall ask, Tequila Works shall answer. After a long slumber, promising puzzle-adventure game Rime has reawakened with our month of IGN First coverage. Now that you know what the game is (click that link if you need a refresher!), we're capping the month off by giving you a chance to ask the developers at Tequila Works any question you have about the game. As promised, here are the answers to your best questions! CaleonesKarma asks: How is development on the Switch and would you have any future game release on the console? The Nintendo Switch version is being developed by Tantalus, the studio behind the HD version of The Legend of Zelda: Twilight Princess. We worked side by side with them to ensure the Nintendo Switch version is absolutely true to the original vision we have for RiME, and they are doing a terrific work indeed! We are really enthusiastic about Nintendo Switch and we are immensely happy about releasing RiME on that console, but it’s too early to discuss future developments. Let’s finish RiME first! :) Joshmas asks: How long is the game? The game length can vary significantly from player to player. It will largely depend on how much you want to explore the island and unveil all of its secrets (and we wholeheartedly recommend you do precisely that, as they serve to help flesh out the story) or just speedily progress through the game. On average, RiME can take anywhere from 8-10 hours for one play through, but it can also be a couple hours fewer or more. It really does come down to each player and their playstyle. DougalsMD asks: Will the Nintendo Switch version release at the same time as the PS4, Xbox One, etc.? There is still a lot of development to take into consideration, but we’re working hard to get the Nintendo Switch version ready for release at the same time as the PlayStation 4, Xbox One and Windows PC versions in May. We'll have more info to share in the months to come.
chisberry asks: Will there be a physical release for Switch? We’re currently working with Grey Box to check on the production pricing for cartridge Switch releases. We’ll ideally have additional information before long. wwwarea asks: In a Chapter, is it possible to backtrack to previous areas? I saw gameplay but saw someone jump off a rock high up to reach the fox again and looked like you can't go back maybe. Good eye! We have tried to design the island to be as open a space as possible. We have hidden a good number of secrets in each area, encouraging players to explore the environment, but bear in mind that RiME is not a sandbox. There are several places in the game where you won’t be able to backtrack unless you start a new game. You can always have a second run if you want to get all the Trophies/Achievements! Theoquarius asks: Who came up with the name of the game? What was it inspired by? Originally we wanted to call it Siren but it was already in use, so we brainstormed hundreds of different names that transmitted what RiME was. Most of them were awful, really awful. It was Bradley Crooks who came up with the name "RiME" as in "Rime of the Ancient Mariner", the 18th Century poem by Samuel Taylor Coleridge. (Or maybe you are more familiar with the Iron Maiden version…) aandiorio4 asks: What was the name of the song you used in the most recent trailer? I NEEEEEED it! It is a beautiful theme, isn’t it? It’s called "The Birth of an Island," by the incredibly talented David García! David composed the music for RiME, so if you enjoyed the trailer music, you’ll find that you’ll enjoy the many tracks in the game. Tha_Reaper_84 asks: I feel that single player games should come back. Was really looking forward to this with PS4. Glad it's still being made. The soundtrack, will it be sold separately? Also physical copy of the game? Collector’s edition at all? We can confirm that RiME will get physical editions for PlayStation 4 and Xbox One.
There are no plans for a Collector’s Edition. As for the soundtrack, we are hoping to release it in the future – in our opinion, David García’s work is nothing short of a masterpiece – but we cannot confirm any specific release quite yet. darikooistra asks: The world you've built looks gorgeous. What are you doing to ensure it doesn't feel empty? Filling it with emotions: life, wildlife is essential when you start this new, unknown world. Knowing you are not alone is essential. And if we give you a world to explore, then there must be rewards for that exploration! There are lots of secrets, collectibles, puzzles even unique moments for you to discover.
Brazilian President Luis Inacio Lula da Silva signed the “Statute of Racial Equality.” But he was the first to recognize that in achieving this legislative landmark “we didn’t get everything that we wanted” and “there is much yet to do.” The statute, whose passage required a fight lasting ten years, recognizes the fact that Brazil is a multi-racial and multi-ethnic state in which Brazilians of African descent have been the victims of four hundred years of slavery as well as pervasive historical and current patterns of racial discrimination. In a statement in support of the law, Senator Paulo Paim of President Lula’s own Workers’ Party said, “Last year, research institutes connected with the federal government indicated that Blacks are the poorest, the least educated, are those who when employed receive the lowest wages and who are the overwhelming majority of workers pushed into informal employment and unemployment…the proportion of Blacks below the poverty line is 50%, while among whites it is 25%…” The senator went on to show that these differences show up in social indices such as life expectancy and other things. To remedy this, the law creates a number of new measures: A new social action agency called Sinapir (Portuguese acronym for National System for the Promotion of Racial Equality). A requirement that schools at all levels include detailed instruction on the history and culture of the Africans in Africa and Brazil and their descendants, especially but not only in the teaching of history. Prohibition of racial and ethnic discrimination including with regard to access to any public or private resource or benefit. Protection of Afro-Brazilian religious beliefs, practices and religions (such as Candomble, Macumba, Umbanda and others), as a matter of freedom of religion. Recognition of the descendants of escaped slave “quilombola” communities and financial help for them.
Recognition of Afro-Brazilian kick-boxing, called “capoeira” as an official sport worthy of receiving government support. Lula however pitched into the right wing opposition for having gone to court to block sections of the law that would have established affirmative action quotas in jobs, education and television programming. Opposition to the law was based on the argument, which will be very recognizable to people in the United States, that there is no official racism in Brazil any more, so the law was not needed. This is what Lula was referring to by “much yet to be done”. The total population of Brazil is about 192 million. Of these, the 2000 census identified about 6% as Black, but another 38% as “pardos” (mixed race), who would certainly be considered African Americans if they lived in the United States. Slavery was abolished in Brazil only in 1888, after numerous slave rebellions, but economic, political, social and military elites connected to the slave system continued to have power in the society for a long time afterward, and continued to promote both legal and de-facto racial oppression. Lula also signed legislation authorizing the creation of a “Federal University for Luso-Afro-Brazilian Integration” in the Eastern state of Ceará. This university, which is to be ready in 2011, is intended to bring together Brazilian students and faculty and those from African countries, especially those which were former Portuguese colonies — Angola, Mozambique, Cape Verde, and Sao Tome and Principe. The promotion and passage of these two laws comes has, like everything else, its national and international context. The national context is the Brazilian national election scheduled for October 3. 
The presidential candidate of Lula’s Workers’ Party and its allies, former energy minister Dilma Rousseff, as well as the left generally, hope to mobilize poor Brazilians who are often left out of national electoral politics; the Racial Equality Statute is just one of numerous progressive social policy initiatives related to this aim. But Brazil also has been reaching out to African countries to form new trading partnerships and diplomatic alliances; the recognition of the huge role that African people played, and keep on playing, in Brazilian history and society certainly can’t harm these efforts. Photo: (Agência Brasil/CC)
/*
 * Copyright (c) 2013 Battelle Memorial Institute
 * Licensed under modified BSD License. A copy of this license can be found
 * in the LICENSE file in the top level directory of this distribution.
 */
// -------------------------------------------------------------
/**
 * @file se_app_module.cpp
 * @author Yousu Chen, Bruce Palmer
 * @date 2014-09-18 12:27:18 d3g096
 * Last updated: 8/5/2014
 *
 * @brief State estimation application module: reads measurement data and the
 * network description used by the state estimation solver.
 *
 */
// -------------------------------------------------------------

#include "gridpack/configuration/configuration.hpp"
#include "gridpack/serial_io/serial_io.hpp"
#include "gridpack/parser/PTI23_parser.hpp"
#include "gridpack/parser/PTI33_parser.hpp"
#include "gridpack/mapper/full_map.hpp"
#include "gridpack/mapper/gen_matrix_map.hpp"
#include "gridpack/mapper/gen_vector_map.hpp"
#include "gridpack/mapper/bus_vector_map.hpp"
#include "gridpack/math/math.hpp"
#include "se_app_module.hpp"

// Calling program for state estimation application

/**
 * Basic constructor
 */
gridpack::state_estimation::SEAppModule::SEAppModule(void)
{
}

/**
 * Basic destructor
 */
gridpack::state_estimation::SEAppModule::~SEAppModule(void)
{
}

/**
 * Get list of measurements from external file.
 *
 * Bus measurements (VM, PI, PJ, QI, QJ, VA) carry a single "Bus" id; branch
 * measurements (PIJ, PJI, QIJ, QJI, IIJ, IJI) carry "FromBus"/"ToBus" ids
 * plus a circuit tag. Entries with any other "Type" are silently dropped.
 *
 * NOTE(review): only process rank 0 fills the returned vector; every other
 * rank returns an empty list -- presumably the caller only uses rank 0's
 * copy or distributes it afterwards; confirm at the call site.
 *
 * @param measurements child cursors pointing at the measurement entries in
 * the input deck
 * @return vector of measurements (empty on ranks other than 0)
 */
std::vector<gridpack::state_estimation::Measurement>
  gridpack::state_estimation::SEAppModule::getMeasurements(
      gridpack::utility::Configuration::ChildCursors measurements)
{
  std::vector<gridpack::state_estimation::Measurement> ret;
  if (p_comm.rank() == 0) {
    int size = measurements.size();
    int idx;
    for (idx = 0; idx < size; idx++) {
      // Fields common to every measurement record.
      std::string meas_type;
      measurements[idx]->get("Type", &meas_type);
      double meas_value;
      measurements[idx]->get("Value", &meas_value);
      double meas_deviation;
      measurements[idx]->get("Deviation", &meas_deviation);
      if (meas_type == "VM" || meas_type == "PI" ||
          meas_type == "PJ" || meas_type == "QI" ||
          meas_type == "QJ" || meas_type == "VA") {
        // Bus measurement: a single bus id identifies the location.
        int busid;
        measurements[idx]->get("Bus", &busid);
        gridpack::state_estimation::Measurement measurement;
        // NOTE(review): p_type is a fixed-size char buffer (strcpy target);
        // assumes meas_type fits -- confirm buffer size in Measurement.
        strcpy(measurement.p_type,meas_type.c_str());
        measurement.p_busid = busid;
        measurement.p_value = meas_value;
        measurement.p_deviation = meas_deviation;
        //printf("%s %d %f %f\n", measurement.p_type.c_str(), measurement.p_busid,
        //  measurement.p_value, measurement.p_deviation);
        ret.push_back(measurement);
      } else if (meas_type == "PIJ" || meas_type == "PJI" ||
          meas_type == "QIJ" || meas_type == "QJI" ||
          meas_type == "IIJ" || meas_type == "IJI") {
        // Branch measurement: identified by from/to bus ids and circuit tag.
        int fbusid;
        measurements[idx]->get("FromBus", &fbusid);
        int tbusid;
        measurements[idx]->get("ToBus", &tbusid);
        std::string ckt;
        measurements[idx]->get("CKT", &ckt);
        // Fix up tag so that single character tags are right-justified
        if (ckt.length() == 1) {
          ckt.insert(0,1,' ');
        }
        gridpack::state_estimation::Measurement measurement;
        strcpy(measurement.p_type,meas_type.c_str());
        measurement.p_fbusid = fbusid;
        measurement.p_tbusid = tbusid;
        strcpy(measurement.p_ckt,ckt.c_str());
        measurement.p_value = meas_value;
        measurement.p_deviation = meas_deviation;
        //printf("%s %d %d %s %f %f\n", measurement.p_type.c_str(), measurement.p_fbusid,
        //  measurement.p_tbusid, measurement.p_ckt.c_str(), measurement.p_value,
        //  measurement.p_deviation);
        ret.push_back(measurement);
      }
    }
  }
  return ret;
}

// File-format selector for the network configuration parser.
enum Parser{PTI23, PTI33};

/**
 * Read in and partition the network. The input file is read
 * directly from the state_estimation block in the configuration file so no
 * external file names or parameters need to be passed to this routine
 * @param network pointer to a SENetwork object. This should not have any
 * buses or branches defined on it.
 * @param config pointer to open configuration file
 */
void gridpack::state_estimation::SEAppModule::readNetwork(
    boost::shared_ptr<SENetwork> &network,
    gridpack::utility::Configuration *config)
{
  // NOTE(review): this local declaration shadows the p_comm member, so the
  // member communicator is never assigned here -- confirm intent.
  gridpack::parallel::Communicator p_comm;
  p_network = network;
  p_config = config;
  p_comm = network->communicator();

  gridpack::utility::Configuration::CursorPtr cursor, secursor;
  secursor = config->getCursor("Configuration.State_estimation");
  std::string filename;
  int filetype = PTI23;
  // Prefer the v23 keyword; fall back to the v33 keyword; otherwise give up.
  if (!secursor->get("networkConfiguration",&filename)) {
    if (secursor->get("networkConfiguration_v33",&filename)) {
      filetype = PTI33;
    } else {
      printf("No network configuration file specified\n");
      return;
    }
  }
  // Convergence and iteration parameters
  p_tolerance = secursor->get("tolerance",1.0e-3);
  p_max_iteration = secursor->get("maxIteration",20);

  // load input file
  //gridpack::parser::PTI23_parser<SENetwork> parser(p_network);
  //parser.parse(filename.c_str());
  // Optionally flip the sign convention used for phase shifters
  double phaseShiftSign = secursor->get("phaseShiftSign",1.0);
  if (filetype == PTI23) {
    gridpack::parser::PTI23_parser<SENetwork> parser(network);
    parser.parse(filename.c_str());
    if (phaseShiftSign == -1.0) {
      parser.changePhaseShiftSign();
    }
  } else if (filetype == PTI33) {
    gridpack::parser::PTI33_parser<SENetwork> parser(network);
    parser.parse(filename.c_str());
    if (phaseShiftSign == -1.0) {
      parser.changePhaseShiftSign();
    }
  }

  // partition network
  p_network->partition();

  // Create serial IO object to export data from buses or branches
  p_busIO.reset(new gridpack::serial_io::SerialBusIO<SENetwork>(1024, p_network));
  p_branchIO.reset(new gridpack::serial_io::SerialBranchIO<SENetwork>(1024, p_network));
}

/**
 * Assume that SENetwork already exists and just cache an internal pointer
 * to it. This routine does not call the partition function. Also read in
 * simulation parameters from configuration file
 * @param network pointer to a complete SENetwork object.
* @param config pointer to open configuration file */ void gridpack::state_estimation::SEAppModule::setNetwork( boost::shared_ptr<SENetwork> &network, gridpack::utility::Configuration *config) { gridpack::parallel::Communicator p_comm; p_network = network; p_config = config; p_comm = network->communicator(); gridpack::utility::Configuration::CursorPtr cursor, secursor; secursor = p_config->getCursor("Configuration.State_estimation"); // Convergence and iteration parameters p_tolerance = secursor->get("tolerance",1.0e-3); p_max_iteration = secursor->get("maxIteration",20); char buf[128]; sprintf(buf,"Tolerance: %12.4e\n",p_tolerance); p_busIO->header(buf); sprintf(buf,"Maximum number of iterations: %de\n",p_max_iteration); p_busIO->header(buf); // Create serial IO object to export data from buses or branches p_busIO.reset(new gridpack::serial_io::SerialBusIO<SENetwork>(1024, p_network)); p_branchIO.reset(new gridpack::serial_io::SerialBranchIO<SENetwork>(1024, p_network)); } /** * Read branch and bus measurements. These will come from a separate file. * The name of this file comes from the input configuration file. Call this * method after initializing the network. 
 */
void gridpack::state_estimation::SEAppModule::readMeasurements(void)
{
  // Read in measurement file
  std::string measurementfile;
  gridpack::utility::Configuration::CursorPtr cursor, secursor;
  secursor = p_config->getCursor("Configuration.State_estimation");
  if (!secursor->get("measurementList", &measurementfile)) {
    // Default measurement set used when none is specified in the input deck
    measurementfile = "IEEE14_meas.xml";
  }
  // NOTE(review): 'ok' is never checked -- a missing or unreadable
  // measurement file is silently ignored; confirm this is intended.
  bool ok = p_config->open(measurementfile, p_comm);

  // get a list of measurements
  cursor = p_config->getCursor("Measurements");
  gridpack::utility::Configuration::ChildCursors measurements;
  if (cursor) cursor->children(measurements);
  std::vector<gridpack::state_estimation::Measurement>
    meas = getMeasurements(measurements);
/*
  if (p_comm.rank() == 0) {
    int idx;
    for (idx = 0; idx < meas.size(); idx++) {
      std::string meas_type = meas[idx].p_type;
      if (meas_type == "VM" || meas_type == "PI" || meas_type == "QI") {
        printf("Type: %s\n", meas[idx].p_type);
        printf("Bus: %d\n", meas[idx].p_busid);
        printf("Value: %f\n", meas[idx].p_value);
        printf("Deviation: %f\n", meas[idx].p_deviation);
      } else if (meas_type == "PIJ" || meas_type == "QIJ") {
        printf("Type: %s\n", meas[idx].p_type);
        printf("FromBus: %d\n", meas[idx].p_fbusid);
        printf("ToBus: %d\n", meas[idx].p_tbusid);
        printf("CKT: %s\n", meas[idx].p_ckt);
        printf("Value: %f\n", meas[idx].p_value);
        printf("Deviation: %f\n", meas[idx].p_deviation);
      }
      printf("\n");
    }
  }
*/
  // Add measurements to buses and branches
  p_factory->setMeasurements(meas);
}

/**
 * Set up exchange buffers and other internal parameters and initialize
 * network components using data from data collection
 */
void gridpack::state_estimation::SEAppModule::initialize(void)
{
  // create factory
  p_factory.reset(new gridpack::state_estimation::SEFactoryModule(p_network));
  p_factory->load();

  // set network components using factory
  p_factory->setComponents();

  // Set up bus data exchange buffers. Need to decide what data needs to be exchanged
  p_factory->setExchange();

  // Create bus data exchange
  p_network->initBusUpdate();
}

/**
 * Solve the state estimation problem via Newton-Raphson iteration on the
 * weighted-least-squares normal equations.
 */
void gridpack::state_estimation::SEAppModule::solve(void)
{
  // set YBus components so that you can create Y matrix
  p_factory->setYBus();

  // set some state estimation parameters
  p_factory->configureSE();

  p_factory->setMode(YBus);
  gridpack::mapper::FullMatrixMap<SENetwork> ybusMap(p_network);
  boost::shared_ptr<gridpack::math::Matrix> ybus = ybusMap.mapToMatrix();
  // p_branchIO->header("\nybus:\n");
  // ybus->print();

  // Create mapper to push voltage data back onto buses
  p_factory->setMode(Voltage);
  gridpack::mapper::BusVectorMap<SENetwork> VMap(p_network);

  // Create initial version of H Jacobian and estimation vector
  p_factory->setMode(Jacobian_H);
  gridpack::mapper::GenMatrixMap<SENetwork> HJacMap(p_network);
  boost::shared_ptr<gridpack::math::Matrix> HJac = HJacMap.mapToMatrix();
  // p_branchIO->header("\nHJac:\n");
  // HJac->print();

  gridpack::mapper::GenVectorMap<SENetwork> EzMap(p_network);
  boost::shared_ptr<gridpack::math::Vector> Ez = EzMap.mapToVector();

  // Convergence and iteration parameters; tol is primed above the threshold
  // so the loop body executes at least once.
  ComplexType tol;
  tol = 2.0*p_tolerance;
  int iter = 0;

  // Inverse of the measurement covariance (weighting) matrix
  p_factory->setMode(R_inv);
  gridpack::mapper::GenMatrixMap<SENetwork> RinvMap(p_network);
  boost::shared_ptr<gridpack::math::Matrix> Rinv = RinvMap.mapToMatrix();
  // Rinv->print();

  // Start N-R loop
  while (real(tol) > p_tolerance && iter < p_max_iteration) {

    // Form estimation vector
    p_factory->setMode(Jacobian_H);
    // printf("Got to HJac\n");
    HJacMap.mapToMatrix(HJac);
    // HJac->print();
    // printf("Got to H'\n");

    // Form H'
    boost::shared_ptr<gridpack::math::Matrix> trans_HJac(transpose(*HJac));
    // trans_HJac->print();
    // printf("Got to Ez\n");

    // Build measurement equation
    EzMap.mapToVector(Ez);
    // Ez->print();
    // printf("Got to Gain\n");

    // Form Gain matrix: G = H' * Rinv * H
    boost::shared_ptr<gridpack::math::Matrix> Gain1(multiply(*trans_HJac, *Rinv));
    boost::shared_ptr<gridpack::math::Matrix> Gain(multiply(*Gain1, *HJac));
    // Gain->print();
    // printf("Got to H'*Rinv\n");

    // Form right hand side vector: RHS = H' * Rinv * Ez
    boost::shared_ptr<gridpack::math::Matrix> HTR(multiply(*trans_HJac, *Rinv));
    // HTR->print();
    // printf("Got to RHS\n");

    // printf("HTR iDim: %d jDim: %d Ez len: %d\n",HTR->rows(),HTR->cols(),Ez->size());
    boost::shared_ptr<gridpack::math::Vector> RHS(multiply(*HTR, *Ez));
    // printf("Create Solver\n");
    // RHS->print();
    // printf("Got to Solver\n");

    // create a linear solver, configured from the same input deck section
    gridpack::utility::Configuration::CursorPtr cursor;
    cursor = p_config->getCursor("Configuration.State_estimation");
    gridpack::math::LinearSolver solver(*Gain);
    solver.configure(cursor);
    p_busIO->header("\n Print Gain matrix\n");
    // Gain->print();
    // Gain->save("gain.txt");

    // printf("Got to DeltaX\n");
    // Solve linear equation G * X = RHS for the state update X
    boost::shared_ptr<gridpack::math::Vector> X(RHS->clone());
    // printf("Got to Solve\n");
    p_busIO->header("\n Print RHS vector\n");
    // RHS->print();
    X->zero(); //might not need to do this
    solver.solve(*RHS, *X);
    // X->print();
    // printf("Got to updateBus\n");
    // boost::shared_ptr<gridpack::math::Vector> X(solver.solve(*RHS));

    // Convergence check: infinity norm of the state update
    tol = X->normInfinity();
    char ioBuf[128];
    sprintf(ioBuf,"\nIteration %d Tol: %12.6e\n",iter+1,real(tol));
    p_busIO->header(ioBuf);

    // Push solution back onto bus variables
    p_factory->setMode(Voltage);
    // NOTE(review): the mapToBus call below is commented out, so the solved
    // update X appears never to be pushed back to the buses -- without it the
    // N-R iteration cannot progress; confirm against the upstream source.
    // VMap.mapToBus(X);

    // update values
    p_network->updateBuses();
    // printf("Last sentence\n");
    iter++;

  // End N-R loop
  }
}

/**
 * Write final results of state estimation calculation to standard
 * output
 */
void gridpack::state_estimation::SEAppModule::write(void)
{
  // gridpack::serial_io::SerialBranchIO<SENetwork> p_branchIO(512,p_network);
  // p_branchIO->header("\n Branch Power Flow\n");
  // p_branchIO->header("\n Bus 1 Bus 2 CKT P" " Q\n");
  // p_branchIO->write();

  p_busIO->header("\n State Estimation Outputs\n");
  p_busIO->header("\n Bus Number Phase Angle Voltage Magnitude\n");
  p_busIO->write();
  p_branchIO->header("\n Branch Power Flow (p.u.)\n");
  p_branchIO->header("\n Bus 1 Bus 2 P Q\n");
  p_branchIO->write();

  // "se" selects the state-estimation-specific write channel on components
  p_busIO->header("\n Comparison of Bus Measurements and Estimations\n");
  p_busIO->header("\n Type Bus Number Measurement Estimate" " Difference Deviation\n");
  p_busIO->write("se");

  p_branchIO->header("\n Comparison of Branch Measurements and Estimations\n");
  p_branchIO->header("\n Type From To CKT Measurement Estimate" " Difference Deviation\n");
  p_branchIO->write("se");
  // Output
}

/**
 * Save results of state estimation calculation to data collection objects
 */
void gridpack::state_estimation::SEAppModule::saveData(void)
{
  p_factory->saveData();
}
// decode deserializes raw Kubernetes manifest bytes into a typed runtime
// object using the core/v1 scheme codec, and also returns the object's
// metadata accessor. It returns a non-nil error if either the decode or the
// metadata access fails.
// NOTE(review): test helper adapted from upstream examples; the *testing.T
// parameter is currently unused -- confirm whether it should report errors
// via t instead of returning them.
func decode(t *testing.T, data []byte) (runtime.Object, metav1.Object, error) {
	decoder := scheme.Codecs.UniversalDecoder(corev1.SchemeGroupVersion)
	r, _, err := decoder.Decode(data, nil, nil)
	if err != nil {
		return nil, nil, err
	}
	obj, err := meta.Accessor(r)
	if err != nil {
		return nil, nil, err
	}
	return r, obj, nil
}
def autodoc_process_docstring(app, what, name, obj, options, lines):
    """Sphinx ``autodoc-process-docstring`` event hook.

    For :class:`zope.interface.interface.InterfaceClass` objects that declare
    an ``__init__`` attribute, expose that declaration as a real
    ``__init__`` attribute on the interface so autodoc can document it.
    Objects of any other type are left untouched.
    """
    if isinstance(obj, zope.interface.interface.InterfaceClass):
        init_decl = obj.get("__init__")
        if init_decl:
            obj.__init__ = init_decl
/**
 * check_corruption - check the data area of PEB.
 * @ubi: UBI device description object
 * @vid_hdr: the (corrupted) VID header of this PEB
 * @pnum: the physical eraseblock number to check
 *
 * This is a helper function which is used to distinguish between VID header
 * corruptions caused by power cuts and other reasons. If the PEB contains only
 * 0xFF bytes in the data area, the VID header is most probably corrupted
 * because of a power cut (%0 is returned in this case). Otherwise, it was
 * probably corrupted for some other reasons (%1 is returned in this case). A
 * negative error code is returned if a read error occurred.
 *
 * If the corruption reason was a power cut, UBI can safely erase this PEB.
 * Otherwise, it should preserve it to avoid possibly destroying important
 * information.
 */
static int check_corruption(struct ubi_device *ubi, struct ubi_vid_hdr *vid_hdr,
			    int pnum)
{
	int err;

	/* Serialize access to the (possibly device-shared) read buffer */
#ifdef CONFIG_UBI_SHARE_BUFFER
	mutex_lock(&ubi_buf_mutex);
#else
	mutex_lock(&ubi->buf_mutex);
#endif
	memset(ubi->peb_buf, 0x00, ubi->leb_size);

	err = ubi_io_read(ubi, ubi->peb_buf, pnum, ubi->leb_start,
			  ubi->leb_size);
	if (err == UBI_IO_BITFLIPS || mtd_is_eccerr(err)) {
		/*
		 * Bit-flips and ECC errors while reading the data area mean
		 * the contents were still retrieved; fall through to return 0
		 * (treated as power-cut corruption, safe to erase).
		 */
		err = 0;
		goto out_unlock;
	}

	if (err)
		goto out_unlock;

	/* All 0xFF in the data area: interrupted write, returns 0 (erasable) */
	if (ubi_check_pattern(ubi->peb_buf, 0xFF, ubi->leb_size))
		goto out_unlock;

	ubi_err("PEB %d contains corrupted VID header, and the data does not contain all 0xFF", pnum);
	ubi_err("this may be a non-UBI PEB or a severe VID header corruption which requires manual inspection");
	ubi_dump_vid_hdr(vid_hdr);
	pr_err("hexdump of PEB %d offset %d, length %d",
	       pnum, ubi->leb_start, ubi->leb_size);
	ubi_dbg_print_hex_dump(KERN_DEBUG, "", DUMP_PREFIX_OFFSET, 32, 1,
			       ubi->peb_buf, ubi->leb_size, 1);
	err = 1;

out_unlock:
#ifdef CONFIG_UBI_SHARE_BUFFER
	mutex_unlock(&ubi_buf_mutex);
#else
	mutex_unlock(&ubi->buf_mutex);
#endif
	return err;
}
/* ** Example: overrides ** ** This example shows using the overrides system in Metadesk to plug in a ** custom memory allocator and file loading routine. There are more options ** in the overrides than are presented here. A full list of the overrides ** options is kept in md.c 'Overrides & Options Macros' ** ** A few of the reasons one might want to use the Metadesk overrides are: ** 1. Plugging in a custom allocator to control the memory allocations ** 2. Plugging in a custom arena implementation for more seamless ** interoperation between the codebase and library ** 3. Provide implementation for unsupported OSes without having to modify ** md.h or md.c ** 4. Remove dependency on CRT ** 5. Remove dependency on OS headers ** */ //~ example allocator ///////////////////////////////////////////////////////// // @notes This isn't really "the example" but we need something to play the // role of a custom allocator, imagine this is any alloc & free style // allocator you might already have in a codebase. typedef struct ExampleAllocatorNode{ struct ExampleAllocatorNode *next; struct ExampleAllocatorNode *prev; int size_after_node; } ExampleAllocatorNode; typedef struct ExampleAllocator{ ExampleAllocatorNode *first; ExampleAllocatorNode *last; } ExampleAllocator; void* examp_alloc(ExampleAllocator *a, int size); void examp_free(ExampleAllocator *a, void *ptr); //~ include metadesk header /////////////////////////////////////////////////// // @notes Disabling print helpers removes any APIs from the library that depend // on FILE from stdio.h. #define MD_DISABLE_PRINT_HELPERS 1 // @notes Here is also a good place to disable the default implementations of // anything that is overriden to avoid extra includes. #define MD_DEFAULT_MEMORY 0 #define MD_DEFAULT_FILE_LOAD 0 // @notes We can also disable default implementations for "optional" parts, // here we disable the default file iterator without replacing it, which gets // this example off of direct OS header dependencies. 
#define MD_DEFAULT_FILE_ITER 0 // @notes We include the metadesk header before we define the overrides because // some overrides require that metadesk base types be visible. There are // exceptions to this pattern, in particular overrides for types need to be // defined before including md.h, we aren't going that far here. #include "md.h" //~ set metadesk overrides //////////////////////////////////////////////////// // override memory to use malloc/free // @notes A common practice in setting up allocator overrides is to use a pass // through opaque user context pointer. Metadesk does something different. // We recommend passing the context pointer with a global in single threaded // cases, and with a pointer in thread local storage in multi-threaded cases. ExampleAllocator* md_example_allocator = 0; // @notes In this example the allocator only provides alloc & free, but the // Metadesk override group we want to plug into has reserve commit, decommit & // release. This is okay though, we can turn commit & decommit into no-ops and // reserve & release as equivalent to alloc & free. void* md_reserve_by_example_allocator(unsigned long long size); void md_release_by_example_allocator(void *ptr, unsigned long long ignore); #define MD_IMPL_Reserve md_reserve_by_example_allocator #define MD_IMPL_Commit(p,z) (1) #define MD_IMPL_Decommit(p,z) ((void)0) #define MD_IMPL_Release md_release_by_example_allocator // @notes Since we are turning commit & decommit into no-ops it doesn't make // sense for the Metadesk arena to have a reserve size larger than it's // commit size anymore. The default for reserve size is 64 megabytes, which // is usually too large of an alloc block size, and the default commit size // is 64 kilabytes, which is usually too small. So we set both to 1 megabyte. // // Pro-Tip: (N << 20) is a nice shorthand for N megabytes, and // (N << 10) is N kilabytes. 
#define MD_DEFAULT_ARENA_RES_SIZE (1 << 20) #define MD_DEFAULT_ARENA_CMT_SIZE (1 << 20) // override file loading // @notes We'll also demonstrate another override, this time one that relies // on Metadesk-provided types. The actual override here is pointless, as it's // just another implementation of "LoadEntireFile" on stdio.h, which is what // the default provided by the library is as well. MD_String8 md_load_entire_file_by_stdio(MD_Arena *arena, MD_String8 filename); #define MD_IMPL_LoadEntireFile md_load_entire_file_by_stdio //~ metadesk source, global arena ///////////////////////////////////////////// #include "md.c" static MD_Arena *arena = 0; //~ implement overrides /////////////////////////////////////////////////////// // override memory to use malloc/free #include <stdlib.h> #include <assert.h> void* md_reserve_by_example_allocator(unsigned long long size) { assert(md_example_allocator != 0); void *result = examp_alloc(md_example_allocator, (int)size); return(result); } void md_release_by_example_allocator(void *ptr, unsigned long long ignore) { assert(md_example_allocator != 0); examp_free(md_example_allocator, ptr); } // override file loading #include <stdio.h> MD_String8 md_load_entire_file_by_stdio(MD_Arena *arena, MD_String8 filename) { MD_String8 result = {0}; MD_ArenaTemp scratch = MD_GetScratch(&arena, 1); MD_String8 filename_copy = MD_S8Copy(scratch.arena, filename); char *filename_cstr = (char*)filename_copy.str; FILE *file = fopen(filename_cstr, "rb"); if (file != 0) { fseek(file, 0, SEEK_END); result.size = ftell(file); fseek(file, 0, SEEK_SET); fread(result.str, result.size, 1, file); fclose(file); } MD_ReleaseScratch(scratch); return(result); } //~ main ////////////////////////////////////////////////////////////////////// int main(int argc, char **argv) { // ... where ever program init stuff is happening ... 
// initialize the example allocator ExampleAllocator allocator = {0}; // metadesk allocator context gets setup before a call to MD_ArenaAlloc md_example_allocator = &allocator; // setup the global arena arena = MD_ArenaAlloc(); // ... any normal metadesk usage may now happen ... return 0; } //~ implement the example allocator /////////////////////////////////////////// void* examp_alloc(ExampleAllocator *a, int size) { ExampleAllocatorNode *node = (ExampleAllocatorNode*)malloc(size + sizeof(*node)); node->size_after_node = size; if (a->first == 0) { a->first = a->last = node; node->next = node->prev = 0; } else { node->prev = a->last; node->next = 0; a->last->next = node; a->last = node; } void *result = (node + 1); return(result); } void examp_free(ExampleAllocator *a, void *ptr) { ExampleAllocatorNode *node = ((ExampleAllocatorNode*)ptr) - 1; if (node->next != 0) { node->next->prev = node->prev; } if (node->prev != 0) { node->prev->next = node->next; } if (a->first == node) { a->first = node->next; } if (a->last == node) { a->last = node->prev; } free(node); } //~ final notes /////////////////////////////////////////////////////////////// // @notes The Metadesk override system uses macro overriding. This means it // does not provide a mechanism for dynamically dispatching to different // implementations of the overridable operations. But you can always plug in // your own dynamic dispatch into the macro if you really need it. We // recommend against trying to instantiate the library twice in the same // program with different overrides, because that will lead to separate // instances of our thread local context variables which will make Metadesk // more resource intensive than it needs to be, and may lead to surprising // behavior.
/**
 * Called when a clan associate displays either their own or another clans information.
 *
 * @apiNote Alternatively called when a normal (non-associate) player views another clans information.
 *
 */
public class AssociateDisplayInfoEvent extends AssociateEvent {

	/** Which view was opened: the associate's own clan or another clan. */
	private final Type type;

	/** Explicitly supplied clan for the OTHER view; null when the associate views their own clan. */
	private final Clan c;

	/** An associate viewing their own clan's information. */
	public AssociateDisplayInfoEvent(Clan.Associate associate, Type type) {
		super(associate, true);
		this.type = type;
		this.c = null;
	}

	/** A player viewing the information of clan {@code c}. */
	public AssociateDisplayInfoEvent(Clan.Associate associate, Player player, Clan c, Type type) {
		super(associate, player.getUniqueId(), true);
		this.type = type;
		this.c = c;
	}

	/** Returns the explicitly supplied clan when present, otherwise the associate's own clan. */
	@Override
	public Clan getClan() {
		return c != null ? c : super.getClan();
	}

	/** The display variant that triggered this event. */
	public final Type getType() {
		return type;
	}

	/** Display variants. */
	public enum Type {
		PERSONAL,
		OTHER
	}

}
/**
 * A PaymentResponse is returned when a user has selected a payment method and
 * approved a payment request.
 *
 * <p>Fluent setters ({@code withX}) return {@code this} to allow chaining.
 * Fields are serialized by Jackson under the JSON property names given in the
 * annotations below.
 */
public class PaymentResponse {
    /**
     * The payment method identifier for the payment method that the user
     * selected to fulfil the transaction.
     */
    @JsonProperty(value = "methodName")
    private String methodName;

    /**
     * A JSON-serializable object that provides a payment method specific
     * message used by the merchant to process the transaction and determine
     * successful fund transfer.
     */
    @JsonProperty(value = "details")
    private Object details;

    /**
     * If the requestShipping flag was set to true in the PaymentOptions passed
     * to the PaymentRequest constructor, then shippingAddress will be the full
     * and final shipping address chosen by the user.
     */
    @JsonProperty(value = "shippingAddress")
    private PaymentAddress shippingAddress;

    /**
     * If the requestShipping flag was set to true in the PaymentOptions passed
     * to the PaymentRequest constructor, then shippingOption will be the id
     * attribute of the selected shipping option.
     */
    @JsonProperty(value = "shippingOption")
    private String shippingOption;

    /**
     * If the requestPayerEmail flag was set to true in the PaymentOptions
     * passed to the PaymentRequest constructor, then payerEmail will be the
     * email address chosen by the user.
     */
    @JsonProperty(value = "payerEmail")
    private String payerEmail;

    /**
     * If the requestPayerPhone flag was set to true in the PaymentOptions
     * passed to the PaymentRequest constructor, then payerPhone will be the
     * phone number chosen by the user.
     */
    @JsonProperty(value = "payerPhone")
    private String payerPhone;

    /**
     * Get the methodName value.
     *
     * @return the methodName value
     */
    public String methodName() {
        return this.methodName;
    }

    /**
     * Set the methodName value.
     *
     * @param methodName the methodName value to set
     * @return the PaymentResponse object itself.
     */
    public PaymentResponse withMethodName(String methodName) {
        this.methodName = methodName;
        return this;
    }

    /**
     * Get the details value.
     *
     * @return the details value
     */
    public Object details() {
        return this.details;
    }

    /**
     * Set the details value.
     *
     * @param details the details value to set
     * @return the PaymentResponse object itself.
     */
    public PaymentResponse withDetails(Object details) {
        this.details = details;
        return this;
    }

    /**
     * Get the shippingAddress value.
     *
     * @return the shippingAddress value
     */
    public PaymentAddress shippingAddress() {
        return this.shippingAddress;
    }

    /**
     * Set the shippingAddress value.
     *
     * @param shippingAddress the shippingAddress value to set
     * @return the PaymentResponse object itself.
     */
    public PaymentResponse withShippingAddress(PaymentAddress shippingAddress) {
        this.shippingAddress = shippingAddress;
        return this;
    }

    /**
     * Get the shippingOption value.
     *
     * @return the shippingOption value
     */
    public String shippingOption() {
        return this.shippingOption;
    }

    /**
     * Set the shippingOption value.
     *
     * @param shippingOption the shippingOption value to set
     * @return the PaymentResponse object itself.
     */
    public PaymentResponse withShippingOption(String shippingOption) {
        this.shippingOption = shippingOption;
        return this;
    }

    /**
     * Get the payerEmail value.
     *
     * @return the payerEmail value
     */
    public String payerEmail() {
        return this.payerEmail;
    }

    /**
     * Set the payerEmail value.
     *
     * @param payerEmail the payerEmail value to set
     * @return the PaymentResponse object itself.
     */
    public PaymentResponse withPayerEmail(String payerEmail) {
        this.payerEmail = payerEmail;
        return this;
    }

    /**
     * Get the payerPhone value.
     *
     * @return the payerPhone value
     */
    public String payerPhone() {
        return this.payerPhone;
    }

    /**
     * Set the payerPhone value.
     *
     * @param payerPhone the payerPhone value to set
     * @return the PaymentResponse object itself.
     */
    public PaymentResponse withPayerPhone(String payerPhone) {
        this.payerPhone = payerPhone;
        return this;
    }
}
/* Creates a new variable with the given id and returns its slot number.
   Note: per the original author, the returned number is var-number + 1 so
   that a valid slot is never zero -- a debatable design, kept as-is for
   compatibility (the offset, if any, is applied inside mvar_alloc). */
int mvar_create( int id )
{
  int slot = mvar_alloc();
  mvar_create_var( slot, id );
  return slot;
}
#ifndef LINEDOC_DOC_LINE_HXX_SEEN
#define LINEDOC_DOC_LINE_HXX_SEEN

#include <cstddef>
#include <string>

namespace linedoc {

/// Document line type: one line of text together with its provenance.
template <typename T> struct doc_line_ {

  /// The UID of the on-disk file that this line was read from
  size_t fileID;
  /// The line number in the on-disk file that this line corresponds to
  size_t file_line_no;
  /// The character line read from the on-disk file
  std::basic_string<T> characters;

  /// Whether this line contains no characters
  bool is_empty() const { return !characters.size(); }

  /// Whether the given index is at or beyond the end of the line
  bool is_EOL(size_t character) const { return character >= characters.size(); }

  /// Index of the last character, or npos for an empty line
  size_t last_char() const {
    return is_empty() ? std::basic_string<T>::npos : (characters.size() - 1);
  }

  /// Character at the given index; a newline is synthesized past the end
  T get_char(size_t character) const {
    return is_EOL(character) ? T('\n') : characters[character];
  }

  /// Number of characters on this line
  size_t size() const { return characters.size(); }

  /// A copy of this line restricted to up to n characters starting at i;
  /// an empty line (same provenance) when i is past the end.
  doc_line_<T> subline(size_t i, size_t n = std::basic_string<T>::npos) const {
    if (is_EOL(i)) {
      // BUG FIX: build an empty basic_string<T> instead of using a narrow ""
      // literal, which fails to compile when this template is instantiated
      // with T = wchar_t (no basic_string<wchar_t>(const char*) constructor).
      return doc_line_<T>{fileID, file_line_no, std::basic_string<T>()};
    }
    return doc_line_<T>{fileID, file_line_no, characters.substr(i, n)};
  }
};

typedef doc_line_<char> doc_line;
typedef doc_line_<wchar_t> wdoc_line;
} // namespace linedoc

#endif
-- | Test for 'blockRam1' with 'ClearOnReset': a 1024-entry block RAM of
-- 8-bit words, initialised to 3 and cleared back to 3 while reset is
-- asserted.
module BlockRam1 where

import Clash.Prelude
import Clash.Explicit.Testbench
import qualified Clash.Explicit.Prelude as Explicit

-- | Force the output to 0 on the very first cycle (masking the RAM's
-- undefined first read), passing the input through afterwards.
zeroAt0
  :: HiddenClockResetEnable dom
  => Signal dom (Unsigned 8)
  -> Signal dom (Unsigned 8)
zeroAt0 a = mux en a 0
 where
  -- False for exactly the first cycle, then True forever.
  en = register False (pure True)

-- | The design under test: read port, optional (address, data) write
-- port, read data out.
topEntity
  :: Clock System
  -> Reset System
  -> Enable System
  -> Signal System (Index 1024)
  -> Signal System (Maybe (Index 1024, Unsigned 8))
  -> Signal System (Unsigned 8)
topEntity = exposeClockResetEnable go
 where
  go rd wr = zeroAt0 dout
   where
    dout = blockRam1 ClearOnReset (SNat @1024) (3 :: Unsigned 8) rd wr
{-# NOINLINE topEntity #-}

-- | Self-checking testbench: drives reads, writes and a mid-run reset,
-- and compares the DUT output against the expected trace.
testBench :: Signal System Bool
testBench = done
 where
  -- Stimuli tuples are (assert DUT reset, read address, optional write).
  (rst0, rd, wr) = unbundle $ stimuliGenerator clk rst
    (    (True, 0, Nothing)
      -- Confirm initial values
      :> (False, 0, Nothing)
      :> (False, 1, Nothing)
      :> (False, 2, Nothing)
      :> (False, 3, Nothing)
      -- Write some values
      :> (False, 0, Just (0, 8))
      :> (False, 0, Just (1, 9))
      :> (False, 0, Just (2, 10))
      :> (False, 0, Just (3, 11))
      -- Read written values
      :> (False, 0, Nothing)
      :> (False, 1, Nothing)
      :> (False, 2, Nothing)
      :> (False, 3, Nothing)
      -- Reset for two cycles
      :> (True, 0, Nothing)
      :> (True, 0, Nothing)
      -- Check whether first two values were reset
      :> (False, 0, Nothing)
      :> (False, 1, Nothing)
      :> (False, 2, Nothing)
      :> (False, 3, Nothing)
      :> Nil )

  expectedOutput = outputVerifier' clk rst
    (    0
      :> 0
      -- Initial values should be all threes
      :> 3 :> 3 :> 3 :> 3
      -- Read address zero while writing data
      :> 3 :> 8 :> 8 :> 8
      -- Read written values back from BRAM
      :> 8 :> 9 :> 10 :> 0 -- < Reset is high, so we won't read '11'
      -- Reset for two cycles
      :> 0 :> 0
      -- Check whether reset worked
      :> 3 :> 3 :> 10 :> 11
      :> Nil )

  done = expectedOutput (topEntity clk (unsafeFromHighPolarity rst0) enableGen rd wr)
  clk  = tbSystemClockGen (not <$> done)
  rst  = systemResetGen
As announced previously we now have our own stable in the West Crater so we can all level up our own pet :eleni: "Monria Prize Tickets" can now be looted through hunting! These tickets can be exchanged for prizes on display in my shop in the main hub and here. They are numbered from 1 to 20 and you have 6 months from the time you loot them to claim your prize. This is on the off chance that some of you prefer tickets to shiny things! The 'Great Ones' decide when the tickets appear but they can be considered quite rare, so stop reading and start hunting to get your discovery! The Monria Tickets are a first step in giving us the tools to run our crazy events and it will also enable me to vary the loot in the future. I also want more people to visit my shop so that I can sell those Refrigerators that @Dark Moon Enigma keeps forcing me to move. It might be small items in the future but I feel its important to personalize the rewards for hunting and with all @Kendra's shopping I have a lot of junk to get rid of. So please stop stocking your shops because it will just end up being returned in loot! The Zoldenite mission chain (DSEC : Zoldenite I-VI) now rewards surveying skills instead of prospecting. I felt this was more balanced and made sense, I know we're a little different but that's just @Crazyshadow. There are other changes that may or may not be very obvious and there are some things for which I am sworn to secrecy on pain of death but I did some of those things too... Cthylla Towers Shop 1 | Display Area Cthylla Towers Shop 2 | Display Area One comes with the shop on the auction and the other will stop @Pusherman nagging me. All Display areas should be in the possession of Avatars now. If anyone has any further issues with the display areas please let me know. It is likely in the future that these deeds will be merged as having them separate is a little nutty. 
In the meantime if anyone is buying a shop in the towers on Monria be sure to get two deeds (one for the display area and one for the shop). The other reported bugs such as the tower shops not showing who bought an item, TP to home not working, some of the display areas in the hub not working and other quirky bugs will not be fixed in this VU. Rather than attempt to patch these things I have decided that it is best that parts of Monria are rebuilt. This will take longer but will be better for us in the long term and is nothing too exciting, just some housekeeping. The repeatable shub chain mission has not been fixed yet but I haven't forgotten you @Angel Of Shadows, it's just that you were too slow to kill them and I was too late to get it fixed in time. I am continuing to chase this and other bugs. So please if you spot anything out of the ordinary, add it to the bug list here, just bear in mind that not all things strange on Monria are bugs, some of them are just us expressing ourselves.... With the introduction of the new jumpsuits it seems our Monrian born are about to experience a fashion faux pas as I have had reports that the Monria top may not play nice with the new outfit. Be warned that if you remove the outfit you may look like @sluggo in a tee shirt. Your fashion is important to me so rest assured I will be doing all I can in the future to ensure that Monrian's are always dressed to party. I am pleased to announce Monria's first version update in over 2 years. We are resetting the clock so you can consider this version zero or VU "o_O"Does a smiley count as an update?A stable was a priority early on due to feedback from our resident 'lunartics.' The stable will be placed on the auction in an upcoming patch along with the introduction of Sweetstuff so that we can make our own nutrio bars. 
We are working on plans for a pet in the future but it will not be available in this VU, although Calypso Pets can be spawned on Monria, so grab your own Leprechaun for the St Patrick's Day Event on Saturday. The stable is placed in the West Crater because I want as many reasons to go to different parts of our Moon as possible. It is positioned as close as I could get it to the TP without it actually being a TP. Thanks for your support, Ant
Coffee is a high consumption export product in many Latin American countries. Consumers may be aware of the disparity between how much they pay for a cup of coffee and how much the coffee farmers in far away countries may make, but perhaps they don't realise that what they pay may be more even than their barista's hourly wage, as the Chilean Starbucks workers currently on hunger strike state: Hunger strike The hunger strike demanding better working conditions started by three Starbucks workers in Chile is up to its 11th day, with Starbucks refusing to negotiate due to “company policies”, according to La Fundación Sol's (@lafundacionsol) [es] Twitter update: @lafundacionsol: 11días de #huelgadehambre y Starbucks le dice a sus “partners” @Sindicatosbux q x polít corporativa no pueden dar benef en una Neg Colectiva @lafundacionsol : 11 days into the #hungerstrike and Starbucks tells its “partners” @sindicatosbux that due to corporate policies they can't grant benefits through a collective negotiation. The three workers on hunger strike are members of the Starbucks employee union in Chile, which through its Twitter account @Sindicatosbux [es] has been reporting on their 30 day strike and latter hunger strike. As Andrés Giordano, the union's president, explains in this next video interview [es], their hunger strike is their last resort after all other efforts to reach an agreement with Starbucks were met with denials: Through their blog [es], started in 2009, they have actively posted information on their union and their efforts to work with Starbucks to arrive at mutually beneficial solutions. For example, part of the Starbucks policy was to provide transportation to employees who close stores after 10 or 11pm at night in far off locations or in areas considered dangerous, as reported in this December 2009 post [es]. However, many locations and “partners” were not receiving the benefit, or due to routing issues arrived at their homes as late as 2am. 
Back in November 2010, Starbucks’ partners in Chile wrote a letter [es] to their international visitors exposing some of the company's most offensive behaviors towards their partners. Although Starbucks excused themselves due to the economic recession, to the baristas it did not make sense that their prior benefits were being cut off, their salaries were staying the same and the yearly employee company party was cancelled while the top management went for a planning retreat to the Chilean South, complete with horseback riding and hot springs. In the letter [es] they wrote: No es posible que un americano alto preparado en 3 minutos valga más que la hora de un barista. It isn't possible that a tall latte prepared in 3 minutes is worth more than a barista's hourly wage. In this next video, The Timber Beast plays on the guitar and sings Joe Feinberg's ‘What Shall We Do With the Starbucks Bosses?’ in front of a Starbucks in Ontario, Canada, in support of the Chilean Starbucks workers: The Wall Street Journal reports that United States baristas are also supporting their Chilean partners’ strike. In the article, Starbucks spokesman Jim Olson expressed that baristas in Chile make 30% more than the industry average and made reference to some of the 24 original demands which were dropped by the union, without mentioning the four that still stand. Industrial Workers of the World explains a couple of these demands: Their most crucial demand is earning a higher wage. Currently baristas at Starbucks in Chile make $2.50/hr. while the drinks are still sold for US prices, and they haven’t received raises in 8 years. The baristas are also asking for a lunch stipend in order to eat during their shifts, this is something managers in Chile are provided. In addition, workers are also requiring transportation for those baristas working in remote locations and for those in dangerous neighborhoods who after 10 pm have to figure out how to return to their homes. 
They are also asking Starbucks to provide them with their mandatory uniforms. Is it illegal? None of this makes for happy employees, but is it illegal? In this podcast interview by El Quinto Poder with Marco Kremerman [es], a researcher at the Fundacion Sol, he explains how companies like Starbucks take advantage of legal loopholes in Chile, such as the possibility of hiring replacements for workers on strike, turning their strike ineffective. According to nacion.cl, Starbucks Union workers will report the company to the ILO for their anti-union policies. As Giordano was quoted: Starbucks Coffee está vulnerando nuestra legislación laboral por sus reiterativas prácticas que buscan desarticular nuestra organización sindical y el proceso de negociación colectiva. Starbucks Coffee is violating our labor legislation through their repeated practices seeking to dismantle our labor union organization and the process of collective negotiation. Coffee is one of the top world commodities and is mostly produced in developing countries. By far the greatest consumption is in developed countries, where people pay an average of 3 USD for a cup of coffee at chains like Starbucks. But when Starbucks opens in developing coffee-producing countries and maintains their prices, well, the contrast is noticeable. Back in 2003, when Starbucks opened in Lima, Peru, BBC reporter Hannah Hennessy wrote about it: There are some locals who can afford to pay two-thirds of Peru's minimum daily wage for a cup of coffee, but even they know it is a luxury for the privileged few. And seven years later, the situation is not much different: although Starbucks sells fairtrade coffee, some critics at the GreenLiving blog of the UK's Guardian newspaper believe it isn't enough and that anti labor union policies affect its ethical rating. This is by no means a Starbucks problem, though: the coffee industry in general has been criticized for its exploitative behaviors. 
Movies like Black Gold, which focuses on the gap between winners and losers in the coffee industry, specifically in Ethiopia, have been met with silence from the major coffee companies, as they explain on their FAQ: We wanted to include interviews with all the major coffee multinationals: Kraft, Nestle, Proctor & Gamble, Nestle and Starbucks. But they all declined our invitations, which you could say, speaks volumes about the transparency in the industry. In the case of Starbucks, we spent over six months trying to get an interview through their PR agencies and their HQ in Seattle. They declined all requests and went on to publicly discredit the film when it was released. So far, Starbucks has not addressed the strike in Chile, and its local website [es] makes no mention of it. Now that the strike is echoing around the world maybe it will force Starbucks to answer questions like the one @micronauta [es] asked on Twitter: Si los precios q cobra Starbucks en Chile por sus productos son similares a los de EEUU ¿por qué los sueldos son más bajos? If the prices Starbucks charges in Chile for their products are similar to those in the USA, why are their salaries lower?
/* Route a printf-style message to whichever output channel is currently
   selected: the normal user-message channel by default, or the XML
   channel when --xml is in effect (VG_(clo_xml)). */
static void emit_WRK ( HChar* format, va_list vargs )
{
   if (!VG_(clo_xml)) {
      VG_(vmessage)(Vg_UserMsg, format, vargs);
   } else {
      VG_(vprintf_xml)(format, vargs);
   }
}
It is the Holy Grail of instant success as an author. The elusive grand slam home run of literary home runs. It is better – beyond – getting published, or even making a bestseller list. It is the dream. Bigger than your highest vision of The Dream. It is called “going viral.” For the Luddites among us… going viral means that word-of-mouth and the media, especially the internet – which in this case are simply responding to an initial word-of-mouth phenomena — conspire in a dance of co-dependent cause and effect to explode a book beyond the bestseller lists into a feeding frenzy of attention, demand, praise and bookstore waiting lists. For most readers, this sudden attention is the first time they’ll hear about the title, or its author. Think The DaVinci Code, Twilight, The Hunger Games, The Lovely Bones, The Help, The Bridges of Madison County… books that seemingly appear out of nowhere and sell millions within a few weeks, and more millions afterward, almost always resulting in a movie and a sequel. People who wouldn’t have been interested before are now clicking onto Amazon to pick up a copy, in some cases simply because they want to see what all the buzz is about. How did they do that? How can we do that? Good news and bad news: we can enter the game, we can go for it, but once qualified and out there, it’s a total crapshoot. One over which you have, after meeting the criteria for viral consideration, absolutely no control. It is beyond social media. You can’t tweet or Facebook yourself into viral status. Your publisher can’t even make it happen. It rarely happens to the common A-list author names – they became A-listers after their viral debut – it’s usually something fresh, from a fresh face. And yet, going viral is a paradox. It is something you can wish for, but once the book has been written, cannot create or execute. 
The best you can do is write a book that is positioned – that delivers the right stuff – to be discovered, ignited and launched on a viral journey at the scale required to wear this nametag. Many books qualify. Few hear their name called. The paradox is this: The criteria for putting your book into a position to go viral is almost exactly that associated with getting published in the first place. The book has to work. Really, really well. That said, viral books tend to do a couple of specific things really well: They are often “high concept” (rather than character-driven, even though they introduce great characters), with exceptional execution across all of the Six Core Competencies. They also deliver something else, almost without exception: they seize the inherent compelling power of underlying story physics in way that exceeds the competition. These two realms of story – compelling concept, with exceptionally strong underlying essences, is what gets you into the viral game. And if that sounds underwhelming, welcome to the paradox. Doesn’t everybody try for a compelling concept and the blowing of their story physics out of the water? Answer: not really. Mostly because they don’t address these as goals. Some authors just write their story, write it well, let it unspool organically, and hope somebody out there gets it. This may get them published, but it doesn’t usually get them on Good Morning America. If you want to go viral, you should address high concept and the optimization of story physics in the story development process. You should be aware of their inherent compelling power, or not. And if the latter, jack it higher. The Latest Example of the Viral Dream Come True Just this morning Good Morning America did a feature on the latest viral sensation in the book world. It described a mad frenzy of word-of-mouth obsession, and during the segment the GMA anchors were literally grabbing the book from each other’s hands to swoon over randomly selected sentences. 
Not because the sentences were astoundingly eloquent. Rather, because the sentences deliver more than one of the basic elements of story physics like a bullet to the brain. The book is called “50 Shades of Grey,” dubbed an erotic novel (part of a trilogy) by a little known English author named E.L. James. As I write this, a mere four hours after the GMA lovefest, less than two weeks after initial release, it resides at #1 on the Amazon Kindle list, and #4 on the overall bestselling books list. Almost all because of reader word-of-mouth. And media that listens and jumps on board. Interestlingly, it isn’t yet registering on the New York Times bestseller list. Why? Because that’s an insider industry list based on wholesale distribution to bookstores and a lagging nod to digital books, and 50 Shades of Grey is barely in bookstores and is too new to crack the old boy network that the NYT represents. But wait ‘til next week. It’ll be there, and probably at #1. Let me tell you why this book has gone viral. And in doing so, identify the simple elements of story physics that this book delivers. Read and learn, this is your ticket not only to the viral world, but to finding a publisher and a readership, as well. The book is about a young woman who has an affair with a billionaire. In one reader’s words, it is full of sex, money and clothes. It is Sex in the City times ten. One interviewed reader calls it “mommy porn.” A guilty pleasure perfectly suited to the anonymity of a Kindle in a crowded mall. High concept? Not particularly. But here’s what it does do well: It is fueled by two things, both of them among the short list of essential story physics that capture readers: The book is driven by hero empathy, while delivering a vicarious ride. Read that again. It isn’t the plot, and it isn’t character. No, this is about the reader. 
This strategy shoots for the result of what you’ve written, the impact on a reader that creates a reading experience beyond the intellectual curiosity of plot, the reward of laughter or any marveling at great art. It’s about the reader transporting themselves into this world… going on this ride… feeling it… wanting to be the hero… wishing it was them… the reader completely engaging in this journey on a personal level. You may enjoy the heck out of the latest detective thriller, but really, is this something you want to actually do? To actually feel? No, that’s a voyeuristic read. 50 Shades of Grey, while perhaps voyeuristic, is actually more masturbatory and vicarious in nature. It delivers an emotional experience that taps into something deep and forbidden and unavailable. It mines pure gold from the power of its underlying story physics. That’s it. Do this, and do it within a compelling premise with professional-level execution, and you are in a position to go viral. And if you don’t happen to win that particular lottery, at least you’ll have increased your chances at publication or digital success exponentially. More on this in a day or two. For now, ask yourself what about your story delivers a vicarious ride, where your story takes the reader, and at what level your story makes the reader feel and actually become a part of the story in a vicarious and personally empathetic way… rather than sitting in the literary grandstands and watching it all go down. Read more about story physics here. Read about how to deliver them to your reader here.
/**
 * Uses the skin URL to make a custom skull.
 *
 * @param skinURL The skin url: either a Base64-encoded texture payload
 *                (recognised by its trailing '=' padding) or a bare
 *                texture hash to be resolved against the Minecraft
 *                texture host.
 * @return The ItemStack
 */
public ItemStack customHeadBuild(String skinURL) {
    boolean isBase64Payload = skinURL.endsWith("=");
    String skin = isBase64Payload
            ? base64Decoder(skinURL)
            : "http://textures.minecraft.net/texture/" + skinURL;
    return setCustomSkin(this, skin);
}
//===--- NotificationCenter.cpp -------------------------------------------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See http://swift.org/LICENSE.txt for license information // See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// #include "SourceKit/Core/NotificationCenter.h" #include "SourceKit/Support/Concurrency.h" using namespace SourceKit; void NotificationCenter::addDocumentUpdateNotificationReceiver( DocumentUpdateNotificationReceiver Receiver) { WorkQueue::dispatchOnMain([this, Receiver]{ DocUpdReceivers.push_back(Receiver); }); } void NotificationCenter::postDocumentUpdateNotification( StringRef DocumentName) const { std::string DocName = DocumentName; WorkQueue::dispatchOnMain([this, DocName]{ for (auto &Fn : DocUpdReceivers) Fn(DocName); }); }
// Ensure the directory at DirPath exists, creating it if necessary.
// Returns true when the directory already exists or was created, false on a
// null path or when creation fails (e.g. the path names an existing file,
// or an intermediate component is missing -- CreateDirectoryA only creates
// the final path component).
bool createDirIfNotExists(char *DirPath){
	if (!DirPath){
		return false;
	}
	DWORD dwAttr = GetFileAttributesA(DirPath);
	// INVALID_FILE_ATTRIBUTES (== (DWORD)-1) means the path does not exist
	// or is inaccessible; prefer the named constant over the 0xffffffff
	// magic number.
	if (dwAttr != INVALID_FILE_ATTRIBUTES && (dwAttr & FILE_ATTRIBUTE_DIRECTORY)){
		// Already present and really a directory.
		return true;
	}
	// Missing (or not a directory): try to create it.
	return CreateDirectoryA(DirPath, NULL) != 0;
}
# -*- Mode: Python; tab-width: 4 -*- # # Author: <NAME> <<EMAIL>> # Copyright 1996, 1997 by <NAME> # All Rights Reserved. # # This software is provided free for non-commercial use. If you are # interested in using this software in a commercial context, or in # purchasing support, please contact the author. RCS_ID = '$Id$' # support for `~user/public_html'. import regex import string import default_handler import filesys import os import pwd split_path = default_handler.split_path get_header = default_handler.get_header user_dir = regex.compile ('/~\([^/]+\)\(.*\)') class unix_user_handler (default_handler.default_handler): def __init__ (self, public_html = 'public_html'): self.public_html = public_html default_handler.default_handler.__init__ (self, None) # cache userdir-filesystem objects fs_cache = {} def match (self, request): if user_dir.match (request.uri) == len(request.uri): return 1 else: return 0 def handle_request (self, request): # get the user name user = user_dir.group(1) rest = user_dir.group(2) # special hack to catch those lazy URL typers if not rest: request['Location'] = 'http://%s/~%s/' % ( request.channel.server.server_name, user ) request.error (301) return # have we already built a userdir fs for this user? if self.fs_cache.has_key (user): fs = self.fs_cache[user] else: # no, well then, let's build one. # first, find out where the user directory is try: info = pwd.getpwnam (user) except KeyError: request.error (404) return ud = info[5] + '/' + self.public_html if os.path.isdir (ud): fs = filesys.os_filesystem (ud) self.fs_cache[user] = fs else: request.error (404) return # fake out default_handler self.filesystem = fs # massage the request URI request.uri = '/' + rest return default_handler.default_handler.handle_request (self, request) def __repr__ (self): return '<Unix User Directory Handler at %08x [~user/%s, %d filesystems loaded]>' % ( id(self), self.public_html, len(self.fs_cache) )
#include "../cache.h"
#include "progress.h"

/* Fallback backend: silently discard progress updates (e.g. stdio mode). */
static void null_progress__update(struct ui_progress *p __maybe_unused)
{
}

static struct ui_progress_ops null_progress__ops = {
	.update = null_progress__update,
};

struct ui_progress_ops *ui_progress__ops = &null_progress__ops;

/*
 * Advance the progress state by @adv units and notify the active UI
 * backend each time the next reporting threshold is crossed.
 */
void ui_progress__update(struct ui_progress *p, u64 adv)
{
	p->curr += adv;

	if (p->curr >= p->next) {
		p->next += p->step;
		ui_progress__ops->update(p);
	}
}

/*
 * Initialize a progress tracker over @total units; the UI is refreshed
 * roughly 16 times over the whole run.
 */
void ui_progress__init(struct ui_progress *p, u64 total, const char *title)
{
	p->curr = 0;
	p->next = p->step = total / 16;
	/*
	 * For tiny totals (< 16) the division above yields a step of 0,
	 * which would make every single update fire the UI callback (and
	 * the threshold would never advance). Clamp to 1.
	 */
	if (p->step == 0)
		p->next = p->step = 1;
	p->total = total;
	p->title = title;
}

/* Let the active UI backend know the operation finished, if it cares. */
void ui_progress__finish(void)
{
	if (ui_progress__ops->finish)
		ui_progress__ops->finish();
}
/**
 * Creates a <code>Properties</code> table that contains (name, value) pairs of environment
 * variables settings for a given installation descriptor object.
 *
 * @param insdObject
 *          The given installation descriptor object.
 * @return The <code>Properties</code> table that contains environment variables settings for the
 *         given installation descriptor object.
 */
public static Properties buildTableOfEnvVars(InstallationDescriptor insdObject) {
  Properties envVarsTable = new Properties();
  Iterator<InstallationDescriptor.ActionInfo> actionIterator = insdObject
          .getInstallationActions(InstallationDescriptor.ActionInfo.SET_ENV_VARIABLE_ACT)
          .iterator();
  while (actionIterator.hasNext()) {
    InstallationDescriptor.ActionInfo action = actionIterator.next();
    String name = action.params.getProperty(InstallationDescriptorHandler.VAR_NAME_TAG);
    String value = action.params.getProperty(InstallationDescriptorHandler.VAR_VALUE_TAG);
    // Skip incomplete actions, and PATH/CLASSPATH, which are handled elsewhere.
    if (name == null || value == null || name.equalsIgnoreCase(CLASSPATH_VAR)
            || name.equalsIgnoreCase(PATH_VAR)) {
      continue;
    }
    String existing = envVarsTable.getProperty(name);
    // Repeated settings of the same variable accumulate as a path list.
    envVarsTable.setProperty(name,
            existing == null ? value : existing + File.pathSeparator + value);
  }
  return envVarsTable;
}
// launchAPI launches an api server and returns clients configured to // access it. func launchApi(t *testing.T) (routeclientset.Interface, projectclientset.Interface, kclientset.Interface, func(), error) { masterConfig, clusterAdminKubeConfig, err := testserver.StartTestMasterAPI() if err != nil { return nil, nil, nil, nil, err } kc, err := testutil.GetClusterAdminKubeClient(clusterAdminKubeConfig) if err != nil { return nil, nil, nil, nil, err } cfg, err := testutil.GetClusterAdminClientConfig(clusterAdminKubeConfig) if err != nil { return nil, nil, nil, nil, err } routeclient, err := routeclientset.NewForConfig(cfg) if err != nil { return nil, nil, nil, nil, err } projectclient, err := projectclientset.NewForConfig(cfg) if err != nil { return nil, nil, nil, nil, err } return routeclient, projectclient, kc, func() { testserver.CleanupMasterEtcd(t, masterConfig) }, nil }
Updated 5:50 p.m. ET Restore Our Future, a super PAC aiding Mitt Romney, raised $6.6 million in January to aid his presidential campaign, documents filed today show. The super PAC spent nearly $14 million in January as it blistered Romney's GOP rivals with stinging ads during last month's caucuses and primaries. It ended the month with $16.3 million in the bank, some of which is being deployed in Romney's native state of Michigan ahead of the Feb. 28 primary there. By comparison, the super PAC supporting Newt Gingrich raised slightly more than $11 million last month -- nearly all of it from casino mogul Sheldon Adelson and his wife, Miriam. The details of the super PAC's fundraising emerged as Republican presidential candidates and the other super PACs active in this year's election file January fundraising reports to the Federal Election Commission today. Winning Our Future, the pro-Gingrich super PAC, reported spending about $9.8 million on its federal election activities, the group's documents show. It had about $2.4 million cash on hand. Adelson, who owns the Venetian on the Las Vegas strip, and his wife, Miriam, a physician, each donated $5 million to the super PAC. Overall, Restore Our Future has raised nearly $36.8 million since it was created last year. January donors include Texas billionaire Harold Simmons, who gave $100,000, and members of the Marriott and Walton families, who established the hotel chain and Walmart stores, respectively. Meg Whitman, who ran unsuccessfully for California governor in 2010, also donated $100,000 to Restore Our Future last month. A report filed earlier today showed PayPal co-founder Peter Thiel donated $1.7 million last month to a super PAC supporting one of Romney's GOP rivals, Ron Paul. Overall, Thiel has donated $2.6 million to Endorse Liberty, created last year to aid the Texas representative. 
The latest FEC reports are expected to show a sharp uptick in fundraising for former Pennsylvania senator Rick Santorum, who raised less than $2.2 million in 2011 before winning a string of contests early this year. He now leads Mitt Romney in the latest public polls in Michigan. On Friday, President Obama reported a sharp drop from four years ago in his fundraising. He collected $29.1 million for his campaign and the Democratic National Committee in January. Nearly $11.9 million went into his campaign account, a third of what he raised as a candidate in January 2008. Today's filings are giving the public a picture of how much a handful of unions, corporations and wealthy individuals can shape super PACs, new outside groups raising and spending unlimited amounts to influence the presidential contest and other races. Jon Huntsman Sr., the billionaire father of former GOP candidate Jon Huntsman, donated another $335,000 in January to Our Destiny PAC, a super PAC that supported his son's failed candidacy, the reports show. In all, the elder Huntsman gave $2.2 million to the super PAC, nearly 70% of the total it collected. The reports are due by midnight. (Contributing: Ray Locker)
/**
 * A chunk of data from a file. AuxFileChunk is used in the transfer to and
 * from remote servers.
 *
 * <p>Instances are immutable value objects. Note, however, that the backing
 * byte array is stored and returned by reference, not defensively copied.
 */
public final class AuxFileChunk implements Serializable {

    // Zero-based index of this chunk within the file.
    private final int _chunkNumber;

    // Nominal size in bytes of every chunk (the last chunk may carry less).
    private final int _chunkSize;

    // Total size in bytes of the file being transferred.
    private final long _fileSize;

    // Timestamp associated with the file; units/epoch are caller-defined
    // (not validated or interpreted here).
    private final long _timestamp;

    // Payload bytes of this chunk; held by reference.
    private final byte[] _chunkData;

    /**
     * Creates a chunk descriptor.
     *
     * @param chunkNumber zero-based chunk index; must be non-negative
     * @param chunkSize   nominal chunk size in bytes; must be non-negative.
     *                    NOTE(review): 0 is accepted here but makes
     *                    {@link #getTotalChunks} divide by zero -- confirm
     *                    callers never pass 0.
     * @param fileSize    total file size in bytes; must be non-negative
     * @param timestamp   file timestamp (caller-defined units)
     * @param chunkData   payload bytes; must not be null (stored by reference)
     * @throws IllegalArgumentException if a numeric argument is negative
     * @throws NullPointerException if chunkData is null
     */
    public AuxFileChunk(int chunkNumber, int chunkSize, long fileSize,
            long timestamp, byte[] chunkData) {
        if (chunkNumber < 0) {
            throw new IllegalArgumentException("chunk number: " + chunkNumber);
        }
        if (chunkSize < 0) {
            throw new IllegalArgumentException("chunk size: " + chunkSize);
        }
        if (fileSize < 0) {
            throw new IllegalArgumentException("fileSize: " + fileSize);
        }
        if (chunkData == null) {
            throw new NullPointerException("chunk data is null");
        }
        _chunkNumber = chunkNumber;
        _chunkSize = chunkSize;
        _fileSize = fileSize;
        _timestamp = timestamp;
        _chunkData = chunkData;
    }

    /** @return the zero-based index of this chunk within the file. */
    public int getChunkNumber() {
        return _chunkNumber;
    }

    /**
     * @return the total number of chunks the whole file occupies, i.e.
     *         ceil(fileSize / chunkSize).
     */
    public int getTotalChunks() {
        return (int) Math.ceil(((double) _fileSize) / _chunkSize);
    }

    /** @return the payload bytes of this chunk (by reference, not a copy). */
    public byte[] getChunkData() {
        return _chunkData;
    }

    /**
     * @return whether this chunk covers the tail end of the file.
     *         NOTE(review): the int product (chunkNumber+1)*chunkSize can
     *         overflow for very large files -- confirm expected ranges.
     */
    public boolean isLastChunk() {
        return ((_chunkNumber+1) * _chunkSize) >= _fileSize;
    }

    /** @return the total size in bytes of the file being transferred. */
    public long getFileSize() {
        return _fileSize;
    }

    /** @return the file timestamp supplied at construction. */
    public long getTimestamp() {
        return _timestamp;
    }

    /** Value equality over every field, including the payload bytes. */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        AuxFileChunk that = (AuxFileChunk) o;

        if (_chunkNumber != that._chunkNumber) return false;
        if (_chunkSize != that._chunkSize) return false;
        if (_fileSize != that._fileSize) return false;
        if (_timestamp != that._timestamp) return false;
        if (!Arrays.equals(_chunkData, that._chunkData)) return false;

        return true;
    }

    /** Hash code consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        int result = _chunkNumber;
        result = 31 * result + _chunkSize;
        result = 31 * result + (int) (_fileSize ^ (_fileSize >>> 32));
        result = 31 * result + (int) (_timestamp ^ (_timestamp >>> 32));
        result = 31 * result + Arrays.hashCode(_chunkData);
        return result;
    }
}
/** * Socket factory implementation that pools connections to one wrapped socket * factory. Sessions are ignored on all requests. Consider wrapping with a * {@link LazySocketFactory} for automatic checking against pooled connections * that may have been closed. * * @author Brian S O'Neill */ public class PooledSocketFactory implements SocketFactory { private final SocketFactory mFactory; private final long mTimeout; // Stack of Sockets. private final Stack mPool = new Stack(); private CheckedSocket.ExceptionListener mListener; public PooledSocketFactory(SocketFactory factory) { this(factory, factory.getDefaultTimeout()); } public PooledSocketFactory(SocketFactory factory, int maxPoolSize) { this(factory); } public PooledSocketFactory(SocketFactory factory, long timeout) { mFactory = factory; mTimeout = timeout; mListener = new CheckedSocket.ExceptionListener() { public void exceptionOccurred(CheckedSocket s, Exception e, int count) { // Only act on the first exception. if (count == 1) { // Assume all the pooled connections are bad, so ditch 'em. 
// NOTE(review): this excerpt begins mid-file; the tokens below close a
// definition whose opening lines are above this excerpt and are kept verbatim.
clear(); } } }; }

/** Returns the address and port of the wrapped factory. */
public InetAddressAndPort getInetAddressAndPort() {
    return mFactory.getInetAddressAndPort();
}

/** Session-aware overload; the session argument is ignored. */
public InetAddressAndPort getInetAddressAndPort(Object session) {
    return mFactory.getInetAddressAndPort();
}

/** Default timeout used by the no-timeout overloads (units per SocketFactory contract — confirm). */
public long getDefaultTimeout() {
    return mTimeout;
}

/**
 * Creates a brand-new pooled socket using the default timeout.
 * Note: createSocket() never draws from the pool — see getSocket() for that.
 */
public CheckedSocket createSocket()
    throws ConnectException, SocketException
{
    return createSocket(null, mTimeout);
}

/** Session-aware overload; the session argument is ignored. */
public CheckedSocket createSocket(Object session)
    throws ConnectException, SocketException
{
    return createSocket(mTimeout);
}

/** Always creates a fresh socket from the underlying factory and wraps it. */
public CheckedSocket createSocket(long timeout)
    throws ConnectException, SocketException
{
    return new PooledSocket(mFactory.createSocket(timeout));
}

/** Session-aware overload; the session argument is ignored. */
public CheckedSocket createSocket(Object session, long timeout)
    throws ConnectException, SocketException
{
    return createSocket(timeout);
}

/** Gets a socket (pooled if available) using the default timeout. */
public CheckedSocket getSocket()
    throws ConnectException, SocketException
{
    return getSocket(mTimeout);
}

/** Session-aware overload; the session argument is ignored. */
public CheckedSocket getSocket(Object session)
    throws ConnectException, SocketException
{
    return getSocket(mTimeout);
}

/**
 * Pops an idle socket from the pool when one is available; otherwise
 * creates a new one via the underlying factory. Pool access is
 * synchronized on mPool; the (possibly slow) factory call happens
 * outside the lock.
 */
public CheckedSocket getSocket(long timeout)
    throws ConnectException, SocketException
{
    synchronized (mPool) {
        if (mPool.size() > 0) {
            return new PooledSocket((SocketFace)mPool.pop());
        }
    }
    return new PooledSocket(mFactory.createSocket(timeout));
}

/** Session-aware overload; the session argument is ignored. */
public CheckedSocket getSocket(Object session, long timeout)
    throws ConnectException, SocketException
{
    return getSocket(timeout);
}

/**
 * Returns a socket to the pool. A null argument is silently ignored.
 * The socket must be a PooledSocket created by this factory, otherwise
 * IllegalArgumentException is thrown. recycle() returns null when the
 * socket is unfit for reuse (already closed or had exceptions), in which
 * case it is simply dropped.
 */
public void recycleSocket(CheckedSocket socket)
    throws SocketException, IllegalArgumentException
{
    if (socket != null) {
        if (socket instanceof PooledSocket) {
            PooledSocket psock = (PooledSocket)socket;
            if (psock.getOwner() == this) {
                psock.removeExceptionListener(mListener);
                SocketFace s = psock.recycle();
                if (s != null) {
                    mPool.push(s);
                }
                return;
            }
        }
        throw new IllegalArgumentException
            ("Socket did not originate from this pool");
    }
}

/** Closes and discards every idle socket in the pool; close errors are ignored. */
public void clear() {
    synchronized (mPool) {
        while (mPool.size() > 0) {
            try {
                ((SocketFace)mPool.pop()).close();
            }
            catch (IOException e) {
            }
        }
    }
}

/** Number of idle sockets currently in the pool (unsynchronized read). */
public int getAvailableCount() {
    return mPool.size();
}

/**
 * This class does two things. First, it supports virtual socket closure.
 * After a socket is put back into the pool, it can't be used again, but
 * the internal socket is still open.
 *
 * This class also tracks exceptions and checks if this socket can be
 * recycled.
 */
private class PooledSocket extends CheckedSocket {
    private InputStream mIn;    // lazily-created virtual-close stream wrapper
    private OutputStream mOut;  // lazily-created virtual-close stream wrapper
    private boolean mClosed;    // true once virtually closed; socket may still be pooled

    public PooledSocket(SocketFace s) throws SocketException {
        super(s);
        addExceptionListener(mListener);
    }

    /**
     * Wraps the real input stream so that, after virtual closure, every
     * read operation throws "Socket closed" while the underlying stream
     * stays open for the next pool user. The wrapper is cached in mIn.
     */
    public synchronized InputStream getInputStream() throws IOException {
        if (mIn != null) {
            return mIn;
        }

        final InputStream mStream = super.getInputStream();

        mIn = new InputStream() {
            public int read() throws IOException {
                check();
                return mStream.read();
            }

            public int read(byte[] b) throws IOException {
                check();
                return mStream.read(b);
            }

            public int read(byte[] b, int off, int len) throws IOException{
                check();
                return mStream.read(b, off, len);
            }

            public long skip(long n) throws IOException {
                check();
                return mStream.skip(n);
            }

            public int available() throws IOException {
                check();
                return mStream.available();
            }

            // Closing the stream closes the real stream only on the first
            // (virtual) close; afterwards it is a no-op.
            public void close() throws IOException {
                if (doClose()) {
                    mStream.close();
                }
            }

            public void mark(int readlimit) {
                mStream.mark(readlimit);
            }

            public void reset() throws IOException {
                check();
                mStream.reset();
            }

            public boolean markSupported() {
                return mStream.markSupported();
            }
        };

        return mIn;
    }

    /**
     * Same virtual-close wrapping as getInputStream(), for the output side.
     */
    public synchronized OutputStream getOutputStream() throws IOException {
        if (mOut != null) {
            return mOut;
        }

        final OutputStream mStream = super.getOutputStream();

        mOut = new OutputStream() {
            public void write(int b) throws IOException {
                check();
                mStream.write(b);
            }

            public void write(byte[] b) throws IOException {
                check();
                mStream.write(b);
            }

            public void write(byte[] b, int off, int len) throws IOException {
                check();
                mStream.write(b, off, len);
            }

            public void flush() throws IOException {
                check();
                mStream.flush();
            }

            public void close() throws IOException {
                if (doClose()){
                    mStream.close();
                }
            }
        };

        return mOut;
    }

    // Each socket-option accessor first verifies the socket has not been
    // virtually closed, then delegates to the real socket.

    public void setTcpNoDelay(boolean on) throws SocketException {
        check();
        super.setTcpNoDelay(on);
    }

    public boolean getTcpNoDelay() throws SocketException {
        check();
        return super.getTcpNoDelay();
    }

    public void setSoLinger(boolean on, int linger) throws SocketException {
        check();
        super.setSoLinger(on, linger);
    }

    public int getSoLinger() throws SocketException {
        check();
        return super.getSoLinger();
    }

    public void setSoTimeout(int timeout) throws SocketException {
        check();
        super.setSoTimeout(timeout);
    }

    public int getSoTimeout() throws SocketException {
        check();
        return super.getSoTimeout();
    }

    public void setSendBufferSize(int size) throws SocketException {
        check();
        super.setSendBufferSize(size);
    }

    public int getSendBufferSize() throws SocketException {
        check();
        return super.getSendBufferSize();
    }

    public void setReceiveBufferSize(int size) throws SocketException {
        check();
        super.setReceiveBufferSize(size);
    }

    public int getReceiveBufferSize() throws SocketException {
        check();
        return super.getReceiveBufferSize();
    }

    /** Really closes the underlying socket, but only on the first close. */
    public void close() throws IOException {
        if (doClose()) {
            super.close();
        }
    }

    /** Identifies which pool created this socket (used by recycleSocket). */
    SocketFactory getOwner() {
        return PooledSocketFactory.this;
    }

    /**
     * Marks this wrapper closed and hands back the raw socket for pooling.
     * Returns null if the socket was already closed or recorded any
     * exceptions (such a socket is closed and must not be reused).
     */
    SocketFace recycle() throws SocketException {
        if (mClosed) {
            return null;
        }
        else if (getExceptionCount() != 0) {
            try {
                close();
            }
            catch (IOException e) {
                throw new SocketException(e.getMessage());
            }
            return null;
        }
        else {
            mClosed = true;
            return mSocket;
        }
    }

    /** Flips mClosed; returns true only for the first caller. */
    boolean doClose() {
        return (mClosed) ? false : (mClosed = true);
    }

    /** Throws once the socket has been virtually closed. */
    void check() throws SocketException {
        if (mClosed) {
            throw new SocketException("Socket closed");
        }
    }
}
}
<gh_stars>0
#! /usr/bin/env python3
# -*- coding:utf-8 -*-

__author__ = ["<NAME>", "<NAME>", "<NAME>"]

import rospy
import numpy as np
import tf
import math
import cv2
from geometry_msgs.msg import Twist, Vector3, Pose
from nav_msgs.msg import Odometry
from sensor_msgs.msg import Image, CompressedImage
from cv_bridge import CvBridge, CvBridgeError

import cormodule

# Shared state between the image callback and the main control loop.
bridge = CvBridge()
cv_image = None
media = []    # centroid of the detected color blob (filled by cormodule)
centro = []   # center of the image frame
atraso = 1.5E9   # 1.5 seconds, in nanoseconds
area = 0.0   # area of the largest contour

# Only enable if the ROS clocks of the Raspberry and the desktop Linux box
# are synchronized. Discards frames that arrive too late.
check_delay = False


def roda_todo_frame(imagem):
    """Image callback: run on every incoming compressed frame.

    Converts the ROS image to OpenCV, runs the color detector from
    cormodule and publishes the result through the module-level globals
    `media` and `centro` (read by the main loop below).
    """
    print("frame")
    global cv_image
    global media
    global centro

    now = rospy.get_rostime()
    imgtime = imagem.header.stamp
    lag = now-imgtime  # how old this frame is
    delay = lag.nsecs
    print("delay ", "{:.3f}".format(delay/1.0E9))
    # Drop stale frames (only when clock sync is guaranteed, see check_delay).
    if delay > atraso and check_delay==True:
        print("Descartando por causa do delay do frame:", delay)
        return
    try:
        antes = rospy.Time.now()
        cv_image = bridge.compressed_imgmsg_to_cv2(imagem, "bgr8")
        # cv_image = cv2.flip(cv_image, -1) # Uncomment for the real robot
        media, centro, maior_area = cormodule.identifica_cor(cv_image)
        depois = rospy.Time.now()
        cv2.imshow("Camera", cv_image)
    except CvBridgeError as e:
        print('ex', e)


if __name__=="__main__":
    rospy.init_node("cor")

    # topico_imagem = "/kamera"
    topico_imagem = "camera/image/compressed" # Use for the simulated robot
    # topico_imagem = "/raspicam/image_raw/compressed" # Use for the real robot

    # To rename the *webcam*
    # First install the support: https://github.com/Insper/robot19/blob/master/guides/debugar_sem_robo_opencv_melodic.md
    #
    # Then run:
    #
    # rosrun cv_camera cv_camera_node
    #
    # rosrun topic_tools relay /cv_camera/image_raw/compressed /kamera
    #
    #
    # To rename the simulated Gazebo camera
    #
    # rosrun topic_tools relay /camera/rgb/image_raw/compressed /kamera
    #
    # To rename the Raspberry camera
    #
    # rosrun topic_tools relay /raspicam_node/image/compressed /kamera
    #

    recebedor = rospy.Subscriber(topico_imagem, CompressedImage, roda_todo_frame, queue_size=4, buff_size = 2**24)
    print("Usando ", topico_imagem)

    velocidade_saida = rospy.Publisher("/cmd_vel", Twist, queue_size = 1)

    try:
        # Simple proportional-ish controller: rotate toward the detected
        # color centroid, publishing at ~10 Hz.
        while not rospy.is_shutdown():
            vel = Twist(Vector3(0,0,0), Vector3(0,0,0))
            if len(media) != 0 and len(centro) != 0:
                print("Média dos vermelhos: {0}, {1}".format(media[0], media[1]))
                print("Centro dos vermelhos: {0}, {1}".format(centro[0], centro[1]))
                # Turn right when the blob is to the right of center,
                # left otherwise.
                if (media[0] > centro[0]):
                    vel = Twist(Vector3(0,0,0), Vector3(0,0,-0.1))
                if (media[0] < centro[0]):
                    vel = Twist(Vector3(0,0,0), Vector3(0,0,0.1))
            velocidade_saida.publish(vel)
            rospy.sleep(0.1)
    except rospy.ROSInterruptException:
        print("Ocorreu uma exceção com o rospy")
/* ----
 * Case-sensitive comparison of the candidate name in `data` against the
 * variable name stored in the list node `curr`.  Returns the raw strcmp()
 * result: zero on an exact match, non-zero otherwise, so it can be used
 * directly as a list-traversal match callback.
 */
static int NCF_ListTraverse_FoundVarNameCase( char *data, char *curr )
{
   return strcmp(data, ((ncvar *) curr)->name);
}
<reponame>bmingithub/qingcloud-csi<filename>pkg/disk/rpcserver/identityserver_test.go /* Copyright (C) 2019 Yunify, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this work except in compliance with the License. You may obtain a copy of the License in the LICENSE file, or at: http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package rpcserver import ( "context" "flag" "github.com/container-storage-interface/spec/lib/go/csi" "github.com/yunify/qingcloud-csi/pkg/cloud/mock" "github.com/yunify/qingcloud-csi/pkg/disk/driver" qcservice "github.com/yunify/qingcloud-sdk-go/service" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "k8s.io/klog" "reflect" "testing" ) func init() { klog.InitFlags(nil) flag.Parse() } func TestIdentityServer_Probe(t *testing.T) { tests := []struct { name string zones map[string]*qcservice.Zone err error }{ { name: "normal", zones: map[string]*qcservice.Zone{ "mock1": {}, "mock2": {}, }, err: nil, }, { name: "failed", zones: nil, err: status.Error(codes.FailedPrecondition, "cannot find any zones"), }, } for _, test := range tests { cm := &mock.MockCloudManager{} cm.SetZones(test.zones) is := NewIdentityServer(nil, cm) resp, err := is.Probe(context.Background(), &csi.ProbeRequest{}) if !reflect.DeepEqual(err, test.err) { t.Errorf("testcase %s: expect %s, but actually %t", test.name, test.err, err) } if err == nil && resp.GetReady().GetValue() != true { t.Errorf("testcase %s: expect %t, but actually %t", test.name, true, resp.GetReady().GetValue()) } } } func TestIdentityServer_GetPluginCapabilities(t *testing.T) { tests := []struct { name string config *driver.InitDiskDriverInput }{ { name: 
"normal", config: &driver.InitDiskDriverInput{ PluginCap: []*csi.PluginCapability{ { Type: &csi.PluginCapability_Service_{ Service: &csi.PluginCapability_Service{ Type: csi.PluginCapability_Service_CONTROLLER_SERVICE, }, }, }, { Type: &csi.PluginCapability_VolumeExpansion_{ VolumeExpansion: &csi.PluginCapability_VolumeExpansion{ Type: csi.PluginCapability_VolumeExpansion_OFFLINE, }, }, }, { Type: &csi.PluginCapability_VolumeExpansion_{ VolumeExpansion: &csi.PluginCapability_VolumeExpansion{ Type: csi.PluginCapability_VolumeExpansion_ONLINE, }, }, }, { Type: &csi.PluginCapability_Service_{ Service: &csi.PluginCapability_Service{ Type: csi.PluginCapability_Service_VOLUME_ACCESSIBILITY_CONSTRAINTS, }, }, }, }, }, }, { name: "empty", config: &driver.InitDiskDriverInput{}, }, } for _, test := range tests { driver := driver.GetDiskDriver() driver.InitDiskDriver(test.config) is := NewIdentityServer(driver, nil) resp, _ := is.GetPluginCapabilities(context.Background(), &csi.GetPluginCapabilitiesRequest{}) if !reflect.DeepEqual(resp.GetCapabilities(), test.config.PluginCap) { t.Errorf("testcase %s: expect cap %v, but actually %v", test.name, test.config.PluginCap, resp.GetCapabilities()) } } } func TestIdentityServer_GetPluginInfo(t *testing.T) { tests := []struct { name string config *driver.InitDiskDriverInput err error }{ { name: "normal", config: &driver.InitDiskDriverInput{ Name: "test-driver", Version: "v19.2.0", }, err: nil, }, { name: "lack of driver name", config: &driver.InitDiskDriverInput{ Version: "v19.2.0", }, err: status.Error(codes.Unavailable, "Driver name not configured"), }, { name: "lack of driver version", config: &driver.InitDiskDriverInput{ Name: "mock_driver", }, err: status.Error(codes.Unavailable, "Driver is missing version"), }, } for _, test := range tests { driver := driver.GetDiskDriver() driver.InitDiskDriver(test.config) is := NewIdentityServer(driver, nil) resp, err := is.GetPluginInfo(context.Background(), &csi.GetPluginInfoRequest{}) if 
!reflect.DeepEqual(test.err, err) { t.Errorf("testcase %s: expect error %s, but actually %s", test.name, test.err, err) } if err == nil && resp.GetName() != test.config.Name { t.Errorf("testcase %s: expect name %s, but actually %s", test.name, test.config.Name, resp.GetName()) } if err == nil && resp.GetVendorVersion() != test.config.Version { t.Errorf("testcase %s: expect version %s, but actually %s", test.name, test.config.Version, resp.GetVendorVersion()) } } }
/**
 * Tests the contents of two collections to determine if they are equal. This method will return true if and only
 * if all items in collection 1 are present in collection 2 and all items in collection 2 are present in collection
 * 1. Furthermore, for collections that may contain duplicates (such as {@link List}s), both lists must be the same
 * length for this to be true.
 *
 * @param c1 collection 1
 * @param c2 collection 2
 * @param <T> the type of collection 1 and 2 (if either collection is typed, both collections must have the same
 *            type)
 * @return true if the contents of collection 1 and 2 are identical
 */
public static <T> boolean allEquals(Collection<T> c1, Collection<T> c2) {
    // FIX: the size check was previously applied only when BOTH arguments
    // were Sets, where mutual containment already implies equal size. The
    // documented contract ("both lists must be the same length") was never
    // enforced, so e.g. [a, a, b] vs [a, b] compared equal. Checking size
    // unconditionally enforces the contract and is also a cheap
    // short-circuit for every collection type.
    if (c1.size() != c2.size()) {
        return false;
    }
    for (T t : c1) {
        if (!c2.contains(t)) {
            return false;
        }
    }
    for (T t : c2) {
        if (!c1.contains(t)) {
            return false;
        }
    }
    return true;
}
Steven Spielberg is the subject of a new feature-length documentary set at HBO. Directed and produced by Emmy and Peabody-winning documentarian Susan Lacy, Spielberg premieres Saturday, October 7 on HBO. The documentary, which includes extensive interviews with Spielberg, chronicles the filmmaker’s nearly 50-year-career and includes insights from industry notables including J.J. Abrams, Christian Bale, Drew Barrymore, Cate Blanchett, Francis Ford Coppola, Daniel Craig, Daniel Day-Lewis, Brian de Palma, Laura Dern, Leonardo DiCaprio, Richard Dreyfuss, Ralph Fiennes, Harrison Ford, David Geffen, Tom Hanks, Dustin Hoffman, Holly Hunter, Jeffrey Katzenberg, Ben Kingsley, Kathleen Kennedy, George Lucas, Liam Neeson, Martin Scorsese, Oprah Winfrey and Robert Zemeckis. The documentary will also be available on HBO Now, HBO Go, HBO On Demand and affiliate portals. An HBO Documentary Films presentation, Spielberg is produced by Emma Pildes and Jessica Levin and edited by Deborah Peretz.
<filename>helper_methods.py
# -*- coding: utf-8 -*-
from ryu.lib.packet import ethernet, ether_types as ether, packet
from ryu.ofproto import ofproto_v1_3 as ofp
from ryu.ofproto import ofproto_v1_3_parser as parser
import hashlib
from ryu.lib.packet import packet, ethernet, arp, vlan

# Contains methods that can be used by different classes and apps.


def send_msgs(dp, msgs):
    "Send all the messages provided to the datapath"
    if bool(msgs):
        # print('msgs start to send\n')
        for msg in msgs:
            # print("! ", dp.id, msg)
            # print()
            # print()
            dp.send_msg(msg)
        # print('End of send')


def send_l3_msgs(msgs):
    # Structure: msgs = {dp: [msgs]} — a dict mapping each datapath to
    # the list of messages to send to it.
    for dp in msgs.keys():
        if bool(msgs[dp]):
            for msg in msgs[dp]:
                dp.send_msg(msg)


def make_message(datapath, cookie, table_id, priority, match, instructions=None,
                 actions=None, buffer_id=None, command=None,
                 idle_timeout=0, hard_timeout=0):
    """Build an OFPFlowMod (flow add by default) for the given datapath.

    `actions` are wrapped in an APPLY_ACTIONS instruction; any extra
    `instructions` are appended after it. When `command` is None the
    message is a flow ADD.
    """
    ofproto = datapath.ofproto
    parser = datapath.ofproto_parser
    inst = []
    if actions is not None:
        inst += [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, actions)]
    if instructions is not None:
        inst += instructions
    if command is None:
        command = ofproto.OFPFC_ADD
    # buffer_id is only attached when truthy (note: a valid buffer id of 0
    # would be skipped by this test).
    if buffer_id:
        msg = parser.OFPFlowMod(datapath=datapath, cookie=cookie, table_id=table_id,
                                priority=priority, buffer_id=buffer_id, match=match,
                                instructions=inst, command=command,
                                idle_timeout=idle_timeout, hard_timeout=hard_timeout)
    else:
        msg = parser.OFPFlowMod(datapath=datapath, cookie=cookie, table_id=table_id,
                                priority=priority, match=match, instructions=inst,
                                command=command, idle_timeout=idle_timeout,
                                hard_timeout=hard_timeout)
    return msg


def del_flow(dp, cookie, table_id=None, match=None, out_port=None, out_group=None,
             priority=32768, actions=None, instructions=None,
             idle_timeout=0, hard_timeout=0):
    """Build an OFPFlowMod DELETE message matching the given cookie exactly
    (cookie_mask is all ones). Defaults delete from every table and any
    port/group.
    """
    parser = dp.ofproto_parser
    ofp = dp.ofproto
    if out_port is None:
        out_port = ofp.OFPP_ANY
    if out_group is None:
        out_group = ofp.OFPG_ANY
    if table_id is None:
        table_id = ofp.OFPTT_ALL
    inst = []
    if actions is not None:
        inst += [parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions)]
    if instructions is not None:
        inst += instructions
    msg = parser.OFPFlowMod(cookie=cookie, cookie_mask=0xFFFFFFFFFFFFFFFF,
                            datapath=dp, table_id=table_id,
                            command=ofp.OFPFC_DELETE,
                            out_port=out_port, out_group=out_group,
                            match=match, priority=priority, instructions=inst,
                            idle_timeout=idle_timeout, hard_timeout=hard_timeout)
    return msg


def port_up(port, datapath):
    """Build an OFPPortMod that brings the given port up (clears all flags)."""
    if port.mac is None:
        # Such an interface does not exist.
        return []
    proto = datapath.ofproto
    mask_all = (proto.OFPPC_PORT_DOWN | proto.OFPPC_NO_RECV |
                proto.OFPPC_NO_FWD | proto.OFPPC_NO_PACKET_IN)
    # hw_addr=hw_addr,
    # config=0 means "up" state = no flag configured
    return [parser.OFPPortMod(datapath, port_no=port.num, config=0,
                              mask=mask_all, hw_addr=port.mac)]


def port_shut(port, datapath):
    """Build an OFPPortMod that administratively shuts the given port down."""
    if port.mac is None:
        return []
    proto = datapath.ofproto
    # hw_addr=hw_addr,
    return [parser.OFPPortMod(datapath, port_no=port.num,
                              mask=(proto.OFPPC_PORT_DOWN),
                              config=proto.OFPPC_PORT_DOWN,
                              hw_addr=port.mac)]


def goto_table(table_id):
    "Generate an OFPInstructionGotoTable message"
    return parser.OFPInstructionGotoTable(table_id)


def apply_actions(dp, actions):
    "Generate an OFPInstructionActions message with OFPIT_APPLY_ACTIONS"
    return dp.ofproto_parser.OFPInstructionActions(dp.ofproto.OFPIT_APPLY_ACTIONS, actions)


def action_output(dp, port, max_len=None):
    "Generate an OFPActionOutput message"
    kwargs = {'port': port}
    if max_len != None:
        kwargs['max_len'] = max_len
    return dp.ofproto_parser.OFPActionOutput(**kwargs)


def match(dp, in_port=None, eth_dst=None, eth_src=None, eth_type=None, **kwargs):
    "Generate an OFPMatch message"
    if in_port != None:
        kwargs['in_port'] = in_port
    if eth_dst != None:
        kwargs['eth_dst'] = eth_dst
    if eth_src != None:
        kwargs['eth_src'] = eth_src
    if eth_type != None:
        kwargs['eth_type'] = eth_type
    return dp.ofproto_parser.OFPMatch(**kwargs)


def barrier_request(dp):
    """Generate an OFPBarrierRequest message

    Used to ensure all previous flowmods are applied before running the
    flowmods after this request. For example, make sure the flowmods that
    delete any old flows for a host complete before adding the new flows.
    Otherwise there is a chance that the delete operation could occur
    after the new flows are added in a multi-threaded datapath.
    """
    return [dp.ofproto_parser.OFPBarrierRequest(datapath=dp)]


def props(cls):
    # Get all class properties (public attribute names).
    return [i for i in cls.__dict__.keys() if i[:1] != '_']


def hash_for(data):
    # Prepare the project id hash.
    # NOTE(review): MD5 over repr() — used as a stable identifier only,
    # not for security.
    hashId = hashlib.md5()
    hashId.update(repr(data).encode('utf-8'))
    return hashId.hexdigest()


def get_key(d, value):
    # Get a key by its value in the dict (first match, else None).
    for k, v in d.items():
        if v == value:
            return k
    return None


# @functools.lru_cache(maxsize=1024)
def arp_request(src_mac, src_ip, dst_ip, vid=None):
    """Build and serialize a broadcast ARP request packet.

    When `vid` is given the frame is 802.1Q-tagged with that VLAN id.
    """
    src_ip = str(src_ip)
    dst_ip = str(dst_ip)
    BROADCAST = 'ff:ff:ff:ff:ff:ff'
    # BROADCAST = '00:00:00:00:00:00'
    e = ethernet.ethernet(src=src_mac, dst=BROADCAST, ethertype=0x806)
    a = arp.arp(opcode=arp.ARP_REQUEST, src_mac=src_mac, src_ip=src_ip,
                dst_mac=BROADCAST, dst_ip=dst_ip)
    p = packet.Packet()
    if vid is not None:
        # 0x8100 - vlan ethertype
        vl_e = ethernet.ethernet(src=src_mac, dst=BROADCAST, ethertype=0x8100)
        vl = vlan.vlan(vid=vid, ethertype=0x806)
        p.add_protocol(vl_e)
        p.add_protocol(vl)
    else:
        p.add_protocol(e)
    p.add_protocol(a)
    p.serialize()
    return p


def arp_reply(dp, out_ports, src_mac, src_ip, dst_mac, dst_ip, vid=None):
    """Build an ARP reply and wrap it in PacketOut messages for `out_ports`.

    When `vid` is given the frame is 802.1Q-tagged with that VLAN id.
    """
    src_ip = str(src_ip)
    dst_ip = str(dst_ip)
    p = packet.Packet()
    print(dp, out_ports, src_mac, src_ip, dst_mac, dst_ip)
    e = ethernet.ethernet(src=src_mac, dst=dst_mac, ethertype=0x806)
    a = arp.arp(opcode=2, src_mac=src_mac, src_ip=src_ip,
                dst_mac=dst_mac, dst_ip=dst_ip)
    if vid is not None:
        # 0x8100 - vlan ethertype
        vl_e = ethernet.ethernet(src=src_mac, dst=dst_mac, ethertype=0x8100)
        vl = vlan.vlan(vid=vid, ethertype=0x806)
        p.add_protocol(vl_e)
        p.add_protocol(vl)
    else:
        p.add_protocol(e)
    p.add_protocol(a)
    p.serialize()
    return packet_output(p, out_ports, dp)


def packet_output(packet, out_ports, dp):
    """Wrap serialized packet bytes in an OFPPacketOut towards `out_ports`."""
    ofproto = dp.ofproto
    parser = dp.ofproto_parser
    actions = []
    for port in out_ports:
        actions += [parser.OFPActionOutput(port)]
    return [parser.OFPPacketOut(datapath=dp, buffer_id=ofproto.OFP_NO_BUFFER,
                                in_port=ofproto.OFPP_CONTROLLER,
                                actions=actions, data=packet)]
<reponame>dhaiducek/multicloud-operators-subscription
// Copyright 2020 The Kubernetes Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package utils

import (
	"context"
	"testing"
	"time"

	"github.com/ghodss/yaml"
	"github.com/onsi/gomega"
	"helm.sh/helm/v3/pkg/repo"
	corev1 "k8s.io/api/core/v1"
	clientsetx "k8s.io/apiextensions-apiserver/pkg/client/clientset/clientset"
	"k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"sigs.k8s.io/controller-runtime/pkg/client"
	"sigs.k8s.io/controller-runtime/pkg/manager"

	chnv1 "github.com/open-cluster-management/multicloud-operators-channel/pkg/apis/apps/v1"
	releasev1 "github.com/open-cluster-management/multicloud-operators-subscription-release/pkg/apis/apps/v1"
	appv1 "github.com/open-cluster-management/multicloud-operators-subscription/pkg/apis/apps/v1"
	appv1alpha1 "github.com/open-cluster-management/multicloud-operators-subscription/pkg/apis/apps/v1"
)

// Shared fixtures for the helm-repo channel tests below.
// NOTE(review): githubsub/githubchn/cfg/c/StartTestManager are defined in a
// sibling test file of this package.
var (
	helmkey = types.NamespacedName{
		Name:      "testhelmkey",
		Namespace: "default",
	}

	helmchn = &chnv1.Channel{
		ObjectMeta: metav1.ObjectMeta{
			Name:      helmkey.Name,
			Namespace: helmkey.Namespace,
		},
		Spec: chnv1.ChannelSpec{
			Type:     "HelmRepo",
			Pathname: "https://github.com/open-cluster-management/multicloud-operators-subscription/test/helm",
		},
	}

	helmsub = &appv1.Subscription{
		ObjectMeta: metav1.ObjectMeta{
			Name:      helmkey.Name,
			Namespace: helmkey.Namespace,
		},
		Spec: appv1.SubscriptionSpec{
			Channel: helmkey.String(),
		},
	}
)

// TestGetPackageAlias verifies alias lookup: empty when no overrides or no
// alias is configured, and the alias value when one is set.
func TestGetPackageAlias(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	pkgAlias := GetPackageAlias(githubsub, "")
	g.Expect(pkgAlias).To(gomega.Equal(""))

	pkgOverrides1 := &appv1.Overrides{}
	pkgOverrides1.PackageName = "pkgName1"

	pkgOverrides2 := &appv1.Overrides{}
	pkgOverrides2.PackageName = "pkgName2"
	pkgOverrides2.PackageAlias = "pkgName2Alias"

	packageOverrides := make([]*appv1.Overrides, 0)
	packageOverrides = append(packageOverrides, pkgOverrides1, pkgOverrides2)

	githubsub.Spec.PackageOverrides = packageOverrides

	pkgAlias = GetPackageAlias(githubsub, "pkgName1")
	g.Expect(pkgAlias).To(gomega.Equal(""))

	pkgAlias = GetPackageAlias(githubsub, "pkgName2")
	g.Expect(pkgAlias).To(gomega.Equal("pkgName2Alias"))
}

// TestGenerateHelmIndexFile checks that an index file is built with one
// entry per chart directory.
func TestGenerateHelmIndexFile(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	chartDirs := make(map[string]string)
	chartDirs["../../test/github/helmcharts/chart1/"] = "../../test/github/helmcharts/chart1/"
	chartDirs["../../test/github/helmcharts/chart2/"] = "../../test/github/helmcharts/chart2/"

	indexFile, err := GenerateHelmIndexFile(githubsub, "../..", chartDirs)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(len(indexFile.Entries)).To(gomega.Equal(2))
}

// TestConfigMapSecretRefsInHelmRelease verifies that channel-level
// ConfigMap/Secret references are propagated into the generated
// HelmRelease with the channel's namespace.
func TestConfigMapSecretRefsInHelmRelease(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	mgr, err := manager.New(cfg, manager.Options{MetricsBindAddress: "0"})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	c = mgr.GetClient()

	ctx, cancel := context.WithTimeout(context.TODO(), 5*time.Minute)
	mgrStopped := StartTestManager(ctx, mgr, g)

	defer func() {
		cancel()
		mgrStopped.Wait()
	}()

	chartDirs := make(map[string]string)
	chartDirs["../../test/github/helmcharts/chart1/"] = "../../test/github/helmcharts/chart1/"
	chartDirs["../../test/github/helmcharts/chart2/"] = "../../test/github/helmcharts/chart2/"

	indexFile, err := GenerateHelmIndexFile(githubsub, "../..", chartDirs)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(len(indexFile.Entries)).To(gomega.Equal(2))

	time.Sleep(3 * time.Second)

	githubchnNew := githubchn.DeepCopy()
	githubchnNew.Spec.ConfigMapRef = &corev1.ObjectReference{Name: "channel-configmap"}
	githubchnNew.Spec.SecretRef = &corev1.ObjectReference{Name: "channel-secret"}

	githubsub.UID = "dummyuid"
	helmrelease, err := CreateOrUpdateHelmChart("chart1", "chart1-1.0.0", indexFile.Entries["chart1"], c, githubchnNew, nil, githubsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(helmrelease).NotTo(gomega.BeNil())
	g.Expect(helmrelease.Repo.ConfigMapRef.Namespace).To(gomega.Equal(githubchnNew.Namespace))
	g.Expect(helmrelease.Repo.SecretRef.Namespace).To(gomega.Equal(githubchnNew.Namespace))
}

// TestCreateOrUpdateHelmChart exercises chart creation and update, and
// checks that relative chart URLs are resolved against the channel
// pathname while absolute URLs pass through unchanged.
func TestCreateOrUpdateHelmChart(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	mgr, err := manager.New(cfg, manager.Options{MetricsBindAddress: "0"})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	c = mgr.GetClient()

	ctx, cancel := context.WithTimeout(context.TODO(), 5*time.Minute)
	mgrStopped := StartTestManager(ctx, mgr, g)

	defer func() {
		cancel()
		mgrStopped.Wait()
	}()

	chartDirs := make(map[string]string)
	chartDirs["../../test/github/helmcharts/chart1/"] = "../../test/github/helmcharts/chart1/"
	chartDirs["../../test/github/helmcharts/chart2/"] = "../../test/github/helmcharts/chart2/"

	indexFile, err := GenerateHelmIndexFile(githubsub, "../..", chartDirs)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(len(indexFile.Entries)).To(gomega.Equal(2))

	time.Sleep(3 * time.Second)

	githubsub.UID = "dummyuid"
	helmrelease, err := CreateOrUpdateHelmChart("chart1", "chart1-1.0.0", indexFile.Entries["chart1"], c, githubchn, nil, githubsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(helmrelease).NotTo(gomega.BeNil())

	err = c.Create(context.TODO(), helmrelease)
	g.Expect(err).NotTo(gomega.HaveOccurred())

	// Sleep to make sure the helm release is created in the test kube
	time.Sleep(5 * time.Second)

	helmrelease, err = CreateOrUpdateHelmChart("chart1", "chart1-1.0.0", indexFile.Entries["chart1"], c, githubchn, nil, githubsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(helmrelease).NotTo(gomega.BeNil())

	// Relative URL: expected to be joined with the helm channel pathname.
	var relativeUrls []string
	relativeUrls = append(relativeUrls, "my-app-0.1.0.tgz")

	var relativeChartVersions []*repo.ChartVersion
	relativeChartVersions = append(relativeChartVersions, &repo.ChartVersion{URLs: relativeUrls})

	helmrelease, err = CreateOrUpdateHelmChart("my-app", "my-app-0.1.0", relativeChartVersions, c, helmchn, nil, helmsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(helmrelease).NotTo(gomega.BeNil())
	g.Expect(helmrelease.Repo.Source.HelmRepo.Urls[0]).
		Should(gomega.Equal(
			"https://github.com/open-cluster-management/multicloud-operators-subscription/test/helm/my-app-0.1.0.tgz"))

	// Absolute URL: expected to pass through untouched.
	var fullUrls []string
	fullUrls = append(fullUrls, "https://charts.helm.sh/stable/packages/nginx-ingress-1.36.3.tgz")

	var fullChartVersions []*repo.ChartVersion
	fullChartVersions = append(fullChartVersions, &repo.ChartVersion{URLs: fullUrls})

	helmrelease, err = CreateOrUpdateHelmChart("nginx-ingress", "nginx-ingress-1.36.3", fullChartVersions, c, helmchn, nil, helmsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(helmrelease).NotTo(gomega.BeNil())
	g.Expect(helmrelease.Repo.Source.HelmRepo.Urls[0]).
		Should(gomega.Equal(
			"https://charts.helm.sh/stable/packages/nginx-ingress-1.36.3.tgz"))
}

// TestCheckVersion verifies package-filter version matching: exact match,
// non-matching version, and no filter (matches anything).
func TestCheckVersion(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	// Test Git clone with a secret
	mgr, err := manager.New(cfg, manager.Options{MetricsBindAddress: "0"})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	c = mgr.GetClient()

	ctx, cancel := context.WithTimeout(context.TODO(), 5*time.Minute)
	mgrStopped := StartTestManager(ctx, mgr, g)

	defer func() {
		cancel()
		mgrStopped.Wait()
	}()

	chartDirs := make(map[string]string)
	chartDirs["../../test/github/helmcharts/chart1/"] = "../../test/github/helmcharts/chart1/"

	packageFilter := &appv1alpha1.PackageFilter{}
	packageFilter.Version = "1.1.1"

	githubsub.Spec.PackageFilter = packageFilter
	githubsub.Spec.Package = "chart1"

	indexFile, err := GenerateHelmIndexFile(githubsub, "../..", chartDirs)
	g.Expect(err).NotTo(gomega.HaveOccurred())

	chartVersion, err := indexFile.Get("chart1", "1.1.1")
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(chartVersion).NotTo(gomega.BeNil())

	ret := checkVersion(githubsub, chartVersion)
	g.Expect(ret).To(gomega.BeTrue())

	subanno := make(map[string]string)
	subanno[appv1alpha1.AnnotationGitPath] = "test/github/helmcharts"
	githubsub.SetAnnotations(subanno)

	packageFilter = &appv1alpha1.PackageFilter{}
	packageFilter.Version = "2.0.0"
	githubsub.Spec.PackageFilter = packageFilter

	ret = checkVersion(githubsub, chartVersion)
	g.Expect(ret).To(gomega.BeFalse())

	packageFilter = &appv1alpha1.PackageFilter{}
	githubsub.Spec.PackageFilter = packageFilter

	ret = checkVersion(githubsub, chartVersion)
	g.Expect(ret).To(gomega.BeTrue())
}

// TestOverride verifies that packageOverrides from a subscription YAML can
// be applied to a generated HelmRelease without error.
func TestOverride(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	// Test Git clone with a secret
	mgr, err := manager.New(cfg, manager.Options{MetricsBindAddress: "0"})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	c = mgr.GetClient()

	ctx, cancel := context.WithTimeout(context.TODO(), 5*time.Minute)
	mgrStopped := StartTestManager(ctx, mgr, g)

	defer func() {
		cancel()
		mgrStopped.Wait()
	}()

	// NOTE(review): this raw string is YAML; indentation reconstructed to
	// parse into a Subscription with a chart1 packageOverride.
	substr2 := `apiVersion: apps.open-cluster-management.io/v1
kind: Subscription
metadata:
  name: git-sub
  namespace: default
spec:
  channel: default/testkey
  package: chart1
  packageFilter:
    version: 1.1.1
  packageOverrides:
  - packageName: chart1
    packageOverrides:
    - path: spec
      value: |
        persistence:
          enabled: false`

	sub2 := &appv1alpha1.Subscription{}
	err = yaml.Unmarshal([]byte(substr2), &sub2)
	g.Expect(err).NotTo(gomega.HaveOccurred())

	chartDirs := make(map[string]string)
	chartDirs["../../test/github/helmcharts/chart1/"] = "../../test/github/helmcharts/chart1/"
	chartDirs["../../test/github/helmcharts/chart2/"] = "../../test/github/helmcharts/chart2/"

	indexFile, err := GenerateHelmIndexFile(sub2, "../..", chartDirs)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(len(indexFile.Entries)).To(gomega.Equal(1))

	time.Sleep(3 * time.Second)

	sub2.UID = "dummyuid"
	helmrelease, err := CreateOrUpdateHelmChart("chart1", "chart1-1.1.1", indexFile.Entries["chart1"], c, githubchn, nil, sub2)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(helmrelease).NotTo(gomega.BeNil())

	err = Override(helmrelease, sub2)
	g.Expect(err).NotTo(gomega.HaveOccurred())
}

// TestCreateHelmCRDeployable checks deployable creation for both Git and
// HelmRepo channel types and that the deployable name is stable across
// chart versions.
func TestCreateHelmCRDeployable(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	// Test Git clone with a secret
	mgr, err := manager.New(cfg, manager.Options{MetricsBindAddress: "0"})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	c = mgr.GetClient()

	ctx, cancel := context.WithTimeout(context.TODO(), 5*time.Minute)
	mgrStopped := StartTestManager(ctx, mgr, g)

	defer func() {
		cancel()
		mgrStopped.Wait()
	}()

	chartDirs := make(map[string]string)
	chartDirs["../../test/github/helmcharts/chart1/"] = "../../test/github/helmcharts/chart1/"
	chartDirs["../../test/github/helmcharts/chart1Upgrade/"] = "../../test/github/helmcharts/chart1Upgrade/"
	chartDirs["../../test/github/helmcharts/chart2/"] = "../../test/github/helmcharts/chart2/"

	packageFilter := &appv1alpha1.PackageFilter{}
	packageFilter.Version = "1.1.1"

	githubsub.Spec.PackageFilter = packageFilter
	githubsub.Spec.Package = "chart1"

	indexFile, err := GenerateHelmIndexFile(githubsub, "../..", chartDirs)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(len(indexFile.Entries)).To(gomega.Equal(1))

	time.Sleep(3 * time.Second)

	githubsub.UID = "dummyuid"
	dpl, err := CreateHelmCRDeployable("../..", "chart1", indexFile.Entries["chart1"], c, githubchn, nil, githubsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(dpl).NotTo(gomega.BeNil())

	dplName1 := dpl.Name

	githubchn.Spec.Type = chnv1.ChannelTypeHelmRepo
	dpl, err = CreateHelmCRDeployable("../..", "chart1", indexFile.Entries["chart1"], c, githubchn, nil, githubsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(dpl).NotTo(gomega.BeNil())

	packageFilter.Version = "1.2.2"
	githubsub.Spec.PackageFilter = packageFilter
	githubsub.Spec.Package = "chart1"

	indexFile, err = GenerateHelmIndexFile(githubsub, "../..", chartDirs)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(len(indexFile.Entries)).To(gomega.Equal(1))

	time.Sleep(3 * time.Second)

	dpl, err = CreateHelmCRDeployable("../..", "chart1", indexFile.Entries["chart1"], c, githubchn, nil, githubsub)
	g.Expect(err).NotTo(gomega.HaveOccurred())
	g.Expect(dpl).NotTo(gomega.BeNil())

	dplName2 := dpl.Name

	// Test that the deployable names are the same for the same charts with different versions
	g.Expect(dplName1).To(gomega.Equal(dplName2))
}

// TestDeleteHelmReleaseCRD deletes the HelmRelease CRD and verifies that
// listing HelmReleases afterwards behaves as expected.
func TestDeleteHelmReleaseCRD(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	mgr, err := manager.New(cfg, manager.Options{MetricsBindAddress: "0"})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	c = mgr.GetClient()

	ctx, cancel := context.WithTimeout(context.TODO(), 5*time.Minute)
	mgrStopped := StartTestManager(ctx, mgr, g)

	defer func() {
		cancel()
		mgrStopped.Wait()
	}()

	crdx, err := clientsetx.NewForConfig(cfg)
	g.Expect(err).NotTo(gomega.HaveOccurred())

	runtimeClient, err := client.New(cfg, client.Options{})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	hrlist := &releasev1.HelmReleaseList{}
	err = runtimeClient.List(context.TODO(), hrlist, &client.ListOptions{})
	g.Expect(err).NotTo(gomega.HaveOccurred())

	DeleteHelmReleaseCRD(runtimeClient, crdx)

	hrlist = &releasev1.HelmReleaseList{}
	err = runtimeClient.List(context.TODO(), hrlist, &client.ListOptions{})
	g.Expect(!errors.IsNotFound(err)).To(gomega.BeTrue())

	// NOTE(review): the following block is an exact duplicate of the check
	// above — likely copy-paste; preserved unchanged here.
	hrlist = &releasev1.HelmReleaseList{}
	err = runtimeClient.List(context.TODO(), hrlist, &client.ListOptions{})
	g.Expect(!errors.IsNotFound(err)).To(gomega.BeTrue())
}

// TestIsURL checks absolute-URL detection for chart references.
func TestIsURL(t *testing.T) {
	g := gomega.NewGomegaWithT(t)

	g.Expect(IsURL("https://charts.helm.sh/stable/packages/nginx-ingress-1.40.1.tgz")).To(gomega.BeTrue())
	g.Expect(IsURL("nginx-ingress-1.40.1.tgz")).To(gomega.BeFalse())
}
<gh_stars>0
import React from 'react'

import BreadcrumbsToolbar from 'client/components/BreadcrumbsToolbar'
import JobApi from 'client/ui/jobs/containers/JobApi'
import JobAllocations from './components/JobAllocations'
import { Job, JobSummary, isBedouinMeta } from 'shared/types'

// Props for the presentational detail view. Both fields are optional
// because they are absent while JobApi is still fetching.
type Props = {
  jobSummary?: JobSummary
  jobSpec?: Job
}

// Renders the detail view for a single job: template metadata (when the
// job's Meta is bedouin-shaped) followed by the job's allocations.
const JobDetail = ({ jobSummary, jobSpec }: Props) => {
  // Loading guard: JobApi supplies undefined until both pieces arrive.
  if (!jobSummary || !jobSpec) {
    return <span>fetching...</span>
  }

  // Template metadata is only shown for jobs whose Meta passes the
  // isBedouinMeta type guard; otherwise an empty fragment is rendered.
  let templateInfo = <></>
  const { Meta } = jobSpec
  if (isBedouinMeta(Meta)) {
    templateInfo = (
      <div>
        {Meta._b_author} <br />
        {Meta._b_templateName} <br />
        {Meta._b_templateChecksum} <br />
        {Meta._b_templateParameters}
      </div>
    )
  }

  return (
    <div>
      <BreadcrumbsToolbar />
      <div>{jobSummary.JobID}</div>
      <br />
      <br />
      {templateInfo}
      <br />
      <br />
      <JobAllocations jobId={jobSummary.JobID} taskGroups={jobSpec.TaskGroups} />
    </div>
  )
}

// Shape of the react-router match object this screen is mounted with.
type ScreenProps = {
  match: { url: string; params: { id: string } }
}

// Route-level wrapper: pulls the job id out of the router match and feeds
// the fetched job data into the presentational JobDetail component.
const JobDetailScreen = (props: ScreenProps) => {
  const {
    match: {
      params: { id },
    },
  } = props
  return <JobApi jobId={id}>{({ jobApi }) => <JobDetail {...jobApi} />}</JobApi>
}

export default JobDetailScreen
def cycle_crossover(self, parent_generation, father, mother, cnt, round_number):
    """Cross two parent networks and write both offspring to the next generation.

    Loads the father and mother networks from the parent generation's pool,
    then produces two children: father x mother and mother x father.
    Returns the updated child counter.
    """
    parent_dir = 'GApool/generation' + str(parent_generation) + '/'
    child_dir = 'GApool/generation' + str(parent_generation + 1) + '/'

    def load_parent(ident):
        # ident is a (round, index) pair identifying the stored network file.
        return self.fetch_net('../results/' + parent_dir + "astar-%04d-%02d.json" % (ident[0], ident[1]))

    first_net = load_parent(father)
    second_net = load_parent(mother)

    # Produce one child per parent ordering.
    for lead, follow in ((first_net, second_net), (second_net, first_net)):
        cnt = self.produce_child(child_dir, lead, follow, round_number, cnt)
    return cnt
An empirical foundation for planning an emerging pedagogical method of Physical Education from the predominant learning styles The objective of this research was to make a contribution to the process of increasing knowledge through the exploratory empirical demonstration of an emerging pedagogical theory on the planning of sports training in Spain. For this purpose, a cross-sectional investigation was designed with a quantitative approach composed of a simple random sample of 120 students (n = 120) from secondary education and from the degree in Physical Activity and Sports Sciences, who were resident in Spain, and who were administered the Honey-Alonso Questionnaire (CHAEA) on learning styles (Alonso, Gallego, & Honey, 1994). The predominant learning styles were identified and the possible changes that occur in the learning styles of the students at their different educational stages were examined. The assumption of normality was tested using the K-S test and the equality of variances using the Levene test. Student's t test was performed to compare the characteristics of both groups. The results showed the predominance of the "Reflective", "Pragmatic", "Theoretical" and "Active" styles, respectively. No significant differences were found in learning styles at the stages studied. Given the importance that emerging pedagogical methods attach to individualised teaching, it seems appropriate to discuss the planning of an emerging pedagogical method taking as a reference the characteristics of students based on the predominant learning styles.
// WriteData writes a JPEG data segment, which follows a marker. func WriteData(writer io.Writer, buf []byte) error { len := len(buf) + 2 if len >= 2<<15 { return errors.New(fmt.Sprintf("writeData: data is too long (%d), max 2^16 - 3 (%d)", len-2, 2<<15-3)) } lenbuf := make([]byte, 2) lenbuf[0] = byte(len / 256) lenbuf[1] = byte(len % 256) if _, err := writer.Write(lenbuf); err != nil { return err } _, err := writer.Write(buf) return err }
/** * This Class was created for SimpleValueConverter on 19/12/16 * Designed and developed by Francesco Florio * All Right Reserved. */ public abstract class WS { protected static Retrofit createRetrofit(OkHttpClient okHttpClient, Executor executor, String baseUrl){ Retrofit retrofit = new Retrofit.Builder() .baseUrl(baseUrl) .client(okHttpClient) .callbackExecutor(executor) .addCallAdapterFactory(RxJavaCallAdapterFactory.create()) .addConverterFactory(createGsonConverterFactory()) .build(); return retrofit; } protected static Executor createExecutor(){ return Executors.newCachedThreadPool(); } protected static Interceptor createInterceptor(){ HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor(); interceptor.setLevel(HttpLoggingInterceptor.Level.BODY); return interceptor; } public static GsonBuilder getGsonBuilder(){ GsonBuilder gsonBuilder = new GsonBuilder(); return gsonBuilder; } protected static GsonConverterFactory createGsonConverterFactory(){ return GsonConverterFactory.create(getGsonBuilder().create()); } protected static OkHttpClient createOkHttpClient(Interceptor interceptor){ OkHttpClient.Builder builder = new OkHttpClient.Builder() .connectTimeout(WSConfig.CONNECTION_TIMEOUT_IN_SECONDS, TimeUnit.SECONDS) .readTimeout(WSConfig.READ_TIMEOUT_IN_SECONDS, TimeUnit.SECONDS) .retryOnConnectionFailure(true); if(BuildConfig.DEBUG){ builder.addInterceptor(interceptor); } return builder.build(); } protected static Retrofit createDefaultRetrofitConfiguration(){ final Interceptor interceptor = createInterceptor(); final OkHttpClient okHttpClient = createOkHttpClient(interceptor); final Executor executor = createExecutor(); return createRetrofit(okHttpClient, executor, WSConfig.BASE_URL); } }
def _destroy_subnets(self):
    """Delete every subnet that belongs to this VPC."""
    described = throttled_call(self.boto3_ec2.describe_subnets, Filters=self.vpc_filters())
    # Throttle each delete call the same way as the describe call.
    for subnet_id in (entry['SubnetId'] for entry in described['Subnets']):
        throttled_call(self.boto3_ec2.delete_subnet, SubnetId=subnet_id)
<reponame>obiSerra/saasform import { UserCredentialsEntity } from './userCredentials.entity' describe('User Credentials entity', () => { it('with email, should create the entity', () => { const user = new UserCredentialsEntity('<EMAIL>') expect(user).toBeDefined() expect(user.credential).toBe('<EMAIL>') }) it('with email, should create the entity setting default values', () => { const user = new UserCredentialsEntity('<EMAIL>') expect(user).toBeDefined() expect(user.json?.encryptedPassword).toBe(undefined) expect(user.json?.googleId).toBe(undefined) }) })
import { timeStamp } from 'console' import { stringify } from 'querystring' import { WordsApiRequest } from './wordApiRequest' import { inject, injectable, named } from 'inversify' export interface IWord { id: number letters: string[] completed: boolean get completedOn(): number set completedOn(value: number) get length(): number toString(): string } @injectable() export class Word implements IWord { constructor( public id: number, public letters: string[], public completed: boolean = false ) { this.id = id this.letters = letters this.completed = completed } get completedOn(): number { return Date.now() } set completedOn(value: number) { this.completedOn = value } get length(): number { return this.letters?.length ?? 0 } public static buildWord(id: number, letters: string[]): IWord { return new Word(id, letters, false) } static empty(): Word { const word = new Word(0, [], false) return word } public toString = (): string => { if (this.letters.length > 0) { return this.letters.join('') } else { return Word.empty.toString() } } } export interface IWordApiRepository { get(): IWord[] getById(id: number): IWord add(word: Word): number edit(id: number, word: IWord): IWord delete(id: number): IWord changeStatus(id: number, completionStatus: boolean): IWord } // concrete repository for looking up words from the WordsApi service // can create adaptors for local storage, database lookups, etc. 
@injectable() export class WordApiServiceCatalog implements IWordApiRepository { private wordList: IWord[] = new Array<IWord>( new Word(0, ['a', 'l', 'p', 'h', 'a']), new Word(0, ['s', 't', 'i', 'l', 'l']) ) get(): IWord[] { return this.wordList } getById(id: number): IWord { try { const localWord = this.wordList.find((word) => word.id == id) if (localWord) { return localWord } else { return Word.empty() } } catch { console.log('error caught') return Word.empty() } } add(word: IWord): number { return this.wordList.push(word) } edit(id: number, word: IWord): IWord { const targetIndex = this.wordList.findIndex((word) => word.id == id) this.wordList[targetIndex].letters = word.letters this.wordList[targetIndex].completed = word.completed this.wordList[targetIndex].completedOn = word.completedOn return this.wordList[targetIndex] } delete(id: number): IWord { const targetIndex = this.wordList.findIndex((word) => word.id == id) if (targetIndex < -1) return Word.empty() return this.wordList.splice(targetIndex, 1)[0] } changeStatus(id: number, completionStatus: boolean): IWord { const targetIndex = this.wordList.findIndex((word) => word.id == id) this.wordList[targetIndex].completed = completionStatus this.wordList[targetIndex].completedOn = Date.now() return this.wordList[targetIndex] } } export class WordCatalogService { constructor(public repository: IWordApiRepository) { this.repository = repository } get(): IWord[] { return this.repository.get() } getById(id: number): IWord { return this.repository.getById(id) } add(word: IWord): number { return this.repository.add(word) } edit(id: number, word: IWord) { return this.repository.edit(id, word) } delete(id: number): IWord { return this.repository.delete(id) } changeStatus(id: number, completionStatus: boolean): IWord { return this.repository.changeStatus(id, completionStatus) } }
import { Component } from '@angular/core';

// Root shell of the demo application: renders the page header and hosts the
// two demo components (dynamic HTML rendering and dynamic component
// rendering), separated by horizontal rules.
@Component({
  selector: 'my-app',
  template: `
    <header><h1>ng-dynamic</h1></header>
    <main>
      <hr/>
      <dynamic-html-demo></dynamic-html-demo>
      <hr/>
      <dynamic-cmp-demo></dynamic-cmp-demo>
    </main>
  `,
})
export class AppComponent {
}
package ru.sbtqa.tag.pagefactory.web.aspects;

import org.aeonbits.owner.ConfigFactory;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.interactions.Actions;
import ru.sbtqa.tag.pagefactory.environment.Environment;
import ru.sbtqa.tag.pagefactory.web.properties.WebConfiguration;

/**
 * AspectJ aspect that replaces plain {@code WebElement.click()} calls with a
 * Selenium Actions-based click (move to the element, then click), gated by a
 * configuration flag.
 */
@Aspect
public class ClickViaSeleniumActions {

    private static final WebConfiguration properties = ConfigFactory.create(WebConfiguration.class);

    /**
     * Matches every {@code WebElement.click()} call site; the {@code if()}
     * condition makes the pointcut apply only while the feature flag is
     * enabled in the web configuration.
     */
    @Pointcut("call(* org.openqa.selenium.WebElement.click()) && if()")
    public static boolean isClickViaSeleniumActionsEnabled() {
        return properties.isClickViaSeleniumActionsEnabled();
    }

    /**
     * Around advice performing the click through the Actions API.
     * Note: {@code joinPoint.proceed()} is deliberately never called — the
     * Actions-based click fully replaces the intercepted native click.
     */
    @Around("isClickViaSeleniumActionsEnabled()")
    public void actions(ProceedingJoinPoint joinPoint) throws Throwable {
        WebElement element = (WebElement) joinPoint.getTarget();
        Actions actions = new Actions((WebDriver) Environment.getDriverService().getDriver());
        actions.moveToElement(element);
        actions.click();
        actions.build().perform();
    }
}
fn main() {}

struct Solution;

impl Solution {
    /// LeetCode 389: `t` is `s` plus exactly one extra letter (all lowercase
    /// ASCII). Returns the first character of `t` whose occurrence count
    /// exceeds its count in `s`.
    pub fn find_the_difference(s: String, t: String) -> char {
        // Tally each letter of `s` into a fixed 26-slot histogram.
        let mut counts = [0u8; 26];
        for b in s.bytes() {
            counts[usize::from(b - b'a')] += 1;
        }
        // Walk `t`, decrementing; a letter with an exhausted count is the extra one.
        for b in t.bytes() {
            let slot = usize::from(b - b'a');
            if counts[slot] == 0 {
                return char::from(b);
            }
            counts[slot] -= 1;
        }
        // Unreachable for valid inputs; kept as the original's fallback.
        'a'
    }
}
def close(self):
    """Close the underlying stream exactly once; later calls are no-ops."""
    # Detach the stream before closing so repeated calls see None.
    stream, self._stream = self._stream, None
    if stream is not None:
        stream.close()
Image caption Josh Hill was diagnosed with a form of leukaemia Primary school children with cancer are being bullied, losing friends and missing out on their education, a survey by a cancer charity suggests. Clic Sargent said its interviews with more than 200 families raised concerns about how schools supported pupils who had been diagnosed. It showed the side effects of treatment, such as hair loss and weight gain, were a source of bullying. There were also reports of delays in organising home tutoring. About 1,600 childhood cancers are detected in the UK each year. Leukaemia, a cancer of the white blood cells that fight infection, is the most common childhood cancer. 'Falling behind' Josh Hill, from Cheshire, was diagnosed with leukaemia in his first week at primary school and missed half of that year. His parents had concerns about his levels of basic reading and writing. His mum, Lynda, felt he was "falling behind" at school and never received the support to help him catch up. "I felt like the one who was always asking for work from school, it wasn't the other way round," she said. Sometimes parents, already struggling to cope with their child's diagnosis, have to fight to get the help their child needs - and they can feel really let down by the system Lorraine Clifton, Clic Sargent There were also problems with bullying. His mother said: "In primary school, a lot of children were really understanding, but there were a few kids that would pinch the hat he wore to hide his bald head. "They did it on purpose and it must have really got to him." The survey showed that more than a third of parents said they were unhappy with how their school had helped their child get back into the swing of school or to catch up with missed work. Nearly half said their child had grown apart from schoolmates and some had lost friends. 
'Distressing' The charity's chief executive, Lorraine Clifton, said many children received the support they needed but some were still being let down after being diagnosed with cancer. She said: "No child should have to miss out on their education because they've had cancer - and it's distressing to hear that some are teased and even bullied on their return to school. "Sometimes parents, already struggling to cope with their child's diagnosis, have to fight to get the help their child needs - and they can feel really let down by the system." A Department for Education representative said: "Bullying in all its forms is completely unacceptable, and is particularly deplorable when aimed at a child with a serious illness. "Schools should be safe for all children to learn and reach their full potential free from fear."
def add_other_meta_data(self, other: _MetaData) -> None:
    """Merge every entry from another metadata container into this one.

    Args:
        other: The ``_MetaData`` instance whose entries are copied in via
            ``add_data`` (duplicate keys follow ``add_data``'s own semantics).
    """
    # items() yields key and value in one pass, avoiding the keys()+lookup
    # round trip of the original.
    for key, value in other._meta_data_dict.items():
        self.add_data(key, value)
NP213 (Novexatin®): A unique therapy candidate for onychomycosis with a differentiated safety and efficacy profile Abstract NP213 (Novexatin®) is a novel antifungal peptide specifically designed for the topical treatment of onychomycosis. NP213 was designed using host defense peptides (HDP), essential components of the innate immune response to infection, as a template. NP213 is a water-soluble cyclic fungicidal peptide that effectively penetrates human nail. NP213 demonstrated a promising preclinical and clinical safety profile, with no evidence of systemic exposure following topical application to the skin and nails. NP213 was efficacious in two phase IIa human trials with 43.3% of patients having no fungi detectable by culture of fragments from NP213-treated nails after 180 days in the first study and likewise 56.5% of patients were culture negative for dermatophytes after 360 days in the second phase IIa study. In both trials, NP213 was applied daily for only 28 days in marked contrast to other topical onychomycosis treatments that require application for up to 52 weeks. Patient reported outcomes from the phase IIa studies were positive with participants recording an improved appearance of their nails after only 14 days of application. All fungi identified in these studies were Trichophyton spp. NP213 (Novexatin®) is a promising, highly differentiated peptide-based candidate for the topical treatment of onychomycosis, addressing the infectious cause and cosmetic issues of this very common condition. Introduction To circumvent the limitations and challenges of drug delivery to the nail, we have taken a biological approach to combatting onychomycosis and designed a novel antifungal peptide, Novexatin ® (NP213), specifically for the topical treatment of onychomycosis. NP213 is a synthetic, water-soluble, cyclic antimicrobial peptide that effectively penetrates human nail. 1 NP213 was designed using host defense peptides (HDP) as a template. 
HDP are essential components of the innate immune response to infection 2,3 and are expressed and produced in skin and nail. NP213 is rapidly fungicidal in a water-based topical formulation and demonstrated superior activity to existing antifungal agents in vitro under conditions representative of those in human nail. Significantly, in ex vivo human nails, and following only 28 days of daily application, NP213 successfully eradicated different strains of Trichophyton rubrum from infected nails, unlike the comparator topical onychomycosis agents ciclopirox and amorolfine. Importantly, there was no evidence of a placebo effect for NP213, as treatment with the water-based vehicle alone did not cause a significant reduction in the number of T. rubrum colony-forming units recovered at the end of the experiments. Additionally, NP213 remained bioactive within human nail for at least 11 months following cessation of application. 1 NP213 effectively penetrates human nail in a water-based film-forming vehicle, without the need for penetration enhancers, optical brighteners, 11,12 or the use of organic solvents; common features in other topical onychomycosis therapeutics and may access the nail by transungual and subungual routes. 16 Onychomycosis (fungal infection of the nail) is a notoriously difficult to treat infection. Most patients receiving any of the limited number of currently available treatments (mainly azole- or allylamine-based) often fail to respond or relapse. The nail is a highly effective biological barrier; hence, delivery of therapeutic agents to the nail and nail bed is challenging. 20,21 Concomitant tinea pedis (athlete's foot) is common and often a source of re-infection. 22,23 Not surprisingly perhaps, the overall efficacy of current antifungal agents in onychomycosis is poor.
20 Additionally, a number of recent reports have highlighted antifungal resistance in dermatophytes as an emerging problem (including resistance to terbinafine and efinaconazole, itraconazole and cross-resistance). Therefore, the need for new, safe, and more effective antifungal agents as onychomycosis therapies is obvious and significant. In this paper, we describe the clinical data generated to date for NP213 from phase I and phase IIa clinical studies. NP213 (Novexatin) preparation NP213 was synthesized as an acetate salt (∼95% purity) by solid-phase synthesis (PolyPeptide Group, France; Almac Group, UK; Ambiopharm, Inc., USA). NP213 was prepared in amorphous crystalline form as a lyophilized powder and its purity was determined by reversed phase-high performance liquid chromatography. NP213 is a backbone-cyclised homopolymer of 7 L-arginine residues with a net charge of + 7. Study designs This paper summarizes our findings from four clinical trials undertaken to assess the safety and efficacy of Novexatin® (NP213). All studies were conducted in accordance with the ethical principles set forth in the Declaration of Helsinki and in compliance with Good Clinical Practice and all applicable regulatory requirements. All subjects were informed of the nature and purpose of clinical studies, and their written informed consent was obtained before study commencement. Phase I/IIa study An initial phase I/IIa study (EudraCT No. 2008-001496-29) was a randomised, placebo-controlled, two sequential parts, first-inhuman clinical trial with two parts (part one double blind, part two single-blind) to assess safety, tolerability, pharmacokinetics (PK), and pharmacodynamics of NP213 in patients with mildto-moderate fungal infection of the toenail (25-75% nail involvement). In this study the causative fungus was not specified. Part two (phase IIa) began only after the results of part one (phase I) confirming tolerability and safety were available. 
Part one enrolled 12 participants with onychomycosis of the toenail that received NP213 or placebo (vehicle) (2:1 ratio), and part two enrolled 48 patients with onychomycosis of the toenail that received NP213 or placebo (vehicle) (2:1 ratio). A significant number of trial participants (19 out of 42 patients; 45.2%) had more severe onychomycosis than the intention-to-treat population (mild-to-moderate onychomycosis) but were nonetheless included in the study. Study analysis was carried out on all patients including a separate analysis of the intention-to-treat population. A more detailed description of the criteria for all of the trials in this paper can be found in the Supplemental Digital Content. Second phase IIa study The second phase IIa clinical trial (ClinicalTrials.gov identifier: NCT02343627) was a randomized, double-blind, placebocontrolled pilot study to assess the safety and efficacy of NP213 solution in patients with mild-to-moderate fungal infection of the toenail (10-50% nail involvement) caused by dermatophytes. The trial enrolled 47 participants that were randomized to receive either NP213 or placebo (3:1 ratio). Maximum exposure study A separate stand-alone maximum exposure study was next conducted in order to confirm previous pharmacokinetic data revealing no systemic levels of NP213 following administration to a single target toenail. This study was carried out in addition to the phase I/IIa and second phase IIa studies with an independent patient population. This study intended to ascertain the extent to which NP213 applied to every toe and finger nail daily for 28 days was absorbed systemically. This trial was an open-label, multiple-dose safety and PK trial of 10% (w/v) NP213 solution in a maximal use setting in healthy adult volunteers and patients with severe distal subungual onychomycosis (DSO) 38 caused by dermatophytes of the fingernails and/or toenails (≥50% nail involvement of both great toenails and at least four other toenails). 
The ideal target product profile of any topical therapy for the treatment of onychomycosis would be to apply the product to all nail and periungual skin as reinfection/recurrence of infection is common 18,19 and can result from subclinical infection of adjacent nails or concomitant tinea pedis (athlete's foot), which is common in patients with onychomycosis. 22,23 This is not possible with current topical onychomycosis treatments. Given the excellent safety profile of NP213 and the lack of systemic absorption of a molecule specifically designed to penetrate nails and not skin, the purpose of this maximal exposure trial was to investigate whether maximal exposure could result in any systemic exposure to NP213 and to determine whether application to all nails and periungual skin could subsequently become part of the treatment regimen. Fungal identification methods Infecting fungi were identified using standard methods including microscopy by KOH or Calcofluor white staining and culturing on selective media as described in Food and Drug Administration (FDA) guidance ((https://www.fda.gov/media/90831/download) and elsewhere. A random sample of isolates from study EudraCT No. 2008-001496-29 were subjected to DNA sequencing for more precise identification of the infectious agent using a 314 bp fragment of the fungal large sub-unit ribosomal RNA gene. 43 Transmission Electron Microscopy (TEM) Trichophyton rubrum NCPF0118 was prepared for TEM by growing in Roswell Park Memorial Institute (RPMI) 1640 medium for 7 days at 30°C. Fungi were exposed to either NP213 (2000 mg/l) or an equivalent volume of sterile-deionized water for 6 hours at 30°C. For TEM analysis, cells in 2.5% glutaraldehyde solution were dehydrated by passing through ethanol and acetone series before being embedded in wax resin, stained with uranyl acetate/lead citrate stains to improve contrast, sectioned at 90 nm, and mounted onto copper grids. 
Micrographs were acquired using a JEM-1400 TEM (Jeol USA Inc., Peabody, MA, USA) at the Microscopy and Histology Core Facility at the University of Aberdeen. Detection of NP213 in human plasma Detection and quantification NP213 in plasma in the PK component of the studies detailed above was conducted by enzymelinked immunosorbent assay (ELISA), developed, and validated by NovaBiotics and Charles River Laboratories (Tranent, UK, and Quebec, Canada). The lower limit of quantification (LLOQ) of NP213 in human plasma (normal, hemolysed, or lipemic) in the ELISA was 1.0 ng/ml in the second phase IIa study and the maximal exposure study and met FDA requirements for bioassay sensitivity. Introduction As well as a promising efficacy profile established during in vitro and ex vivo testing, 1 NP213 has been proven to be safe and well tolerated in a panel of preclinical toxicological studies as required to facilitate human studies. Safety and efficacy has been confirmed in clinical studies in humans. NP213 was not absorbed through skin with any detectable drug plasma levels following topical application to the skin and nails of up to 2800 mg over a 28-day period of daily application. NP213 has now been tested in four clinical studies, including three randomized controlled trials (ClinicalTrials.gov Identifiers: NCT02343627; NCT02933879 and EudraCT No. 2008-001496-29), and in total 238 trial participants have been exposed to topical doses of NP213 with no tolerability or safety concerns. This paper summarizes the findings of the human phase I and phase IIa safety and efficacy studies of NP213 in onychomycosis. Phase I study In the phase I study (EudraCT No. 2008-001496-29), systemic exposure was determined on plasma samples by ELISA on 8 trial participants with onychomycosis subjected to a single topical exposure to NP213 solution (10% (w/v)) on an infected toenail. In all cases, NP213 was not detected in plasma, indicating no systemic exposure. 
No adverse events (AE) were observed in any of the trial participants and there was no evidence of irritation at or around the site of application. Phase IIa study 1 Following on from this phase I safety study, the initial phase IIa clinical study (EudraCT No. 2008-001496-29), in which NP213 solution (10% (w/v)) was applied daily to a single toenail for 28 days, no serious adverse events (SAE) were reported for any of the 48 participants enrolled. NP213 was not detected in plasma, indicating no systemic exposure. In total, seven AE were recorded that were judged to be possibly related to the study drug in six subjects; five subjects with mild erythema of the skin at the treated toenail of short duration and one case of moderate, untreated headache (one patient on two separate days). In the cases of mild erythema, this was almost evenly distributed between patients receiving (NP213 (three cases) or placebo (two cases) (Table S1). Therefore, the cause of the erythema was not a result of exposure to NP213. The NP213 solution and placebo used in this study also contained 20% (w/v) urea. Urea is generally recognized as safe (GRAS), but there are reports that 5% and 20% urea can cause dermal irritation. 44,45 In cases of onychomycosis, the skin adjacent to the infected nail is often damaged or inflamed, sometimes as a result of concomitant tinea pedis, and this may have made trial participants more susceptible to the irritant effect of urea. 46 Therefore, urea was omitted from the NP213 formulation in subsequent trials and a second phase IIa clinical trial using NP213 solution without urea (ClinicalTrials.gov Identifier: NCT02343627) was conducted to confirm that NP213 did not have any associated safety issues. Overall, in this first phase IIa study, administration of NP213 was very well tolerated by all subjects. 
Phase IIa study 2 In the second phase IIa clinical study (ClinicalTrials.gov Identifier: NCT02343627), 47 participants with mild-to-moderate fungal infection of the great toenail received topical once-daily doses of NP213 solution (10% (w/v)) to all infected toenails and 0.5 mm of adjacent skin once-daily for 60 days. There were no SAE in this study and NP213 was well tolerated by all subjects. Of the mild or moderate AE, only one (untreated abdominal pain) was determined to be possibly related to NP213 (Table S2). No PK analyses were performed in this trial. Maximal exposure study Earlier preclinical and clinical studies demonstrated no systemic exposure to NP213 following daily topical exposure to single target toenails, so to confirm the lack of systemic exposure anticipated by dosing multiple nails, a maximal exposure study was conducted in which NP213 solution (10% (w/v)) was applied to all finger and toenails as well as 0.5 mm of adjacent skin once daily for 28 days in seven healthy subjects and 21 participants with severe DSO of the fingernails and/or toenails. NP213 was safe and well tolerated by all participants (healthy and severe DSO) with no SAE and no episodes of application site reactions (skin irritation or sensitization) reported. Importantly, PK analysis revealed plasma concentrations of NP213 were below the LLOQ in all samples tested. Thus, trial participants were exposed to ∼2800 mg (2.8 × 10 9 ng) NP213 over the course of 28 days, with no detectable NP213 found in participants plasma samples. Although this was not an objective of this study, clinical trial sites reported that 14 of the patients with severe DSO had evidence of clear nail growth several months following study completion. Phase IIa study 1 In the first phase IIa study (EudraCT No. 
2008-001496-29), analysis of the culture-based diagnosis of onychomycosis and patient-reported clinical improvement in participants with mildto-moderate onychomycosis (intention-to-treat population), 84.6% of patients reported clinical improvement after 180 days compared to only 20% on placebo, whereas when the number of culture negative samples were also taken into account, 38.4% receiving NP213 reported improvement and had at least one negative culture over the 180 days, whereas this percentage decreased to 10.0% in the patients on placebo (Fig. 1A). When assessing all of the NP213 treated patients in this study demonstrated that 43.3% of participants reported clinical improvement after 180 days, compared to 31.25% on placebo. When the number of culture negative samples was also taken into account, 43.3% receiving NP213 reported clinical improvement and had at least one negative culture over the 180 days, whereas this percentage was 18.75% in the patients on placebo (Fig. 1B). When assessing mild-to-moderate onychomycosis patients that were culture negative after 180 days of the study (Fig. 1A), a marked difference between those on NP213 and those on placebo was observed at the day 180 time-point (152 days postcessation of treatment) as 38.4% were culture negative after 180 days, whereas 10.0% of those on placebo were culture negative. An example of improved nail appearance following 28 days of treatment with NP213 solution (10% (w/v)) after 180 days is shown in Fig. 2). This demonstrates that NP213 remained active in the nail for at least 158 day post-cessation of treatment and possibly longer, as an in vitro study revealed that NP213 remained active in nails in vitro for at least 11 months postcessation of treatment. 1 Analysis of those patients who were culture negative and microscopy negative (Calcofluor white staining) during the study (Fig. 
3) revealed a significant drop in the number of patients who were both culture and microscopy negative; only 23.1% of mild-to-moderate onychomycosis patients and 13.3% of all onychomycosis patients were culture and The nail was treated daily for 28 days with NP213 solution (10% (w/v)). Images were acquired immediately before treatment and 180 post-treatment (152 days following treatment completion). Figure 3. Reported culture negative or culture and microscopy negative after 180 days following daily application of NP213 for the first 28 d to infected toenails. (A) Mild-to-moderate (intention-to-treat) onychomycosis patients; (B) All onychomycosis patients. At the onset of the study (day 0), all patients (100%) were culture positive and microscopy positive for dermatophytes in the nail material/subungual debris sampled. For culture analysis, samples of nail material and subungual debris were obtained immediately prior to treatment initiation and after 180 days. Treatment with NP213 was conducted on day 1 -28 of the study. Samples were plated on modified DTM agar, SDA and SDA + Chl and incubated at 30°C for up to 28 days. Positive cultures were morphologically identified as dermatophytes by an experienced mycologist. Microscopy analysis of samples was conducted by fluorescence microscopy following Calcofluor white staining. (2 x MIC) at 30°C for 6 h; B -Exposed to an equal volume of sterile-deionized water at 30°C for 6 h. T. rubrum NCPF0118 exposed to NP213 (A) was killed resulting in a complete loss of intracellular contents, but with minimal damage to the cell wall. T. rubrum NCPF0118 exposed to sdH 2 O (B) was not killed and an intact cell membrane and normal cell contents are visible. microscopy negative after 180 days. As this study only lasted for 180 days, we do not believe that microscopy is an appropriate method for determining cure as there is insufficient time for the nail to grow and eliminate fungi, whether dead or alive. 
It is known from TEM and other studies with NP213 that antifungal activity causes membrane lysis leading to cell death, and this leaves behind intact fungal cell walls (Fig. 4) giving the appearance of 'normal' fungi when analyzed by light or fluorescence microscopy and that these remain in the nail. 1 Therefore, the fungi killed by NP213 would stain with Calcofluor white (as well as KOH or Periodic Acid-Schiff), generating microscopy-positive appearance of fungi within the nail, albeit not viable. In this study, trial participants reported improvement in the appearance of their nails from day 14 onward, and this was maintained for the remainder of the period of NP213 application. Following cessation of NP213 application, trial participants assessed the appearance of their nails on a weekly basis for a further 9 weeks (Fig. 5). As can be seen from Figure 5, a greater proportion of patients receiving NP213 described an improvement in the appearance of their nails from week 1 until week 9 post-application. In this trial, all microorganisms isolated from trial participant samples by culture were identified morphologically as dermatophytes and by sequencing of a region of the large subunit region of the 28S rDNA gene in a random selection of samples (Table S3) the highest identity was with T. rubrum UWFP763. Phase IIa study 2 In the second phase IIa clinical trial (ClinicalTrials.gov identifier: NCT02343627), from which urea was omitted from the formulation, patients were treated with NP213 or placebo for the first 28 days of the study and then followed for a total of 360 days. All fungal specimens from patients were identified morphologically as T. rubrum, except one case of T. tonsurans and one case of T. mentagrophytes. Of the 32 patients that completed the trial until day 360 (23 receiving NP213 and 9 receiving placebo), 56.5% of patients receiving NP213 were culture negative at day 360, whereas none of the patients receiving placebo were culture negative (Fig. 6). 
When assessing the proportion of patients receiving NP213 over time that were culture negative (Fig. 7), it was demonstrated that even after 28 days (cessation of treatment), 42.4% of patients were culture negative and that this gradually increased over time to a maximum of 56.5% after 360 days. Discussion NP213 (Novexatin®) is a unique compound for the treatment of onychomycosis that addresses both the clinical and cosmetic issues associated with onychomycosis (infection eradica- Figure 6. Proportion of patients that were culture negative following daily application of NP213 for the first 28 days to infected toenails in phase IIa Clinical Trial NCT02343627 after 360 days. Samples of nail material and subungual debris were obtained after 360 days and subjected to fungal culture. Treatment with NP213 was conducted on days 1-28 of the study. Positive cultures were morphologically identified as dermatophytes by an experienced mycologist. Figure 7. Change in the proportion of participants that were culture positive or negative over the 360 days study period following treatment with NP213 for 28 days. Samples of nail material and sub-ungual debris were obtained after 0, 28, 60, 90, 180, 270, and 360 days and subjected to fungal culture. Treatment with NP213 was conducted on days 1 -28 of the study. Positive cultures were morphologically identified as dermatophytes by an experienced mycologist. tion and improved appearance of nails) without the need to add additional substances to the formulation such as penetration enhancers, optical brighteners or organic solvents. NP213 is a fungicidal peptide, delivered in a nail-and skin-friendly, water-based formulation that rapidly kills the causative, infecting agents, 1 thereby resolving infection and quickly stopping the ongoing nail discoloration caused by pigments produced by many of the infecting pathogens, for example, T. rubrum 47,48 and therefore rapidly improving nail appearance. 
In the two phase IIa clinical efficacy studies, NP213 demonstrated favorable cure rates when assessed by culturing infecting fungi from nail samples. However, when cure was also assessed by negative microscopy (Calcofluor white or KOH staining), results were less conclusive. It is our contention that microscopy is not a valid method for determining onychomycosis cure in the case of NP213 for studies that last for ≤ 12 months. This will possibly also be the case for other membraneactive antifungals. NP213 is a membrane-active peptide that lyses the fungal plasma membrane without affecting the cell wall 1 (Fig. 4). Microscopy techniques routinely used for onychomycosis diagnosis (e.g., KOH, Periodic Acid-Schiff, Calcofluor white staining) and in clinical trials for definition of mycological cure simply identify fungal hyphae by staining the fungal cell wall, or nonspecifically within the sample, 42,49,50 and provide no indication of fungal viability. 22,38 Therefore, fungi detected by microscopy may represent fungi that have been successfully killed by treatment with NP213, but that have not been removed from the nail due to insufficient growth of the nail at the time-points used. Microscopy samples from these studies would therefore give positive microscopy results, despite the fungi no longer being viable, severely impacting the apparent efficacy of NP213 when using microscopy as a measure of treatment success. 
Trial participants will have been confirmed as having onychomycosis by microscopy as well as culture as principal inclusion criteria for entry into the trial, and it is therefore inevitable that fungal hyphae (dead or alive) will be found in nail samples if sufficient time has not elapsed for full nail out-growth, which could take significantly longer than the period of the trial (180 or 360 days in the case of the two phase IIa trials reported herein), especially in the case of involvement of the great toenail; the target nail in most onychomycosis clinical trials. We believe that with culture, considered by ourselves and many others to be the 'Gold Standard' indicator of onychomycosis cure, 38,42,51,52,64,65 there is a much higher degree of certainty that isolated fungi represent identification of an ongoing infection as the fungi are viable. We and others contend that the success or failure of any treatment of onychomycosis is more accurately indicated by using culturing techniques than microscopy. 53 However, we also acknowledge that culture techniques are not perfect, 54-56 as false-negatives can occur (no culturable fungi, but nail remains infected). However, it would be preferable to take multiple samples for culture to reduce the risk of false negatives, rather than rely on microscopy as a definition of cure. Culture is the only one of the current, frequently used methods for onychomycosis diagnosis that determine whether viable fungi are present in the nail, something that is vitally important to know when assessing the efficacy of any fungicidal antifungal agent, including NP213. 
Current FDA guidance for clinical trials for the treatment of fungally infected nails places considerably less emphasis on microscopy and more on evidence of fungal viability (two negative cultures from the same nail or negative stain with concurrent negative culture) as well as visual appearance of symptom resolution (at least 12 mm or 120 mm 2 ) increase in clear nail 12 months after first treatment (or complete clearance if < 12 mm distal nail was involved prior to treatment) 57 (https://www.fda.gov/media/90831/download). Treatment with oral terbinafine or itraconazole remain the Gold Standard treatments for onychomycosis, 38,58 but topical therapies are frequently recommended for the treatment of less severe cases of onychomycosis. 9,16,53,59 A comparison of the results from the first phase IIa study with other onychomycosis clinical trials to examine mycological cure (negative culture and negative microscopy) demonstrated that NP213 (23.1% mycological cure after 180 days in patients with mild-to-moderate onychomycosis) even though applied for only 4 weeks, compared well with the topical onychomycosis therapies ciclopirox (29 and 36% mycological cure after 48 weeks following daily application for 48 weeks 60 ) and amorolfine (8% mycological cure after 10 months following weekly application for 9 months 61 ). By assessing cure after 180 days, where treatment with NP213 only took place over the first 28 days of the study, any NP213 on the surface of nails would have been removed, whereas in the case of ciclopirox and amorolfine treatment continued until much closer to the trial end-point and therefore residual ciclopirox or amorolfine could have remained on the surface of the nails (rather than within the nails) and could have adversely affected recovery of remaining infecting fungi from the nails. 
The rate of mycological cure for NP213 (23.1%) was greater than that achieved for a topical solution of terbinafine (12.7-18.8%) applied daily for either 24 or 48 weeks. 62 A comparison of the levels of cure by culture and mycological cure for topical efinaconazole or tavaborole with NP213 demonstrated that NP213 was not superior, but efinaconazole (53.4 and 55.2% mycological cure) and tavaborole (31.1% mycological cure) were applied for 13 times longer than NP213 (52 weeks compared to 4 weeks). 63,64 If NP213 had been applied for a longer duration, it is likely that superior rates of mycological cure would have been achieved. If the study participants in the second phase IIa clinical trial had been followed for more than 360 days, it is possible that the proportion of culture negative patients may have increased further from the 56.5% that were culture negative after 360 days, as it is known from in vitro studies that NP213 remains active and bioavailable in the nail for at least 365 days. 1 When trial participants in this study were assessed for negative microscopy (KOH mount), only 13% of patients receiving NP213 were KOH negative after 360 days, further demonstrating that assessment of cure by microscopy is not appropriate for determining the efficacy of NP213 and probably other onychomycosis therapies. Thus, using a definition of cure based on culture alone, a cure rate of 56.5% was achieved for NP213 after 360 days. A comparison with the pivotal terbinafine clinical trial for onychomycosis 65 revealed that after 12 or 24 weeks of oral terbinafine treatment, approximately 80% of trial participants achieved negative culture after 24 weeks (168 days), whereas treatment with NP213 for only 28 days (4 weeks) achieved negative culture in 50% of cases at approximately the same time-point (180 days). 
Similar to terbinafine, in the two phase III studies with tavaborole, daily topical treatment for 48 weeks achieved negative culture values of 87.0 and 85.4% (57), whereas treatment with NP213 for only 28 days (4 weeks) achieved negative culture values of 56.5% after 360 days. Prolonged treatment with NP213 may have achieved greater rates of negative culture. A phase IIb study to determine the efficacy of NP213 (manuscript in preparation), in which trial participants were exposed to one or two regimens of NP213 for 56 days with daily application, demonstrated that 27% (one regimen) and 36% (two regimens) of patients achieved negative culture at the end of the 365-day study period, which is similar to that achieved in the two phase IIa clinical trials described above. A range of topical products for the treatment of onychomycosis are available as both prescription-only products (e.g., Jublia® and Kerydin® ) or as over-thecounter (OTC) products (e.g., Curanail®, Excilor® and other generic products). Both patients and clinicians often express a preference for topical agents to treat onychomycosis, 64,66-68 but the treatment costs associated with prescription-only products, along with the reduced efficacy compared to oral treatments, is off-putting to many health services and insurers (e.g., Medicare and Medicaid). It was estimated that (using 2016 prices and complete cure as a measure of efficacy) each successfully treated case of onychomycosis cure with tavaborole would cost $176,478.72, whereas each successfully treated case of onychomycosis cure with efinaconazole would cost $72,500.34. 69 Additionally, onychomycosis is more common in the elderly, and they are more often on multiple medications (polypharmacy), which can make the prescription of oral medications for onychomycosis complex, if not impossible. 
70,71 However, the cheaper cost of OTC products is generally associated with even lower levels of efficacy, especially for those products with unknown active ingredients and those for which no scientific evidence of efficacy is available. There are a number of routes to market for NP213 as an appropriately priced, highly differentiated, safe product with significantly improved efficacy potential over other topical treatments; a solution to a common condition that cannot come soon enough for tens of millions of onychomycosis sufferers. Supplementary material Supplementary data are available at MMYCOL online.
# coding: utf-8
'''
Top-level statement objects.
'''

from .ast import deproxy, nodeify
from .joins import Join

# TODO: Constructor docs.

class Statement:
    '''The top-level AST node type, which must facilitate value collection.'''

    def write(self):
        '''Return an SQL, value list tuple of this statement's serialization'''
        raise NotImplementedError()

class CreateStatement(Statement):
    '''A lazy SQL `CREATE` statement.'''

    def __init__(self, target):
        self.target = deproxy(target)

    def write(self):
        # CREATE statements never carry bound values, hence the empty tuple.
        return ' '.join((
            'CREATE', self.target.object_type, 'IF NOT EXISTS',
            self.target.describe()
        )), tuple()

class SelectStatement(Statement):
    '''An SQL `SELECT` statement.'''

    def __init__(self, target, condition=True, modifiers=tuple(), distinct=False):
        self.target, self.condition = deproxy(target), nodeify(condition)
        self.modifiers = modifiers
        self.distinct = distinct

    def write(self):
        # Joins need a name policy so condition columns are qualified;
        # plain tables serialize their own column selection.
        name_policy = self.target.name_column if isinstance(self.target, Join) else None
        values = list()

        selection = '*'
        if not isinstance(self.target, Join):
            selection = self.target.serialize_selection()

        sql = ' '.join((
            'SELECT', selection,
            'FROM', self.target.serialize_source(values),
            'WHERE', nodeify(self.condition).serialize(values, name_policy=name_policy),
            *(modifier.serialize(values) for modifier in self.modifiers)
        ))
        return sql, values

class InsertStatement(Statement):
    '''An SQL `INSERT` statement.'''

    def __init__(self, target, values):
        '''
        ::target The target object reference.
        ::values A list of value, object-reference-esq tuples.
        '''
        self.target = deproxy(target)
        self.values = [(nodeify(value[0]), value[1]) for value in values]

    def write(self):
        values = list()
        sql = ' '.join((
            'INSERT INTO', self.target.serialize(values),
            '(', ', '.join(value[1].name for value in self.values), ') VALUES (',
            ', '.join(value[0].serialize(values) for value in self.values),
            ') RETURNING ', self.target.primary_key.serialize()
        ))
        return sql, values

class DeleteStatement(Statement):
    '''An SQL 'DELETE FROM' statement.'''

    def __init__(self, host, condition, cascade):
        # nodeify() the condition for consistency with the other statement
        # classes (it is idempotent; SelectStatement applies it twice), so a
        # bare Python value such as True is also accepted here.
        self.host, self.condition = deproxy(host), nodeify(condition)
        self.cascade = cascade
        # TODO: Handle cascade options.

    def write(self):
        values = list()
        sql = ' '.join((
            'DELETE FROM', self.host.serialize(values),
            'WHERE', self.condition.serialize(values)
        ))
        return sql, values

class UpdateStatement(Statement):
    '''An SQL 'UPDATE' statement.'''

    def __init__(self, target, assignments, condition):
        self.target, self.condition = deproxy(target), nodeify(condition)
        # Materialize the assignments as a list. The previous generator
        # expression was exhausted after the first write() call, so any
        # subsequent write() silently produced an empty SET clause.
        self.assignments = [
            (deproxy(target), nodeify(value)) for target, value in assignments
        ]

    def write(self):
        values, assignment_expressions = list(), list()
        for target, value in self.assignments:
            assignment_expressions.append(
                ' = '.join((target.name, value.serialize(values)))
            )

        # NOTE(review): unlike the other statements, the target here is
        # serialized without the values list — confirm Table.serialize()
        # treats the argument as optional.
        sql = ' '.join((
            'UPDATE', self.target.serialize(),
            'SET', ', '.join(assignment_expressions),
            'WHERE', self.condition.serialize(values)
        ))
        return sql, values
def wait_for(*args, loop=None):
    '''Wrap each argument in a fully-resolved future and gather them all.'''
    futures = [FullyResolved(wrap_in_future(arg, loop=loop)) for arg in args]
    return gather(*futures)
<reponame>copslock/broadcom_cpri<filename>sdk-6.5.20/src/soc/ltsw/config.c<gh_stars>0
/*! \file config.c
 *
 * Configuration management.
 * This file contains yaml event callback and database for configuration.
 */
/*
 * This license is set out in https://raw.githubusercontent.com/Broadcom-Network-Switching-Software/OpenBCM/master/Legal/LICENSE file.
 *
 * Copyright 2007-2020 Broadcom Inc. All rights reserved.
 */

#include <bsl/bsl.h>
#include <soc/ltsw/config.h>
#include <shr/shr_debug.h>
#include <bcmcfg/bcmcfg_read_handler.h>
#include <bcmdrd/bcmdrd_dev.h>
#include <sal/sal_mutex.h>

/******************************************************************************
 * Local definitions
 */

#define BSL_LOG_MODULE BSL_LS_SOC_COMMON

#define CONFIG_YAML_LEVEL_MAX BCMCFG_MAX_LEVEL
#define CONFIG_UNIT_MAX BCMDRD_CONFIG_MAX_UNITS
#define CONFIG_PORT_MAX BCMDRD_CONFIG_MAX_PORTS
/* Number of buckets in the per-unit property hash table. */
#define CONFIG_HASH_MAX_COUNT 1024
/* Maximum length (including NUL) of a translated property name. */
#define CONFIG_NAME_MAX 128

/* Configuration property field (hash bucket chain node). */
typedef struct config_fld_s {
    /* Next property field with same hash. */
    struct config_fld_s *next;

    /* Configuration property name. */
    char *name;

    /* Configuration property value. */
    char *value;
} config_fld_t;

/* Configuration database (one per unit). */
typedef struct config_db_s {
    /* Protection mutex. */
    sal_mutex_t mutex;

    /* Property field data. */
    config_fld_t *fld[CONFIG_HASH_MAX_COUNT];
} config_db_t;

/* Configuration module information. */
typedef struct config_info_s {
    /* Initialized flag. */
    int inited;

    /* Property field data. */
    config_db_t config_db[CONFIG_UNIT_MAX];
} config_info_t;

/* static */
static config_info_t config_info = { 0 };

/* State of configuration table for Yaml parser. */
typedef enum config_tbl_state_e {
    /* Unit - can transit to TABLE state. */
    UNIT,

    /* Table - can transit to FIELD state or KEY state. */
    TABLE,

    /* Key - can transit to FIELD state. */
    KEY,

    /* Field. */
    FIELD
} config_tbl_state_t;

/* Type string of configuration table.
 * Index order must match config_tbl_type_t below.
 */
static const char *config_tbl_type_str[] = {
    "global",
    "port"
};

/* Type of configuration table. */
typedef enum config_tbl_type_e {
    /* Global configuration in a unit. */
    CONFIG_TBL_GLOBAL,

    /* Per-port configuration in a unit. */
    CONFIG_TBL_PORT,

    /* Count as a constraint. */
    CONFIG_TBL_COUNT
} config_tbl_type_t;

/* Configuration read stack information. */
typedef struct config_stk_info_s {
    /* Table parser state. */
    config_tbl_state_t state;
} config_stk_info_t;

/* Range Information used for key range specification. */
typedef struct config_key_range_s {
    /* Min key value. */
    uint32_t min;

    /* Max key value. */
    uint32_t max;

    /* Step value. */
    uint32_t step;

    /* Count. */
    uint32_t count;

    /* True if any key value is available. */
    bool asterisk;

    /* Next range. */
    struct config_key_range_s *next;
} config_key_range_t;

/*
 * Configuration key field sequence parsing mode.
 *
 * Single element mode
 * a. Single element without square bracket.
 *    2                        # 2
 * b. All digits array within first square bracket are treated as collection
 *    of single elements.
 *    [9, 12, 15]              # 9, 12, 15
 *    [9, 12]                  # 9, 12
 *
 * Range mode with two square brackets
 * c. range without step (2 element within 2nd square bracket, [min, max])
 *    [[9, 12], [14, 16]]      # 9, 10, 11, 12, 14, 15, 16
 * d. range with step (3 element within 2nd square bracket, [min, max, step])
 *    [[9, 18, 3],
 *     [22, 26, 2]]            # 9, 12, 15, 18, 22, 24, 26
 * e. b + c
 *    [9, [12, 15], 18]        # 9, 12, 13, 14, 15, 18
 * f. b + d
 *    [9, [12, 21, 3]]         # 9, 12, 15, 18, 21
 * g. c + d
 *    [[9, 12],
 *     [15, 21, 3]]            # 9, 10, 11, 12, 15, 18, 21
 * h. b + c + d
 *    [[9, 12], 13,
 *     [15, 21, 3], 23]        # 9, 10, 11, 12, 13, 15, 18, 21, 23
 */
typedef enum config_key_seq_mode_e {
    /* Mode for single element. */
    CONFIG_KEY_SEQ_MODE_SINGLE,

    /* Mode for key field range specification. */
    CONFIG_KEY_SEQ_MODE_RANGE
} config_key_seq_mode_t;

/* Configuration table parser user data. */
typedef struct config_tbl_user_s {
    /* Reader stack.
     */
    config_stk_info_t info[CONFIG_YAML_LEVEL_MAX];

    /* Unit set. */
    bool unit[CONFIG_UNIT_MAX];

    /* Current key range list of configuration table. */
    config_key_range_t *key_range_list;

    /* Table type. */
    config_tbl_type_t type;

    /* Current mode of parsing sequence event for key field. */
    config_key_seq_mode_t key_seq_mode;

    /* Current key range of configuration table. */
    config_key_range_t key_range;

    /* Current field name of configuration property. */
    char *fld_name;
} config_tbl_user_t;

/* Parse information. */
static bcmcfg_parse_info_t config_parse_info = {
    .mode = PARSE_MODE_GEN,
    .target_unit = BSL_UNIT_UNKNOWN
};

/* Macro to check config mode is valid */
#define CONFIG_MODE_VALID (PARSE_MODE_GEN == config_parse_info.mode)

/* Declarations of the functions and structure for yaml read handler. */
static int
config_read_setup(bool start, bcmcfg_parse_info_t *info, void *user_data);
static int
config_read_scalar(const char *value, bcmcfg_read_context_t *context,
                   void *user_data);
static int
config_read_map(bool start, bcmcfg_read_context_t *context, void *user_data);
static int
config_read_seq(bool start, bcmcfg_read_context_t *context, void *user_data);
static int
config_read_doc(bool start, bcmcfg_read_context_t *context, void *user_data);
static int
config_read_abort(bcmcfg_read_context_t *context, void *user_data);

static config_tbl_user_t config_tbl_user;

/* Yaml read handler.
 */
const bcmcfg_read_handler_t config_read_hdl = {
    .key = "bcm_device",
    .setup = config_read_setup,
    .scalar = config_read_scalar,
    .map = config_read_map,
    .seq = config_read_seq,
    .doc = config_read_doc,
    .abort = config_read_abort,
    .user_data = (void *)&config_tbl_user
};

/*
 * TLV MESSAGE for HA usage
 *
 * +------------------------------------------+--------------+
 * |                  TLV(s)                  | TLV_TYPE_END |
 * +------------------------------------------+--------------+
 *
 * TLV
 * 0      7 8           23 24
 * +------+--------------+-----------------------------------+
 * | TYPE |    LENGTH    |               VALUE               |
 * +------+--------------+-----------------------------------+
 *                       |<------------- LENGTH------------->|
 *
 * TLV Message
 * - A TLV message contains 0 or more TLV elements.
 * - A TLV message is terminated by a special TLV with CONFIG_TLV_MSG_TYPE_END.
 *
 * TLV Fields
 * Type - indicates type of information, defined as "config_tlv_msg_type_t".
 * Length - is the size, in bytes, of the data to follow for this element,
 *          defined as "config_tlv_msg_length_t".
 * Value - is the variable sized set of bytes which contains
 *         data for this element.
 */

/* TLV Message Type for Configuration Field Name. */
#define CONFIG_TLV_MSG_TYPE_FLD_NAME (0xa)

/* TLV Message Type for Configuration Field Value. */
#define CONFIG_TLV_MSG_TYPE_FLD_VALUE (0xb)

/* TLV type to indicate end of message.
 */
#define CONFIG_TLV_MSG_TYPE_END (0xfe)

/* Typedef config_tlv_msg_type_t */
typedef uint8_t config_tlv_msg_type_t;

/* Typedef config_tlv_msg_length_t */
typedef uint16_t config_tlv_msg_length_t;

/* Size of TLV END Message */
#define CONFIG_TLV_MSG_END_SIZE (sizeof(uint8_t))

/* Macro to check message type is valid */
#define CONFIG_TLV_MSG_TYPE_VALID(type) \
    ((type == CONFIG_TLV_MSG_TYPE_FLD_NAME) || \
     (type == CONFIG_TLV_MSG_TYPE_FLD_VALUE))

/* Configuration TLV Message data structure */
typedef struct config_tlv_msg_s {
    /* Start of message in buffer */
    uint8_t *start;

    /* End of message in buffer */
    uint8_t *end;

    /* End of buffer */
    uint8_t *buffer_end;

    /* Current ptr in buffer to write/read */
    uint8_t *cur_ptr;

    /* Ptr to current TLV length in buffer */
    uint8_t *tlv_length_ptr;

    /* Length for current TLV */
    config_tlv_msg_length_t tlv_length;

    /* Bytes left to read in current TLV */
    int tlv_left;
} config_tlv_msg_t;

/*
 * Port configuration database structure. This is mainly used for
 * config show command.
 */
typedef struct config_port_db_s {
    /* Port field data. */
    config_fld_t *fld[CONFIG_PORT_MAX + 1];
} config_port_db_t;

static config_port_db_t config_port_db[CONFIG_UNIT_MAX];

/* Configuration Lock.
 * NOTE(review): deliberately a no-op while db->mutex is still NULL,
 * i.e. before/after module init — confirm callers rely on this.
 */
#define CONFIG_LOCK(u) \
    do { \
        config_db_t *db = &(config_info.config_db[u]); \
        if (db->mutex) { \
            sal_mutex_take(db->mutex, SAL_MUTEX_FOREVER); \
        } \
    } while (0)

#define CONFIG_UNLOCK(u) \
    do { \
        config_db_t *db = &(config_info.config_db[u]); \
        if (db->mutex) { \
            sal_mutex_give(db->mutex); \
        } \
    } while (0)

/******************************************************************************
 * Private functions
 */

/*!
 * \brief Hashing string function.
 *
 * djb2-style string hash (h = h * 33 + c, seeded with 5381), additionally
 * folding in the string length, reduced modulo the bucket count.
 *
 * \param [in] str String.
 *
 * \retval Hash id.
 */
static int
config_field_hash(const char *str)
{
    uint32_t h = 5381;
    int i = 0;

    while (str[i] != 0) {
        h = ((h << 5) + h) + str[i];
        i++;
    }
    h = (((h << 5) + h) + i) % CONFIG_HASH_MAX_COUNT;

    return h;
}

/*!
 * \brief Find configuration property field.
 *
 * \param [in] unit Unit number.
 * \param [in] name Property field name.
 *
 * \return Pointer to property field.
 */
static config_fld_t *
config_field_find(int unit, const char *name)
{
    config_db_t *cd = &(config_info.config_db[unit]);
    config_fld_t *fld = NULL;
    int hash = config_field_hash(name);

    CONFIG_LOCK(unit);
    /* Walk the bucket chain looking for an exact name match. */
    fld = cd->fld[hash];
    while (fld != NULL) {
        if (sal_strcmp(fld->name, name) == 0) {
            break;
        }
        fld = fld->next;
    }
    CONFIG_UNLOCK(unit);

    return fld;
}

/*!
 * \brief Insert a configuration field.
 *
 * If a field with the same name already exists, its value is replaced
 * (with a warning); otherwise a new field is prepended to the bucket chain.
 *
 * \param [in] unit Unit number.
 * \param [in] name Field name string.
 * \param [in] value Field value string.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_field_insert(int unit, const char *name, const char *value)
{
    config_db_t *cd = &(config_info.config_db[unit]);
    config_fld_t *found_fld = NULL;
    config_fld_t *fld = NULL;
    char *fld_value = NULL;
    int hash = config_field_hash(name);
    uint32_t alloc_size = 0;

    SHR_FUNC_ENTER(unit);

    CONFIG_LOCK(unit);

    /* Try to find an existing configuration field. */
    found_fld = cd->fld[hash];
    while (found_fld != NULL) {
        if (sal_strcmp(found_fld->name, name) == 0) {
            break;
        }
        found_fld = found_fld->next;
    }
    if (found_fld != NULL) {
        LOG_WARN(BSL_LOG_MODULE,
                 (BSL_META_U(unit,
                             "Overwriting duplicate config: %s = %s.\n"),
                  name, value));
        /* Allocate the replacement value before freeing the old one so
         * the entry stays intact if the allocation fails. */
        alloc_size = sal_strlen(value) + 1;
        SHR_ALLOC(fld_value, alloc_size, "ltswCfgValue");
        SHR_NULL_CHECK(fld_value, SHR_E_MEMORY);
        sal_memcpy(fld_value, value, alloc_size);
        SHR_FREE(found_fld->value);
        found_fld->value = fld_value;
        SHR_EXIT();
    }

    /* Insert a new configuration field.
     */
    alloc_size = sizeof(config_fld_t);
    SHR_ALLOC(fld, alloc_size, "ltswCfgData");
    SHR_NULL_CHECK(fld, SHR_E_MEMORY);
    sal_memset(fld, 0, alloc_size);

    alloc_size = sal_strlen(name) + 1;
    SHR_ALLOC(fld->name, alloc_size, "ltswCfgName");
    SHR_NULL_CHECK(fld->name, SHR_E_MEMORY);
    sal_memcpy(fld->name, name, alloc_size);

    alloc_size = sal_strlen(value) + 1;
    SHR_ALLOC(fld->value, alloc_size, "ltswCfgValue");
    SHR_NULL_CHECK(fld->value, SHR_E_MEMORY);
    sal_memcpy(fld->value, value, alloc_size);

    /* Prepend to the bucket chain. */
    fld->next = cd->fld[hash];
    cd->fld[hash] = fld;

exit:
    if (SHR_FUNC_ERR()) {
        /* Roll back partial allocations on any failure path. */
        if (fld != NULL) {
            SHR_FREE(fld->name);
            SHR_FREE(fld->value);
            SHR_FREE(fld);
        }
        SHR_FREE(fld_value);
    }
    CONFIG_UNLOCK(unit);
    SHR_FUNC_EXIT();
}

/*!
 * \brief Delete all configuration property fields.
 *
 * Frees every field in every hash bucket of the unit's database.
 * NOTE(review): no CONFIG_LOCK here — the caller (config_info_cleanup)
 * takes the lock; confirm no other caller exists.
 *
 * \param [in] unit Unit number.
 *
 * \return None.
 */
static void
config_field_delete_all(int unit)
{
    config_db_t *cd = &(config_info.config_db[unit]);
    config_fld_t *fld = NULL;
    int i = 0;

    for (i = 0; i < CONFIG_HASH_MAX_COUNT; i++) {
        while (cd->fld[i] != NULL) {
            fld = cd->fld[i];
            cd->fld[i] = fld->next;
            SHR_FREE(fld->name);
            SHR_FREE(fld->value);
            SHR_FREE(fld);
        }
    }

    return;
}

/*!
 * \brief Translate configuration field name.
 *
 * Builds the database key "<fld_name>_<type>_<key>" (or "..._*" when any
 * key value is accepted).
 *
 * \param [in] type Type of configuration table.
 * \param [in] asterisk Any key value.
 * \param [in] key Key value.
 * \param [in] fld_name Field name string.
 * \param [out] db_name New field name string stored in configuration DB.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_field_name_translate(config_tbl_type_t type, bool asterisk, int key,
                            const char *fld_name, char *db_name)
{
    int size = 0;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    if (asterisk) {
        size = sal_snprintf(db_name, CONFIG_NAME_MAX, "%s_%s_%s",
                            fld_name, config_tbl_type_str[type], "*");
    } else {
        size = sal_snprintf(db_name, CONFIG_NAME_MAX, "%s_%s_%d",
                            fld_name, config_tbl_type_str[type], key);
    }
    /* sal_snprintf returning >= buffer size means the name was truncated. */
    if (size >= CONFIG_NAME_MAX) {
        LOG_ERROR(BSL_LOG_MODULE,
                  (BSL_META("Unsupported string length for %s_%s%d.)!\n"),
                   fld_name, config_tbl_type_str[type], key));
        SHR_IF_ERR_EXIT(SHR_E_PARAM);
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Cleanup configuration module information.
 *
 * Frees all fields on every unit, destroys the per-unit mutexes and
 * zeroes the module state.
 *
 * \param None
 *
 * \retval None
 */
static void
config_info_cleanup(void)
{
    int unit = 0;
    config_db_t *cd = NULL;

    config_info.inited = 0;

    /* Initialize configuration database. */
    for (unit = 0; unit < CONFIG_UNIT_MAX; unit++) {
        CONFIG_LOCK(unit);
        config_field_delete_all(unit);
        CONFIG_UNLOCK(unit);
        cd = &(config_info.config_db[unit]);
        if (cd->mutex) {
            sal_mutex_destroy(cd->mutex);
            cd->mutex = NULL;
        }
    }

    /* Initialize configuration information. */
    sal_memset(&config_info, 0, sizeof(config_info_t));

    return;
}

/*!
 * \brief Parse a string to a uint32.
 *
 * \param [in] str String.
 * \param [out] value Output value.
 * \param [in] base Numeric base.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_str_to_uint32(const char *str, uint32_t *value, int base)
{
    uint32_t val;
    char *end;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    val = sal_strtoul(str, &end, base);
    /* A non-empty input with trailing unconsumed characters is invalid. */
    if (*str && *end) {
        /* Invalid conversion */
        LOG_ERROR(BSL_LOG_MODULE,
                  (BSL_META("Unable to convert %s\n"), str));
        SHR_IF_ERR_EXIT(SHR_E_PARAM);
    } else {
        *value = val;
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add a unit by number.
 *
 * \param [out] user Configuration user data.
 * \param [in] unit Unit number.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_read_add_unit(config_tbl_user_t *user, int unit)
{
    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    /* Add a unit. */
    if (bcmdrd_dev_exists(unit)) {
        user->unit[unit] = true;
    } else {
        SHR_IF_ERR_EXIT(SHR_E_NOT_FOUND);
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add a unit by numeric string.
 *
 * \param [in] user Configuration user data.
 * \param [in] str Unit number (numeric string).
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_read_add_unit_num(config_tbl_user_t *user, const char *str)
{
    uint32_t value;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    SHR_IF_ERR_EXIT
        (config_str_to_uint32(str, &value, 0));
    SHR_IF_ERR_EXIT
        (config_read_add_unit(user, value));

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add all units that currently exist.
 *
 * \param [in] user Configuration user data.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_read_add_all_units(config_tbl_user_t *user)
{
    int unit;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    for (unit = 0; unit < CONFIG_UNIT_MAX; unit++) {
        if (!bcmdrd_dev_exists(unit)) {
            continue;
        }
        SHR_IF_ERR_EXIT
            (config_read_add_unit(user, unit));
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add units that correspond to the given name.
 *
 * Performs a prefix match of \c str against each existing device name,
 * so one name string may select several units.
 *
 * \param [in] user Configuration user data.
 * \param [in] str Unit name.
 * \param [out] found Name found flag.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_read_add_unit_name(config_tbl_user_t *user, const char *str, bool *found)
{
    int len = 0;
    int unit = 0;
    int rv = 0;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    len = sal_strlen(str);
    *found = false;
    for (unit = 0; unit < CONFIG_UNIT_MAX; unit++) {
        if (!bcmdrd_dev_exists(unit)) {
            continue;
        }
        if (!sal_strncmp(bcmdrd_dev_name(unit), str, len)) {
            rv = config_read_add_unit(user, unit);
            if (SHR_SUCCESS(rv)) {
                *found = true;
            } else {
                SHR_IF_ERR_EXIT(rv);
            }
        }
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add a unit by string which may be an integer or a device name.
 *
 * \param [in] user Configuration user data.
 * \param [in] str Unit string.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_read_add_unit_str(config_tbl_user_t *user, const char *str)
{
    bool found = false;
    int rv;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    if (!sal_strcmp(str, "*")) {
        /* "*" selects every existing unit. */
        SHR_IF_ERR_EXIT
            (config_read_add_all_units(user));
    } else {
        /* Try a device-name match first, then fall back to a number. */
        rv = config_read_add_unit_name(user, str, &found);
        if (SHR_SUCCESS(rv) && !found) {
            rv = config_read_add_unit_num(user, str);
        }
        SHR_IF_ERR_EXIT(rv);
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Attach configuration property field name.
 *
 * Stores a private copy of \c name in the user data; it is consumed and
 * freed by config_field_add().
 *
 * \param [in] user Global user data.
 * \param [in] name Field Name.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure (SHR_E_INTERNAL if a name is already attached).
 */
static int
config_field_attach(config_tbl_user_t *user, const char *name)
{
    uint32_t alloc_size = 0;
    char *str = NULL;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    if (user->fld_name != NULL) {
        SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
    }

    alloc_size = sal_strlen(name) + 1;
    SHR_ALLOC(str, alloc_size, "ltswCfgName");
    SHR_NULL_CHECK(str, SHR_E_MEMORY);
    sal_memcpy(str, name, alloc_size);
    user->fld_name = str;

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add a configuration field.
 *
 * Inserts the previously attached field name (with per-key name translation
 * for non-global tables) into every selected unit's database, then releases
 * the attached name.
 *
 * \param [in] user Configuration user data.
 * \param [in] value Field value string.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_field_add(config_tbl_user_t *user, const char *value)
{
    int i = 0;
    int j = 0;
    config_key_range_t *key = NULL;
    char name[CONFIG_NAME_MAX];

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    if (user->fld_name == NULL) {
        SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
    }

    /* Add configuration data into hash table.
     */
    for (i = 0; i < CONFIG_UNIT_MAX; i++) {
        if (!user->unit[i]) {
            continue;
        }
        if (user->type == CONFIG_TBL_GLOBAL) {
            SHR_IF_ERR_EXIT
                (config_field_insert(i, user->fld_name, value));
        } else {
            if (user->key_range_list == NULL) {
                SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
            }
            /* Expand every key range into individual keyed entries. */
            key = user->key_range_list;
            while (key) {
                for (j = key->min; j <= key->max; j += key->step) {
                    SHR_IF_ERR_EXIT
                        (config_field_name_translate(user->type,
                                                     key->asterisk,
                                                     j,
                                                     user->fld_name,
                                                     name));
                    SHR_IF_ERR_EXIT
                        (config_field_insert(i, name, value));
                }
                key = key->next;
            }
        }
    }

exit:
    /* The attached name is single-use: free it on success and failure. */
    SHR_FREE(user->fld_name);
    SHR_FUNC_EXIT();
}

/*!
 * \brief Handle a configuration field.
 *
 * Dispatches a scalar event: a map key attaches a field name, a map value
 * commits the name/value pair to the database.
 *
 * \param [in] info Reader data.
 * \param [in] user Configuration user data.
 * \param [in] str Field string.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_field_handle(bcmcfg_read_level_info_t *info, config_tbl_user_t *user,
                    const char *str)
{
    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    switch (info->state) {
    case BCMCFG_READ_STATE_MAP:
        SHR_IF_ERR_EXIT
            (config_field_attach(user, str));
        break;
    case BCMCFG_READ_STATE_MAP_V:
        SHR_IF_ERR_EXIT
            (config_field_add(user, str));
        break;
    default:
        SHR_IF_ERR_CONT(SHR_E_INTERNAL);
        break;
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Initialize key range list.
 *
 * Frees every element of the user's key range list.
 *
 * \param [in] user Configuration user data.
 *
 * \retval None.
 */
static void
config_key_range_list_init(config_tbl_user_t *user)
{
    config_key_range_t *range = NULL;

    while (user->key_range_list) {
        range = user->key_range_list;
        user->key_range_list = range->next;
        SHR_FREE(range);
    }
    user->key_range_list = NULL;

    return;
}

/*!
 * \brief Initialize current range information.
 *
 * Zeroes the scratch key range and resets the sequence parsing mode.
 *
 * \param [out] user Configuration user data.
 *
 * \retval None.
 */
static void
config_key_range_init(config_tbl_user_t *user)
{
    sal_memset(&(user->key_range), 0, sizeof(config_key_range_t));
    user->key_seq_mode = CONFIG_KEY_SEQ_MODE_SINGLE;

    return;
}

/*!
 * \brief Initialize current unit set.
 *
 * \param [out] user Configuration user data.
 *
 * \retval None.
 */
static void
config_unit_init(config_tbl_user_t *user)
{
    sal_memset(&(user->unit), 0, sizeof(bool) * CONFIG_UNIT_MAX);

    return;
}

/*!
 * \brief Attach configuration key range.
 *
 * Copies the (single, unlinked) range into a new list element and prepends
 * it to the user's key range list.
 *
 * \param [in] user Configuration user data.
 * \param [in] key_range Key range.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_key_attach(config_tbl_user_t *user, config_key_range_t *key_range)
{
    uint32_t alloc_size = 0;
    config_key_range_t *key = NULL;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    if (key_range->next != NULL) {
        SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
    }

    alloc_size = sizeof(config_key_range_t);
    SHR_ALLOC(key, alloc_size, "ltswCfgKeyRange");
    SHR_NULL_CHECK(key, SHR_E_MEMORY);
    sal_memcpy(key, key_range, alloc_size);
    key->next = user->key_range_list;
    user->key_range_list = key;

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Construct configuration key range.
 *
 * \param [in] info Reader data.
 * \param [in] user Configuration user data.
 * \param [in] str Field string.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
*/
static int
config_key_range_construct(bcmcfg_read_level_info_t *info,
                           config_tbl_user_t *user,
                           const char *str)
{
    uint32_t value = 0;
    config_key_range_t *key_range = NULL;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    key_range = &(user->key_range);
    /* In single mode only one element per key is legal. */
    if (user->key_seq_mode == CONFIG_KEY_SEQ_MODE_SINGLE) {
        if (key_range->count > 0) {
            SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
        }
    }

    if (!sal_strcmp(str, "*")) {
        /* Wildcard key is only valid outside a [min,max,step] sequence. */
        if (user->key_seq_mode != CONFIG_KEY_SEQ_MODE_SINGLE) {
            LOG_ERROR(BSL_LOG_MODULE,
                      (BSL_META("%s:%d:%d Symbol element in key range\n"),
                       info->locus, info->line, info->column));
            SHR_IF_ERR_EXIT(SHR_E_PARAM);
        }
        key_range->asterisk = true;
    } else {
        SHR_IF_ERR_EXIT
            (config_str_to_uint32(str, &value, 0));
        key_range->asterisk = false;
    }

    /*
     * Elements arrive in order: count==0 -> min, count==1 -> max,
     * count==2 -> step; anything further is an error.
     */
    switch (key_range->count) {
        case 0:
            key_range->min = value;
            key_range->max = value;
            key_range->step = 1;
            key_range->next = NULL;
            break;
        case 1:
            key_range->max = value;
            if (key_range->max < key_range->min) {
                LOG_ERROR(BSL_LOG_MODULE,
                          (BSL_META("%s:%d:%d Min value is greater than max\n"),
                           info->locus, info->line, info->column));
                SHR_IF_ERR_EXIT(SHR_E_PARAM);
            }
            break;
        case 2:
            /*
             * NOTE(review): the check rejects only step < 1 (i.e. 0), so a
             * step of 1 is accepted even though the message text says
             * "greater than 1". Step 1 is a sensible default elsewhere in
             * this function, so the message wording is likely what is
             * wrong — confirm intent before changing the check.
             */
            if (value < 1) {
                LOG_ERROR(BSL_LOG_MODULE,
                          (BSL_META("%s:%d:%d: Step size must be greater than 1\n"),
                           info->locus, info->line, info->column));
                SHR_IF_ERR_EXIT(SHR_E_PARAM);
            }
            if (value > (key_range->max - key_range->min)) {
                LOG_ERROR(BSL_LOG_MODULE,
                          (BSL_META("%s:%d:%d Step size is too large\n"),
                           info->locus, info->line, info->column));
                SHR_IF_ERR_EXIT(SHR_E_PARAM);
            }
            key_range->step = value;
            break;
        default:
            LOG_ERROR(BSL_LOG_MODULE,
                      (BSL_META("%s:%d:%d Too many elements in key range\n"),
                       info->locus, info->line, info->column));
            SHR_IF_ERR_EXIT(SHR_E_PARAM);
    }
    key_range->count++;

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Handle a configuration key field.
 *
 * \param [in] info Reader data.
 * \param [in] user Configuration user data.
 * \param [in] str Field string.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
*/ static int config_key_add(bcmcfg_read_level_info_t *info, config_tbl_user_t *user, const char *str) { SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); /* Construct configuration key range. */ SHR_IF_ERR_EXIT (config_key_range_construct(info, user, str)); /* * Single mode: * Add key into key range list directly for single mode. * Range mode: * To be added into key range list when range information is complete * (seq event handler - config_read_seq()) */ if (user->key_seq_mode == CONFIG_KEY_SEQ_MODE_SINGLE) { SHR_IF_ERR_EXIT (config_key_attach(user, &(user->key_range))); config_key_range_init(user); } exit: if (SHR_FUNC_ERR()) { config_key_range_init(user); } SHR_FUNC_EXIT(); } /*! * \brief Copy configuration data from previous level to current level. * * \param [in] start True if start, false if stop. * \param [in] context Reader context. * \param [in] user Property data. * * \retval None */ static void config_read_copy(bool start, bcmcfg_read_context_t *context, config_tbl_user_t *user) { config_stk_info_t *cur_tbl_info; config_stk_info_t *prev_tbl_info; if (start) { /* copy previous user data. */ cur_tbl_info = user->info + context->level; prev_tbl_info = user->info + context->level - 1; sal_memcpy(cur_tbl_info, prev_tbl_info, sizeof(config_stk_info_t)); } return; } /*! * \brief Manage table state transition. * * \param [in] user Configuration user data. * \param [in] info Configuration stack information. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. 
*/ static void config_table_state_next(config_tbl_user_t *user, config_stk_info_t *info) { config_tbl_state_t prev_state; prev_state = info->state; switch (info->state) { case UNIT: info->state = TABLE; break; case TABLE: if (user->type == CONFIG_TBL_GLOBAL) { info->state = FIELD; } else { info->state = KEY; } break; case KEY: info->state = FIELD; break; default: break; } if (prev_state != info->state) { LOG_VERBOSE(BSL_LOG_MODULE, (BSL_META("Map Read - Tbl_state change: (%d, %d)\n"), prev_state, info->state)); } return; } /*! * \brief Setup property reader. * * \param [in] start True if start, false if stop. * \param [in] user_data Handler user data. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ static int config_read_setup(bool start, bcmcfg_parse_info_t *info, void *user_data) { config_tbl_user_t *user = (config_tbl_user_t *)user_data; SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); COMPILER_REFERENCE(start); SHR_NULL_CHECK(user, SHR_E_PARAM); if (info) { sal_memcpy(&config_parse_info, info, sizeof(bcmcfg_parse_info_t)); } if (!CONFIG_MODE_VALID) { SHR_EXIT(); } SHR_FREE(user->fld_name); SHR_FREE(user->key_range_list); sal_memset(user, 0, sizeof(config_tbl_user_t)); exit: SHR_FUNC_EXIT(); } /*! * \brief Handler YAML scalar event. * * \param [in] value Scalar value. * \param [in] context Reader context. * \param [in] user_data Handler user data. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. 
*/ static int config_read_scalar(const char *value, bcmcfg_read_context_t *context, void *user_data) { bcmcfg_read_level_info_t *info = NULL; config_tbl_user_t *user = NULL; config_stk_info_t *tbl_info = NULL; SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); SHR_NULL_CHECK(context, SHR_E_PARAM); SHR_NULL_CHECK(user_data, SHR_E_PARAM); if (!CONFIG_MODE_VALID) { SHR_EXIT(); } info = context->info + context->level; LOG_VERBOSE(BSL_LOG_MODULE, (BSL_META("Scalar Read - State: (%d.%d)\n"), context->level, info->state)); user = (config_tbl_user_t *)user_data; tbl_info = user->info + context->level; LOG_VERBOSE(BSL_LOG_MODULE, (BSL_META("Scalar Read - table state %d, scalar value %s\n"), tbl_info->state, value)); switch (tbl_info->state) { case UNIT: /* Unit value. */ SHR_IF_ERR_EXIT (config_read_add_unit_str(user, value)); break; case TABLE: /* Table value. */ if (!sal_strcmp(value, config_tbl_type_str[CONFIG_TBL_GLOBAL])) { user->type = CONFIG_TBL_GLOBAL; } else if (!sal_strcmp(value, config_tbl_type_str[CONFIG_TBL_PORT])) { user->type = CONFIG_TBL_PORT; /* Initialize current key information. */ config_key_range_init(user); config_key_range_list_init(user); } else { LOG_ERROR(BSL_LOG_MODULE, (BSL_META("%s:%d:%d: Property: unsupported table %s.\n"), info->locus, info->line, info->column, value)); SHR_IF_ERR_EXIT(SHR_E_PARAM); } break; case KEY: /* Key value. */ SHR_IF_ERR_EXIT (config_key_add(info, user, value)); break; case FIELD: /* Field value. */ SHR_IF_ERR_EXIT (config_field_handle(info, user, value)); break; default: SHR_IF_ERR_CONT(SHR_E_INTERNAL); break; } exit: SHR_FUNC_EXIT(); } /*! * \brief Handle YAML map event. * * \param [in] start True if start, false if stop. * \param [in] context Reader context. * \param [in] user_data Handler user data. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. 
*/ static int config_read_map(bool start, bcmcfg_read_context_t *context, void *user_data) { bcmcfg_read_level_info_t *info = NULL; bcmcfg_read_level_info_t *prev = NULL; config_tbl_user_t *user = NULL; config_stk_info_t *tbl_info = NULL; SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); SHR_NULL_CHECK(context, SHR_E_PARAM); SHR_NULL_CHECK(user_data, SHR_E_PARAM); if (!CONFIG_MODE_VALID) { SHR_EXIT(); } info = context->info + context->level; prev = context->info + context->level - 1; user = (config_tbl_user_t *)user_data; config_read_copy(start, context, user); tbl_info = user->info + context->level; LOG_VERBOSE(BSL_LOG_MODULE, (BSL_META("Map Read - State: (%d.%d, %d.%d) Tbl_state: %d\n"), context->level - 1, prev->state, context->level, info->state, tbl_info->state)); if (start) { /* Try to transit configuration table state. */ config_table_state_next(user, tbl_info); } else { /* Try to transit configuration table state. */ switch (tbl_info->state) { case FIELD: /* Reset key range list when exiting FIELD state. */ if (user->type != CONFIG_TBL_GLOBAL) { config_key_range_list_init(user); } break; case TABLE: /* Reset unit set when exiting TABLE state. */ config_unit_init(user); break; default: break; } } exit: SHR_FUNC_EXIT(); } /*! * \brief Handle YAML sequence event. * * \param [in] start True if start, false if stop. * \param [in] context Reader context. * \param [in] user_data Handler user data. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. 
*/ static int config_read_seq(bool start, bcmcfg_read_context_t *context, void *user_data) { bcmcfg_read_level_info_t *info = NULL; bcmcfg_read_level_info_t *prev = NULL; config_tbl_user_t *user = NULL; config_stk_info_t *tbl_info = NULL; SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); SHR_NULL_CHECK(context, SHR_E_PARAM); SHR_NULL_CHECK(user_data, SHR_E_PARAM); if (!CONFIG_MODE_VALID) { SHR_EXIT(); } info = context->info + context->level; prev = context->info + context->level - 1; user = (config_tbl_user_t *)user_data; config_read_copy(start, context, user); tbl_info = user->info + context->level; LOG_VERBOSE(BSL_LOG_MODULE, (BSL_META("Seq Read - State: (%d.%d, %d.%d) Tbl_state: %d\n"), context->level - 1, prev->state, context->level, info->state, tbl_info->state)); /* Handle key range mode within 2nd square brackets*/ if ((tbl_info->state == KEY) && (prev->state == BCMCFG_READ_STATE_SEQ)) { if (start) { if (user->key_seq_mode == CONFIG_KEY_SEQ_MODE_RANGE) { LOG_ERROR(BSL_LOG_MODULE, (BSL_META("%s:%d:%d Too many square brackets > 2.\n"), info->locus, info->line, info->column)); SHR_IF_ERR_EXIT(SHR_E_PARAM); } else { user->key_seq_mode = CONFIG_KEY_SEQ_MODE_RANGE; } } else { /* Insert key range field entries when exiting key range mode. */ SHR_IF_ERR_EXIT (config_key_attach(user, &(user->key_range))); config_key_range_init(user); } } exit: SHR_FUNC_EXIT(); } /*! * \brief Handle YAML doc event. * * \param [in] start True if start, false if stop. * \param [in] context Reader context. * \param [in] user_data Handler user data. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. 
*/ static int config_read_doc(bool start, bcmcfg_read_context_t *context, void *user_data) { config_tbl_user_t *user = (config_tbl_user_t *)user_data; SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); SHR_NULL_CHECK(context, SHR_E_PARAM); SHR_NULL_CHECK(user, SHR_E_PARAM); if (!CONFIG_MODE_VALID) { SHR_EXIT(); } if (start) { SHR_FREE(user->fld_name); SHR_FREE(user->key_range_list); sal_memset(user, 0, sizeof(config_tbl_user_t)); } exit: SHR_FUNC_EXIT(); } /*! * \brief Handle abort notification. * * \param [in] context Reader context. * \param [in] user_data Handler user data. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ static int config_read_abort(bcmcfg_read_context_t *context, void *user_data) { config_tbl_user_t *user = (config_tbl_user_t *)user_data; SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); SHR_NULL_CHECK(context, SHR_E_PARAM); SHR_NULL_CHECK(user, SHR_E_PARAM); if (!CONFIG_MODE_VALID) { SHR_EXIT(); } /* Free dynamic memory. */ SHR_FREE(user->fld_name); SHR_FREE(user->key_range_list); exit: SHR_FUNC_EXIT(); } /*! * \brief Initialize a TLV message structure. * * \param [in] msg Pointer to TLV message. * * \retval None */ static void config_tlv_msg_t_init(config_tlv_msg_t *msg) { if (msg != NULL) { sal_memset(msg, 0, sizeof(config_tlv_msg_t)); } return; } /*! * \brief Init TLV message buffer. * * \param [out] msg Pointer to TLV message. * \param [in] buffer Pointer to buffer. * \param [in] length Buffer size. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ static int config_tlv_msg_buffer_init(config_tlv_msg_t *msg, uint8_t *buffer, int length) { SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); SHR_NULL_CHECK(msg, SHR_E_PARAM); SHR_NULL_CHECK(buffer, SHR_E_PARAM); /* Set pointers to buffer */ msg->start = msg->end = msg->cur_ptr = buffer; msg->buffer_end = buffer + length; msg->tlv_length_ptr = NULL; msg->tlv_length = 0; msg->tlv_left = 0; exit: SHR_FUNC_EXIT(); } /*! * \brief Checks if there is available space in message for given value size. 
*
 * \param [in] msg Pointer to TLV message.
 * \param [in] size Size in bytes.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_tlv_msg_length_check(config_tlv_msg_t *msg, int size)
{
    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    /* Add End-TLV size (only once) */
    if (msg->start == msg->end) {
        size += CONFIG_TLV_MSG_END_SIZE;
    }

    /* Fail when the remaining buffer cannot hold the requested bytes. */
    if ((msg->buffer_end - msg->end) < size) {
        SHR_IF_ERR_EXIT(SHR_E_MEMORY);
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add a new TLV to current message object.
 *
 * \param [in] msg Pointer to TLV message.
 * \param [in] type Message type.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
int
config_tlv_msg_add(config_tlv_msg_t *msg, config_tlv_msg_type_t type)
{
    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    SHR_NULL_CHECK(msg, SHR_E_PARAM);

    /* Check available space. */
    SHR_IF_ERR_EXIT
        (config_tlv_msg_length_check(msg, sizeof(uint8_t) + sizeof(uint16_t)));

    /* Set TLV type. */
    *((uint8_t *)(msg->cur_ptr)) = type;
    msg->cur_ptr += sizeof(uint8_t);

    /* Set TLV length which will be overwritten by value later. */
    msg->tlv_length_ptr = msg->cur_ptr;
    msg->tlv_length = 0;
    sal_memcpy((void *)msg->cur_ptr, (void *)&msg->tlv_length, sizeof(uint16_t));
    msg->cur_ptr += sizeof(uint16_t);

    /* Set End-TLV which will be overwritten by subsequent TLV msg. */
    *((uint8_t *)(msg->cur_ptr)) = CONFIG_TLV_MSG_TYPE_END;
    msg->end = msg->cur_ptr;

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Gets next TLV type and length from given message object.
 *
 * \param [in] msg Pointer to TLV message.
 * \param [out] type Message type.
 * \param [out] length Message length.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
*/
int
config_tlv_msg_get(config_tlv_msg_t *msg, config_tlv_msg_type_t *type,
                   config_tlv_msg_length_t *length)
{
    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    SHR_NULL_CHECK(msg, SHR_E_PARAM);
    SHR_NULL_CHECK(type, SHR_E_PARAM);

    /* The previous TLV's value must have been fully consumed. */
    if (msg->tlv_left != 0) {
        SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
    }

    msg->tlv_length_ptr = NULL;
    msg->tlv_length = 0;
    msg->tlv_left = 0;

    /* Get TLV type. */
    *type = *((uint8_t *)msg->cur_ptr);
    msg->cur_ptr += sizeof(uint8_t);
    if (!CONFIG_TLV_MSG_TYPE_VALID(*type)) {
        SHR_IF_ERR_EXIT(SHR_E_NOT_FOUND);
    }

    /* Get TLV length. */
    msg->tlv_length_ptr = msg->cur_ptr;
    sal_memcpy((void *)&msg->tlv_length, (void *)msg->cur_ptr, sizeof(uint16_t));
    msg->cur_ptr += sizeof(uint16_t);
    msg->tlv_left = msg->tlv_length;

    /* Length output is optional. */
    if (length != NULL) {
        *length = msg->tlv_length;
    }

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add string data in TLV message.
 *
 * \param [in] msg Pointer to TLV message.
 * \param [in] value String value.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_tlv_msg_string_value_add(config_tlv_msg_t *msg, const char *value)
{
    int value_size = 0;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    SHR_NULL_CHECK(msg, SHR_E_PARAM);
    SHR_NULL_CHECK(value, SHR_E_PARAM);

    /* Include the terminating NUL in the stored value. */
    value_size = sal_strlen(value) + 1;

    /* Check available space. */
    SHR_IF_ERR_EXIT
        (config_tlv_msg_length_check(msg, value_size));

    /* Write string value. */
    sal_memcpy((char *)msg->cur_ptr, value, value_size);
    msg->cur_ptr += value_size;

    /* Update TLV length */
    if (msg->tlv_length_ptr != NULL) {
        msg->tlv_length += value_size;
        sal_memcpy((void *)msg->tlv_length_ptr, (void *)&msg->tlv_length,
                   sizeof(uint16_t));
    } else {
        SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
    }

    /* Set End-TLV which will be overwritten by subsequent TLV msg. */
    *((uint8_t *)(msg->cur_ptr)) = CONFIG_TLV_MSG_TYPE_END;
    msg->end = msg->cur_ptr;

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Get string data from TLV message.
 *
 * Get string data from TLV message.
 *
 * \param [in] msg Pointer to TLV message.
* \param [in] value_max Max size of returning value.
 * \param [out] value Data value.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
static int
config_tlv_msg_string_value_get(config_tlv_msg_t *msg, int value_max,
                                char *value)
{
    int length_left = 0;
    int value_size = 0;

    SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN);

    SHR_NULL_CHECK(msg, SHR_E_PARAM);
    SHR_NULL_CHECK(value, SHR_E_PARAM);

    /* Nothing left in the buffer beyond the End-TLV marker. */
    if ((msg->cur_ptr + CONFIG_TLV_MSG_END_SIZE) >= msg->buffer_end) {
        SHR_IF_ERR_EXIT(SHR_E_NOT_FOUND);
    }

    /* Get unread number of bytes in TLV. */
    if (msg->tlv_length_ptr != NULL) {
        length_left = msg->tlv_left;
    } else {
        SHR_IF_ERR_EXIT(SHR_E_INTERNAL);
    }

    if (length_left <= 0) {
        /* No more data to read */
        SHR_IF_ERR_EXIT(SHR_E_NOT_FOUND);
    }

    value_size = sal_strlen((const char *)msg->cur_ptr) + 1;
    if (length_left < value_size) {
        /* Bytes left to read smaller than size */
        SHR_IF_ERR_EXIT(SHR_E_FAIL);
    }

    /* Check if value fits */
    if (value_max < value_size) {
        SHR_IF_ERR_EXIT(SHR_E_MEMORY);
    }

    /* Get string value. */
    sal_memcpy((char *)value, (char *)msg->cur_ptr, value_size);
    msg->cur_ptr += value_size;
    msg->tlv_left -= value_size;

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Add configuration field information into HA with TLV format.
 *
 * \param [in] unit Unit number.
 * \param [in] type TLV Message type.
 * \param [in] msg Pointer to TLV message.
 * \param [in] str String value.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
 */
int
config_ha_field_add(int unit, config_tlv_msg_type_t type,
                    config_tlv_msg_t *msg, const char *str)
{
    SHR_FUNC_ENTER(unit);

    SHR_IF_ERR_EXIT
        (config_tlv_msg_add(msg, type));

    SHR_IF_ERR_EXIT
        (config_tlv_msg_string_value_add(msg, str));

exit:
    SHR_FUNC_EXIT();
}

/*!
 * \brief Get configuration field information from HA with TLV format.
 *
 * \param [in] unit Unit number.
 * \param [in] type TLV Message type.
 * \param [in] msg Pointer to TLV message.
 * \param [out] str String value.
 *
 * \retval SHR_E_NONE No errors.
 * \retval !SHR_E_NONE Failure.
*/ int config_ha_field_get(int unit, config_tlv_msg_type_t *type, config_tlv_msg_t *msg, char *str) { SHR_FUNC_ENTER(unit); SHR_IF_ERR_EXIT (config_tlv_msg_get(msg, type, NULL)); SHR_IF_ERR_EXIT (config_tlv_msg_string_value_get(msg, CONFIG_NAME_MAX, str)); exit: SHR_FUNC_EXIT(); } /*! * \brief Get configuration HA data size. * * \param [in] unit Unit number. * \param [out] size HA data size. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ int config_ha_size_get(int unit, int *size) { config_db_t *cd = &(config_info.config_db[unit]); config_fld_t *fld = NULL; int i = 0; int req_size = 0; SHR_FUNC_ENTER(unit); for (i = 0; i < CONFIG_HASH_MAX_COUNT; i++) { for (fld = cd->fld[i]; fld != NULL; fld = fld->next) { req_size += sizeof(config_tlv_msg_type_t); req_size += sizeof(config_tlv_msg_length_t); req_size += sal_strlen(fld->name) + 1; req_size += sizeof(config_tlv_msg_type_t); req_size += sizeof(config_tlv_msg_length_t); req_size += sal_strlen(fld->value) + 1; } } req_size += CONFIG_TLV_MSG_END_SIZE; *size = req_size; SHR_FUNC_EXIT(); } /*! * \brief Save configuration data into HA with TLV format. * * \param [in] unit Device unit number. * \param [in] tlv_msg Pointer to TLV message. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ int config_ha_save(int unit, config_tlv_msg_t *tlv_msg) { config_db_t *cd = &(config_info.config_db[unit]); config_fld_t *fld = NULL; int i = 0; SHR_FUNC_ENTER(unit); for (i = 0; i < CONFIG_HASH_MAX_COUNT; i++) { for (fld = cd->fld[i]; fld != NULL; fld = fld->next) { SHR_IF_ERR_EXIT (config_ha_field_add(unit, CONFIG_TLV_MSG_TYPE_FLD_NAME, tlv_msg, fld->name)); SHR_IF_ERR_EXIT (config_ha_field_add(unit, CONFIG_TLV_MSG_TYPE_FLD_VALUE, tlv_msg, fld->value)); } } exit: SHR_FUNC_EXIT(); } /*! * \brief Restore configuration data from HA with TLV format. * * \param [in] unit Device unit number. * \param [in] tlv_msg Pointer to TLV message. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. 
*/ int config_ha_restore(int unit, config_tlv_msg_t *tlv_msg) { char name[CONFIG_NAME_MAX] = {'\0'}; char value[CONFIG_NAME_MAX] = {'\0'}; config_tlv_msg_type_t type; SHR_FUNC_ENTER(unit); while ((tlv_msg->cur_ptr + CONFIG_TLV_MSG_END_SIZE) < tlv_msg->buffer_end) { SHR_IF_ERR_EXIT (config_ha_field_get(unit, &type, tlv_msg , name)); if (type != CONFIG_TLV_MSG_TYPE_FLD_NAME) { SHR_IF_ERR_EXIT(SHR_E_INTERNAL); } SHR_IF_ERR_EXIT (config_ha_field_get(unit, &type, tlv_msg, value)); if (type != CONFIG_TLV_MSG_TYPE_FLD_VALUE) { SHR_IF_ERR_EXIT(SHR_E_INTERNAL); } SHR_IF_ERR_EXIT (config_field_insert(unit, name, value)); } exit: SHR_FUNC_EXIT(); } /*! * \brief Construct port configuration database. * * \param [in] unit Device unit number. * \param [in] port Port id. * \param [in] name Field name. * \param [in] value Field value. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ static int config_port_db_construct(int unit, int port, char *name, char *value) { config_port_db_t *port_cfg = &config_port_db[unit]; config_fld_t *found_fld = NULL; config_fld_t *fld = NULL; uint32_t alloc_size = 0; SHR_FUNC_ENTER(unit); if (port > CONFIG_PORT_MAX) { SHR_ERR_EXIT(SHR_E_PARAM); } /* Try to find an existing configuration field. */ found_fld = port_cfg->fld[port]; while (found_fld != NULL) { if (sal_strcmp(found_fld->name, name) == 0) { break; } found_fld = found_fld->next; } if (found_fld != NULL) { /* Ignoring duplicate config field. */ SHR_EXIT(); } /* Insert new a configuration field. 
*/ alloc_size = sizeof(config_fld_t); SHR_ALLOC(fld, alloc_size, "ltswPortCfgData"); SHR_NULL_CHECK(fld, SHR_E_MEMORY); sal_memset(fld, 0, alloc_size); alloc_size = sal_strlen(name) + 1; SHR_ALLOC(fld->name, alloc_size, "ltswPortCfgName"); SHR_NULL_CHECK(fld->name, SHR_E_MEMORY); sal_memcpy(fld->name, name, alloc_size); alloc_size = sal_strlen(value) + 1; SHR_ALLOC(fld->value, alloc_size, "ltswPortCfgValue"); SHR_NULL_CHECK(fld->value, SHR_E_MEMORY); sal_memcpy(fld->value, value, alloc_size); fld->next = port_cfg->fld[port]; port_cfg->fld[port] = fld; exit: if (SHR_FUNC_ERR() && (fld != NULL)) { SHR_FREE(fld->name); SHR_FREE(fld->value); SHR_FREE(fld); } SHR_FUNC_EXIT(); } /*! * \brief Delete all configuration property fields. * * \param [in] unit Unit number. * * \return None. */ static void config_port_db_delete_all(int unit) { config_port_db_t *port_cfg = &config_port_db[unit]; config_fld_t *fld = NULL; int i; for (i = 0; i < CONFIG_PORT_MAX + 1; i++) { while (port_cfg->fld[i] != NULL) { fld = port_cfg->fld[i]; port_cfg->fld[i] = fld->next; SHR_FREE(fld->name); SHR_FREE(fld->value); SHR_FREE(fld); } } return; } /*! * \brief Traverse port configuration database. * * Traverse port configuration and print it if any. * * \param [in] unit Device unit number. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ static int config_port_db_traverse(int unit) { config_port_db_t *port_cfg = &config_port_db[unit]; config_fld_t *fld = NULL; int i, indent = 8; bool hp = false, pip = false; for (i = 0; i < CONFIG_PORT_MAX + 1; i++) { pip = false; while (port_cfg->fld[i] != NULL) { fld = port_cfg->fld[i]; port_cfg->fld[i] = fld->next; /* Print port header. */ if (!hp) { cli_out("%*s%s:\n", indent, "", "port"); hp = true; } /* Print port id. */ if (!pip) { if (i == CONFIG_PORT_MAX) { cli_out("%*s%s:\n", indent + 4, "", "\"*\""); } else { cli_out("%*s%d:\n", indent + 4, "", i); } pip= true; } /* Print port config. 
*/ cli_out("%*s%s: %s\n", indent + 8, "", fld->name, fld->value); } } return SHR_E_NONE; } /*! * \brief Traverse configuration database. * * During the traverse, the global configuration will be shown and * port database will be constructed. * * \param [in] unit Device unit number. * * \retval SHR_E_NONE No errors. * \retval !SHR_E_NONE Failure. */ static int config_db_traverse(int unit) { config_db_t *cd = &(config_info.config_db[unit]); config_fld_t *fld = NULL; int i = 0, port = 0, len, indent = 8; char find_name[CONFIG_NAME_MAX]; char name[CONFIG_NAME_MAX]; char value[CONFIG_NAME_MAX]; bool gp = false; SHR_FUNC_ENTER(unit); for (i = 0; i < CONFIG_HASH_MAX_COUNT; i++) { for (fld = cd->fld[i]; fld != NULL; fld = fld->next) { SHR_IF_ERR_VERBOSE_EXIT (config_field_name_translate(CONFIG_TBL_PORT, true, port, NULL, find_name)); /* Search '_port_*' in field name. */ if (sal_strstr(fld->name, find_name)) { len = sal_strlen(fld->name) - sal_strlen(find_name); sal_memcpy(name, fld->name, len); name[len] = '\0'; sal_memcpy(value, fld->value, CONFIG_NAME_MAX); /* Construct port config database. */ SHR_IF_ERR_VERBOSE_EXIT (config_port_db_construct(unit, CONFIG_PORT_MAX, name, value)); continue; } for (port = CONFIG_PORT_MAX - 1; port >= 0; port--) { SHR_IF_ERR_VERBOSE_EXIT (config_field_name_translate(CONFIG_TBL_PORT, false, port, NULL, find_name)); /* Search '_port_i' in field name. */ if (sal_strstr(fld->name, find_name)) { len = sal_strlen(fld->name) - sal_strlen(find_name); sal_memcpy(name, fld->name, len); name[len] = '\0'; sal_memcpy(value, fld->value, CONFIG_NAME_MAX); /* Construct port config database. */ SHR_IF_ERR_VERBOSE_EXIT (config_port_db_construct(unit, port, name, value)); break; } } if (port >= 0) { continue; } /* Print global header. */ if (!gp) { cli_out("%*s%s:\n", indent, "", "global"); gp = true; } /* Print global config. 
*/ cli_out("%*s%s: %s\n", indent + 4, "", fld->name, fld->value); } } exit: SHR_FUNC_EXIT(); } /****************************************************************************** * Public internal HSDK functions */ char * soc_ltsw_config_str_get(int unit, const char *name) { config_fld_t *fld = NULL; if (!config_info.inited) { return NULL; } if (name != NULL) { fld = config_field_find(unit, name); if (fld != NULL) { return fld->value; } } return NULL; } char * soc_ltsw_config_port_str_get(int unit, int port, const char *name) { char find_name[CONFIG_NAME_MAX]; char *str = NULL; if (!config_info.inited) { return NULL; } if (name == NULL) { return NULL; } if (SHR_FAILURE (config_field_name_translate(CONFIG_TBL_PORT, true, 0, name, find_name))) { return NULL; } if ((str = soc_ltsw_config_str_get(unit, find_name)) != NULL) { return str; } if (SHR_FAILURE (config_field_name_translate(CONFIG_TBL_PORT, false, port, name, find_name))) { return NULL; } if ((str = soc_ltsw_config_str_get(unit, find_name)) != NULL) { return str; } return NULL; } int soc_ltsw_config_str_set(int unit, const char *name, const char *value) { SHR_FUNC_ENTER(unit); if (!config_info.inited) { SHR_IF_ERR_EXIT(SHR_E_INIT); } SHR_NULL_CHECK(name, SHR_E_PARAM); SHR_NULL_CHECK(value, SHR_E_PARAM); if (!bcmdrd_dev_exists(unit)) { SHR_IF_ERR_EXIT(SHR_E_PARAM); } SHR_IF_ERR_EXIT (config_field_insert(unit, name, value)); exit: SHR_FUNC_EXIT(); } int soc_ltsw_config_port_str_set(int unit, int port, const char *name, const char *value) { bool asterisk; char cfg_name[CONFIG_NAME_MAX]; SHR_FUNC_ENTER(unit); if (!config_info.inited) { SHR_IF_ERR_EXIT(SHR_E_INIT); } SHR_NULL_CHECK(name, SHR_E_PARAM); SHR_NULL_CHECK(value, SHR_E_PARAM); if (!bcmdrd_dev_exists(unit)) { SHR_IF_ERR_EXIT(SHR_E_PARAM); } asterisk = (port == -1) ? 
true : false; SHR_IF_ERR_EXIT (config_field_name_translate(CONFIG_TBL_PORT, asterisk, port, name, cfg_name)); SHR_IF_ERR_EXIT (config_field_insert(unit, cfg_name, value)); exit: SHR_FUNC_EXIT(); } int soc_ltsw_config_init(void) { int unit = 0; config_db_t *cd = NULL; SHR_FUNC_ENTER(BSL_UNIT_UNKNOWN); if (config_info.inited) { config_info_cleanup(); } for (unit = 0; unit < CONFIG_UNIT_MAX; unit++) { cd = &(config_info.config_db[unit]); if (cd->mutex== NULL) { cd->mutex = sal_mutex_create("ltswConfigMutex"); SHR_NULL_CHECK(cd->mutex, SHR_E_MEMORY); } } SHR_IF_ERR_EXIT (bcmcfg_read_handler_register(&config_read_hdl)); config_info.inited = 1; exit: if (SHR_FUNC_ERR()) { config_info_cleanup(); } SHR_FUNC_EXIT(); } int soc_ltsw_config_ha_save(int unit, void *buffer, int size) { config_tlv_msg_t tlv_msg; SHR_FUNC_ENTER(unit); SHR_NULL_CHECK(buffer, SHR_E_PARAM); config_tlv_msg_t_init(&tlv_msg); SHR_IF_ERR_EXIT (config_tlv_msg_buffer_init(&tlv_msg, (uint8_t *)buffer, size)); SHR_IF_ERR_EXIT (config_ha_save(unit, &tlv_msg)); exit: SHR_FUNC_EXIT(); } int soc_ltsw_config_ha_restore(int unit, void *buffer, int size) { config_tlv_msg_t tlv_msg; SHR_FUNC_ENTER(unit); config_tlv_msg_t_init(&tlv_msg); SHR_IF_ERR_EXIT (config_tlv_msg_buffer_init(&tlv_msg, (uint8_t *)buffer, size)); SHR_IF_ERR_EXIT (config_ha_restore(unit, &tlv_msg)); exit: SHR_FUNC_EXIT(); } int soc_ltsw_config_ha_size_get(int unit, int *size) { SHR_FUNC_ENTER(unit); SHR_NULL_CHECK(size, SHR_E_PARAM); SHR_IF_ERR_EXIT (config_ha_size_get(unit, size)); exit: SHR_FUNC_EXIT(); } void soc_ltsw_config_sw_dump(int unit) { config_db_t *cd = &(config_info.config_db[unit]); config_fld_t *fld = NULL; int i = 0; if (!config_info.inited) { return; } LOG_CLI((BSL_META_U(unit, "\nSW Information CONFIG - Dunit %d\n"), unit)); for (i = 0; i < CONFIG_HASH_MAX_COUNT; i++) { for (fld = cd->fld[i]; fld != NULL; fld = fld->next) { LOG_CLI((BSL_META_U(unit, "Config Property: hash %4d, %s = %s\n"), i, fld->name, fld->value)); } } return; 
} void soc_ltsw_show_config(int unit) { int indent = 4; /* Print header. */ cli_out("---\n"); cli_out("bcm_device:\n"); cli_out("%*s%d:\n", indent, "", unit); CONFIG_LOCK(unit); (void)config_db_traverse(unit); (void)config_port_db_traverse(unit); cli_out("...\n"); config_port_db_delete_all(unit); CONFIG_UNLOCK(unit); }
"""Hook specifications describing the queryable properties of a Profile.

Each function below is a pluggy-style hook specification registered on
``Profile.plugin_manager``; plugins provide the implementations.
"""

import sys

from shapely.geometry import Polygon, LineString

from .profile import Profile


@Profile.hookspec
def upper_contour_line(profile: Profile) -> LineString:
    """Upper bounding contour line of the profile."""


@Profile.hookspec
def lower_contour_line(profile: Profile) -> LineString:
    """Lower bounding contour line of the profile."""


@Profile.hookspec
def cross_section(profile: Profile) -> Polygon:
    """Cross-section polygon of the profile."""


@Profile.hookspec
def height(profile: Profile) -> float:
    """Height of the profile."""


@Profile.hookspec
def width(profile: Profile) -> float:
    """Width of the profile."""


@Profile.hookspec
def types(profile: Profile) -> tuple:
    """A tuple of keywords to specify the shape types of the profile."""
    # Annotation fixed: the docstring (and name) describe a tuple of
    # keywords, but the spec was annotated ``-> float``.


@Profile.hookspec
def equivalent_rectangle(profile: Profile) -> Polygon:
    """Get the dimensions of the equivalent rectangle of the profile."""


@Profile.hookspec
def strain(profile: Profile) -> float:
    """Equivalent strain of the profile."""


@Profile.hookspec
def temperature(profile: Profile) -> float:
    """Temperature of the profile."""


@Profile.hookspec
def material(profile: Profile) -> str:
    """Material identifier string for use in several other hooks to get material properties."""


@Profile.hookspec
def flow_stress(profile: Profile) -> float:
    """Flow stress of workpiece material."""
    # Annotation fixed: flow stress is a scalar stress value like the
    # other numeric hooks (height, width, strain, ...), not a string.


Profile.plugin_manager.add_hookspecs(sys.modules[__name__])
/** * Represents an unresolved method within a type, including * its original obfuscated name and signature/mapped name. */ static class UnresolvedMethod { private final String obfuscatedName; private final String argument; public UnresolvedMethod(String a, String b) { this.obfuscatedName = a; this.argument = b; } }
Cubosomes for Enhancing Intestinal Absorption of Fexofenadine Hydrochloride: In situ and in vivo Investigation Purpose The aim of this work was to probe cubosomes for enhanced intestinal absorption and oral bioavailability of poorly absorbable fexofenadine HCl (FEX-HCl). Materials and Methods Two cubosomal systems were fabricated utilizing glyceryl mono-oleate, a lyotropic mono lamellar lipid as oil phase and poloxamer407 as stabilizer at weight ratios of 8:2 and 7:3. The morphology of cubosomes was researched using transmission electron microscopy (TEM) and particle size was measured using photon correlation spectroscopy. FEX-HCl release was monitored in vitro. The effect of cubosomal encapsulation on intestinal absorption was assessed using in situ rabbit intestinal perfusion technique. Carrageenan induced rat paw edema model was utilized to monitor in vivo anti-inflammatory effect before and after cubosomal encapsulation. Results TEM revealed the existence of spherical and polygonal nanostructures arranged in honeycomb organization. Size measurement reflected nanoparticles with reduced size at higher poloxamer concentration. Release studies revealed liberation of FEX-HCl from cubosomes based on Higuchi kinetics model. The intestinal permeability data indicated incomplete absorption of FEX-HCl from simple aqueous solution with P-glycoprotein efflux contributing to this poor intestinal absorption. Incorporation of FEX-HCl in cubosomes enhanced membrane transport parameters. The intestinal absorption did not correlate with drug release suggesting that drug release is not the rate limiting with possible intact cubosomal transport. Cubosomal encapsulation of FEX-HCl significantly enhanced its in vivo anti-inflammatory efficacy compared to the aqueous FEX-HCl dispersion. Conclusion Cubosomes are promising novel carriers for enhancing intestinal absorption of FEX-HCl. 
Intact FEX-HCl-cubosomal absorption is possible via trans-lymphatic pathway but this requires further investigations. Introduction Oral medicament administration is the most preferred for drug delivery regarding its convenience and patient compliance. 1 However, innate physicochemical properties of newly developed active pharmaceutical ingredients (APIs) may hamper the feasibility of development of a suitable oral delivery system. 2 Oral delivery can be complicated by poor drug dissolution and/ or poor permeability through the gastrointestinal membrane. This process is governed by the biopharmaceutical classification system. 3 Biopharmaceutical class III APIs express compromised oral bioavailability owing to their hydrophilic nature with subsequent limited biological membranes permeability. 1 FEX-HCl is an FDA approved anti-histaminic medicament for allergic rhinitis and chronic idiopathic urticaria management. 4,5 It is considered to be more advantageous second-generation histamine H1 receptor antagonist concerning its selective, non-anticholinergic, non-sedating and safer effect. 6 Unfortunately, FEX-HCl belongs to BCS class III drugs and exhibits diminished oral bioavailability (33%) with the P-glycoprotein (P-gp) efflux being implicated as a key parameter in limiting its intestinal permeability. There are many evolving approaches for enhancement of FEX-HCl membrane permeability and oral bioavailability. These include preparation of FEX-HCl/cyclodextrin inclusion complex, 10 phospholipid complexation. 11 Using the solid dispersion approach, membrane permeability Preparation of the Tested Cubosomes Formulation Cubosomes are nanostructured systems self-assembled from amphiphilic lipids in water with the aid of suitable stabilizers. 14,15,17 Glyceryl mono-oleate (Peceol), a lyotropic mono lamellar lipid was selected as the lipid phase with Poloxamer407 being utilized as stabilizing polymer. 
18,19 The composition of the selected cubosomal formulations included peceol and Poloxamer407 at a weight ratio of either 8:2 or 7:3, respectively (Table 1). FEX-HCl-loaded cubosomes were prepared through controlled hydration of peceol/poloxamer407 mixture using homogenization technique. This technique was previously utilized by other investigators for cubosomes preparation. 20,21 Briefly, peceol and poloxamer407 were mixed and heated on water bath at 60°C till complete melting. FEX-HCl was added to the melt with continuous stirring before gradual addition of water heated to similar temperature and agitation for 30 minutes by overhead homogenizer (Virtis-s23, USA) to give a crude homogenous dispersion. Cubosomal dispersion was equilibrated for an overnight at room temperature before bath sonication for 15 minutes (Elmasonic S23, Germany). Transmission Electron Microscopy (TEM) Morphological analysis of the liquid crystalline cubosomes applied TEM analysis using transmission electron microscope (JEOL-JSM-1400 PLUS, Tokyo, Japan). This involved loading the tested cubosomal dispersions on copper grid prior to staining with uranyl acetate for 5 minutes and lead citrate for 2 minutes, subsequently. TEM photomicrographs were captured for the air dried stained cubosomes samples. Particle Size Analysis The prepared cubosomal dispersions were suitably diluted with double filtered water and sonicated for few minutes to create uniform clumps-free dispersions. The mean particle size and polydispersity index (PDI) were assessed utilizing dynamic light scattering technique (DLS). This employed a Nano-ZS Zetasizer (Malvern Instruments Ltd., Worcestershire, UK). Particle size analysis was performed at 25°C with light scattering at an angle of 90°. The computed data were presented as the mean particle size (Z-average) ± the standard deviation (SD). 
Assay of Drug Analysis of in vitro release samples employed UV spectrophotometry but quantification of FEX-HCl in the in situ intestinal perfusion samples was accomplished using high pressure liquid chromatography (HPLC). Spectrophotometric FEX-HCl analysis was conducted at 218 nm using UV-visible spectrophotometer (Thermo Fisher Scientific, USA). Standard drug solutions at concentrations of 10, 12, 15, 18 and 20 μg/mL were prepared in 0.001N HCl, phosphate buffered saline (pH 6.8) and phosphate buffered saline (pH 7.4). The absorbance of each was recorded and used to construct the calibration graphs that were utilized for subsequent quantification of FEX-HCL in the release studies at the corresponding pH value. Chromatographic drug analysis was accomplished employing HPLC system (Agilent Technologies 1260 infinity, DE, Germany). The system is supplied with a variable wavelength UV detector (VWD 1260) and an automated sampling system (TCC 1260). This computer aided system-control utilizes Agilent Open LAB ChemStation software. Isocratic drug separation was attained employing a reversed phase column (ODS, 15 cm x 4.6 mm) with an average particle size of 5 μm (GL Sciences Inc., Tokyo, Japan). Chromatographic analysis of FEX-HCl was accomplished following the methodology established by Oliveira and coworkers with slight modification. 22 The mobile phase comprised a mixture of 0.01 M potassium di-hydrogen phosphate buffer containing 0.1% v/v of triethylamine (pH adjusted to 3.2 with o-phosphoric acid), acetonitrile and methanol at a ratio of 50:30:20 v/v/v, respectively. The samples injection volume was 3545 programmed to be 30 μL with the mobile phase being allowed to flow at a rate of 1.2 mL/min and the column effluent was monitored at 218 nm. In vitro Release Studies FEX-HCl release from its prepared cubosomal formulations was monitored employing dialysis method. This used dialysis membrane (cellulose tubing, cut off 14000 Da, Sigma-Aldrich, St. 
Louis, MO), which was formerly soaked in water for 24 hours to ensure complete swelling and constant pore width throughout the whole experiment. 23 The dialysis sacs were loaded with a known volume (2 mL) of the tested cubosomes dispersions and were incubated at 37°C in 100 mL of 0.001N HCl which was utilized as release medium. The dialysate was subjected to gentle stirring intermittently with 5 mL samples being collected at predetermined time points (1, 2, 3, 4, 6, and 8 h). The release medium was replenished after each sample with equivalent volume of fresh medium preheated to 37°C to maintain sink condition. Furthermore, F2 cubosomes were dialyzed against 100 mL phosphate buffered saline (PBS) adjusted to pH values of 6.8 or 7.4. Samples were collected at 10 minutes intervals for 2 hours with fresh release medium of equivalent volume being reloaded after each sample. This was conducted to mimic the in vivo physiological pH of the perfused intestinal segment which allow subsequent correlation analysis to be conducted between in vitro release and in situ membrane permeability data. 24 The amounts of FEX-HCl in collected samples were analyzed spectrophotometrically and the cumulative amount released was expressed as percentage of the total amount in the dialysis sac. Release profiles were constructed via plotting percentage cumulative amount released as a function of corresponding time points. The release efficiency values were computed for the developed release profiles from the area under plot at time t relative to that calculated supposing 100% drug release at all time points. 25 The release experiments were conducted in triplicates. In situ Intestinal Perfusion Studies FEX-HCl intestinal membrane transport was assessed from its aqueous solution and F2 cubosomes formulation using in situ rabbit intestinal perfusion technique. Twelve albino rabbits with an average weight of 2kg were employed in this study. 
The study protocol and animal manipulations were performed according to the Ethical Committee approval of College of Pharmacy, University of Tanta (approval number, 181218). This protocol conforms with the National Institute of Health guide for the care and use of laboratory animals. Drug absorption from four intestinal segments was investigated. The intestinal segments employed in the study were duodenum, jejunum, ileum and colon. The surgical procedures and segment preparation were achieved according to well-established methodology. Briefly, the rabbits under study were fasted overnight with free water supply. On the day of the experiment, the rabbit was injected by chlorpromazine HCl (2 mg/kg) as a muscle relaxant then anesthetized with ketamine HCl (50 mg/kg) in two sequential doses separated by 15 minutes. The anaesthetized rabbit was placed in a supine position lying on a heating pad. A midline abdominal longitudinal incision with an average length of 7 cm was made after shaving of hair and cleansing of the abdominal area. The intestinal segments under study were exposed and their ends were tied off with surgical threads. The desired intestinal length of each isolated segment was adjusted and then cannulated. A three-way stopcock cannula was utilized to cannulate the proximal end receiving the perfusion solution, while L-shaped cannula was employed to cannulate the distal end for sample collection. This isolates the segment under study and eliminates the effect of food while maintaining tissue viability. The measured intestinal lengths were adjusted at 15, 30, 30, and 10 cm for duodenum, jejunum, ileum, and ascending colon, respectively. Warm saline was utilized in order to cleanse the cannulated intestinal segments before assembling the selected segments in a multi-S-pattern to assure steady flow rate. The segments under study were kept moist and warm by repeated application of 37 °C normal saline to a gauze cover of the isolated segments. 
26 FEX-HCl was included in the control perfusion solution in phosphate buffered saline and cubosomal dispersion at a concentration of 22.5μg/mL. This concentration was estimated regarding the maximum daily dose of FEX-HCl and the average daily fluid input into gastrointestinal tract. 27 The pH values of the prepared perfusion solutions were adjusted to 6.6 for perfusion into the duodenum, 6.8 for perfusion through the colon and to 7.4 for perfusion through jejunum and ileum segments. The temperature of the prepared solutions was adjusted to 37•C prior to perfusion. The pump flow rate https://doi.org/10.2147/IJN.S370235 DovePress International Journal of Nanomedicine 2022:17 3546 was set at 0.27 mL/min for two hours which was kept constant utilizing a Harvard-22 perfusion pump (Harvard Apparatus, Millis, MA, USA). Perfusate samples outflowing from the exposed segments were collected from the L-shaped cannula every 10 minutes for 2 hours. The volume of each sample was accurately measured before samples centrifugation for 5 minutes to precipitate any mucus debris. The samples collected during the second hour were analyzed for drug content employing the developed HPLC assay method to monitor drug absorption at steady state. 2 Finally, the animal was sacrificed, and the isolated segments were excised for determining their actual lengths which were used for data analysis. Analysis of Intestinal Perfusion Data Data obtained from the in situ technique are used for determination of the net water flux and the membrane permeability parameters of the investigated drug. The water flux was determined by calculating the difference between the expected and the actual perfusate volumes. The calculated permeation parameters have been utilized to study the mechanism of drug transport throughout the gastrointestinal tract. The complete methodology for calculating the membrane transport parameters are documented in literature reviews. 
The following sections will summarize the employed methodology for data analysis. Absorptive Clearance The remaining amount of FEX-HCl in each sample (C out ) was determined according to the corrected concentration of the drug in the collected sample regarding the net water flux. The ratio between the corrected FEX-HCl concentration exiting the selected segment (C out ) and the amount of the drug flowing into the segment (C in ) was utilized to calculate the fraction of FEX-HCl remaining in each sample after perfusion. The remaining fraction at the steady state {(C out /C in ) ss } was determined taking the mean of the remaining fractions in the collected samples of the second hour of perfusion. This was used to determine the permeability area product (Pe.A) which is known as the absorptive clearance determined in mL/min utilizing the following equation: (1) Where (Pe) is a symbol for the apparent permeability coefficient (cm/min), (A) represents the effective surface area (cm 2 ) and (Q) denotes the average flow rate of the perfusate through the investigated segment (mL/min). The fraction absorbed was calculated according to the following equation at the steady state: (2) The anatomical reserve length (ARL) parameter is related to the intestinal drug absorption which is described as the intestinal length remaining after complete absorption of the drug. The ARL was determined according to the following equation: Where L* is the maximum anatomical length of the selected intestinal segment ready for absorption (cm) and l* is the length of the intestinal segment required for complete drug absorption (cm). In theory, the luminal drug concentration cannot be reduced to zero at the intestinal length (l*) due to the nature of logarithmic function. Consequently, an extremely small percent of the remaining solute in the intestinal lumen is thought to be an indicator for complete drug absorption. This percent is assumed to be 5% at it is substituted in Equation (1). 
Accordingly, the length required for 95% absorption of the drug (L 95% ) was determined utilizing following equation: (4) Where Pe.A is the absorptive clearance normalized to intestinal length and l* is L 95% for the drug under study. 2,26 Solvent Drag Effect on the Intestinal Absorptive Clearance The mechanism of the drug transport through the intestinal membranes was investigated by studying the influence of water flux on the drug absorption. This was performed by plotting the absorptive clearance normalized to intestinal Dovepress Sultan et al length (Pe.A/l) versus the net water flux per unit intestinal length (J w /l) which is calculated from the difference between the predictable sample volume in a certain time interval (Q in ) and the actual perfusate volume attained from the intestinal segment at the same time interval (Q out ). This is stated by the next equation: The amount of drug absorbed per unit time is reliant on the participation of two pathways of drug absorption that are the diffusive transcellular and the convective paracellular processes. Accordingly, the rate of drug absorption (Js) which is calculated in (µg/min) is determined from the following equation: The diffusive process is represented by the part of equation at which Ks is the drug diffusive permeability coefficient, C is the drug concentration in the intestinal lumen and C p is the plasma drug concentration, while the convective process is represented by (Ø s Jw C), at which Ø s is the sieving coefficient of the given drug. From Equation (6) we can make an approximation to Equation (7) during the steady state, considering the blood sink condition. 
Where J ss is the drug flux at the steady state (µg/min), D is the diffusion coefficient of the drug, K p denotes the drug (octanol/water) partition coefficient, A symbolizes the effective surface area of drug absorption, Δx represents the path length and C ss is the length averaged solute concentration in the gastrointestinal lumen at the steady state (µg/mL). As a result, rearrangement of the equation (7) gives: (8) The term J ss /C ss is the overall absorptive clearance of the given drug (mL/min) that is achieved by different permeation pathways, and it is calculated as the permeability surface area product Pe.A. Plotting the absorptive clearance versus the net water flux both normalized to the intestinal length yields a line with a slope representing a measure for the sieving coefficient (Ø s ) and the intercept with the y-axis gives a measure for the transcellular diffusive contribution to the overall absorptive clearance. 28 In vivo Evaluation of Anti-Inflammatory Effect The anti-inflammatory activity of cubosomes incorporated FEX-HCl was investigated employing carrageenan induced paw edema method. 32 The protocol of study and animal manipulation procedures were approved by the Ethical Committee, College of Pharmacy, Tanta University (Approval number 181218). The study employed 24 male Wistar albino rats with an average weight of 200±20 g. Appropriate housing of the utilized animals was allowed for three days preceding the experiment for adaptation to lab conditions with free access to standard pellet diet and water being given. According to the experimental design, the rats were divided into three groups and fasted overnight prior to experiment with free access to water. 
The tested cubosomal formulation (F2) was orally administered to the first animal group and an aqueous dispersion of unprocessed FEX-HCl in distilled water (6.35 mg/mL) was administered to the second group (positive control group) utilizing a feeding syringe with the last group receiving no medication (negative control group). The orally administered volume was adjusted to deliver a dose equivalent to 15.87 mg/kg which was estimated relying on the FDA dose conversion tables. 33 Acute inflammation induction involved intraplantar injection of 100 μL carrageenan solution in distilled water (1% w/v). Each rat was anaesthetized by ether inhalation and injected into the sub plantar region at midline of the right hind paw 15 minutes after oral FEX-HCl administration. Baseline paw volume measurement at zero time was performed immediately before carrageenan intraplantar injection using a Vernier caliper. Paw edema volume at predetermined post injection time intervals (0.25, 0.5, 1, 2, 3, 4, 5 and 6 h) was measured and % edema volume was computed using the following equation: 34 The edema formation curve was constructed by plotting the recorded % edema volume values as a function of time. Area under the curve (AUC) represents the total edema formation and was calculated to assess the efficacy of the tested cubosomal formulation relative to control groups. 35,36 Statistical Analysis Kruskal-Wallis test was applied to assess statistical data significance (P<0.05). Individual variation between tested formulations was explored utilizing post-hoc Tukey's multiple comparison. These were performed using SPSS 23. Results Transmission Electron Microscopy (TEM) Figure 1 shows representative transmission electron micrographs of cubosomes prepared using peceol with poloxa-mer407 at weight ratios of 8:2 and 7:3. The dispersion employed homogenization of 1 gram of each mixture in 25mL of water. This provides final poloxamer concentration of 0.8% and 1.2% w/v, respectively. 
For cubosomes incorporating lower poloxamer concentration (F1), TEM micrographs revealed spherical nanostructures free from any aggregates ( Figure 1A-C). Increasing poloxamer concentration in F2 cubosomes resulted in creation of spherical and polygonal structures arranged in honeycomb organization ( Figure 1D-F). This morphological discrepancy of the micro-graphed cubosomes was reported by previous investigators who prepared cubosomes using lyotropic lipid with poloxamer. 16 The mean particle size values for the captured nanostructures were calculated to be 106±37 nm and 98±52 nm for cubosomes containing lower and higher poloxamer concentrations, respectively. Particle Size Analysis Figure 2 shows the particle size distribution of the tested cubosomes. The recorded average particle size values were 162.1 nm (SD= 46.8) and 112.2 nm (SD= 7) for F1 and F2 cubosomes, respectively. This correlates to the recorded particle size rank measured from TEM micrographs. Moreover, the computed polydispersity index value for F1 formulation was 0.202 (SD = 0.016) with the value recorded for F2 formulation being increased to 0.317 (SD = 0.015) reflecting higher discrepancy for the fabricated F2 cubosomes. This finding is supported by the morphological investigations which showed the development of mixed nano-architectures. In vitro Release Studies The release rate of FEX-HCl from fabricated cubosomes formulations was monitored using 0.001N HCl as release medium. The recorded release profiles for F1 and F2 cubosomes are shown in Figure 3A. These profiles revealed sustained drug release from both cubosomal formulations with almost 98.8% and 97.5% of the loaded drug being released after 8 hours from F1 and F2 formulations, respectively. This was further evidenced from the computed overall release efficiency values which were 60.3% ± 1.5% for F1 cubosomes and 58.6%± 1.2% for F2 cubosomes. 
With respect to the FEX-HCl release rate constant, there was no significant difference between F1 and F2 cubosomes. FEX-HCl release from F2 cubosomes was monitored at pH values of 6.8 and 7.4 for 2 hours at 37°C simulating the experimental conditions of in situ perfusion studies. This permits subsequent correlation analysis between the released amounts of FEX-HCl and in situ intestinal drug absorption at the same time points. Figure 3B presents the release profiles constructed for F2 cubosomes at pH values of 6.8 and 7.4 relative to that recorded using 0.001N HCl as release The release kinetics of FEX-HCl from cubosomes were estimated by fitting the data to zero, first, and Higuchi release kinetics models. Linear regression was conducted for the data after fitting to different models and the correlation coefficient (R 2 ) was calculated for each model. The calculated R 2 values are presented in Table 2. The fitting reflected that the cubosomes liberated FEX-HCl by Higuchi release kinetics suggesting matrix diffusion-based release. In situ Intestinal Perfusion of Fexofenadine HCl The primary objective of this research was to investigate the efficacy of cubosomes for intestinal absorption enhancement. The in situ intestinal membrane transport parameters of FEX-HCl are presented in Table 3. Perfusion of simple aqueous solution of FEX-HCl showed incomplete absorption of FEX-HCl from the tested intestinal segments. The incomplete absorption was noticed from the membrane transport parameters which reflected negative values for the ARL which were calculated to be −308. Table 1. Error bars represent SD (n= 3). 3551 colon, respectively ( Table 3). The recorded ARL values reflect site dependent absorption with the segmental drug absorption being ranked as colon > duodenum > jejunum > ileum. This rank order reflects poor absorption from the distal parts of the small intestine. 
The correlation between water flux and absorptive clearance was researched to probe the relative contribution of paracellular and transcellular pathways on the absorptive clearance of FEX-HCl. This involved plotting the absorptive clearance per unit length as a function of the net water flux per unit length ( Figure 4). These plots were subjected for linear regression analysis and the intercept was used to calculate the contribution of transcellular absorption (at zero water flux). This was used to calculate the percentage transcellular and paracellular absorption. The calculated values are presented in Table 4. The regression analysis indicated dependence of the absorptive clearance on the water flux as shown from the slope values of the regression line of each segment plot. These slopes were significantly different from zero (p < 0.05). This indicates the existence of a role of paracellular pathway in the overall absorptive clearance of FEX-HCl. The relative contribution of paracellular to transcellular absorption depended on the perfused segment. The percentage paracellular absorption was 64.21, 42.49, 100 and 68.77% for the duodenum, jejunum, ileum and colon, respectively ( Table 4). Incorporation of FEX-HCl into cubosomes increased the intestinal absorption of the drug compared with the corresponding aqueous solution. The absorptive clearance normalized to segment length was increased by 1.25, 4.78, 2.25 and 2.22-fold after perfusion of FEX-HCl cubosomes in case of duodenum, jejunum, ileum and colon, respectively. This increase in the absorptive clearance was reflected as an increase in the %Fa/L with significant reduction in value of L 95% compared with the parameters recorded after perfusion of aqueous FEX-HCl solution (Table 3). Interestingly, the increase in the absorptive clearance was associated with noticeable increase in the contribution of the transcellular pathway in drug absorption from all segments. 
This was estimated from the absorptive clearance versus lengthnormalized water flux plots constructed for FEX-HCl loaded cubosomes ( Figure 5). For example, the % transcellular absorption was increased from zero to 64.41% in case of ileum after perfusion of cubosomal dispersion (Table 4). 3552 Correlation analysis of the intestinal absorption data to the released amounts of FEX-HCl at the same time points, determined under the same experimental conditions was conducted. This employed bivariate Pearson's correlation. The computed results reflected no in vitro-in situ correlation which was shown from the recorded non-significant Pearson's correlation coefficient "r" values (P > 0.05). The computed "r" values were 0.749, 0.127, −0.270, −0.257 for the correlation between drug release and % fraction absorbed from duodenum, jejunum, ileum and colon, respectively (P > 0.05). Evaluation of Anti-Inflammatory Effect The anti-inflammatory effect of FEX-HCl was monitored using carrageenan provoked paw edema technique. Figure 6 shows the increase in paw volume due to edema formation as a function of time after administration of FEX-HCl in the form of aqueous suspension or cubosomes. The edema formation was monitored with reference to that induced in rats administering plain water (negative control). Injection of carrageenan to the control rats resulted progressive swelling to reach maximum volume (peak edema) 3 hours after carrageenan injection. The edema started to resolve slowly after the maximum but remained above 60% increase in the paw volume after 6 hours. The area under the edema formation curve was 407.27% hour for the negative control group. Administration of a single dose FEX-HCl simple suspension resulted in marginal reduction (non-significant) in the edema volume compared to negative control group. The AUC was nonsignificantly reduced (P > 0.05) to reach 355.25% hour. 
Administration of FEX-HCl cubosomal dispersion resulted in significant reduction in edema formation compared with the negative control group or that treated with simple suspension. The AUC was significantly reduced (P < 0.05) to reach 157.61% hour after administration of cubosomes ( Figure 6). Discussion Two FEX-HCl loaded cubosomal formulations were constructed. These formulations, F1 and F2 incorporated peceol and poloxamer407 at weight ratios of 8:2 and 7:3, respectively. Morphological evaluation of F1 cubosomes using TEM revealed spherical nanostructure with F2 cubosomes presenting spherical and polygonal nanoparticles. Organization of cubosomes as honeycomb architecture was revealed for F2 cubosomes ( Figure 1F). This complies with the literature describing honeycomb structural organization for cubosomes. 15 The tested poloxamer407 concentrations were in the acceptable range which was previously reported. 37 The recorded morphology complies with the published data which suggested development of spherical and polygonal cubosomes. 37,38 The recorded size values for F1 and F2 formulations were comparable. Particle size distribution data correlate to that recorded employing TEM. The computed PDI value for F1 cubosomes revealed homogenous nanostructured particles. Higher PDI value was shown for F2 formulation supporting the morphological discrepancy recorded using TEM analysis. These results conform with the published work on similar formulations with size reduction being recorded at higher poloxamer concentrations. 39,40 The UV spectrophotometric analysis was sensitive, selective and accurate enough to successfully quantify the amounts of FEX-HCl released in vitro using 0.001N HCl, PBS (pH 6.8) and PBS (pH 7.4) as release media. The developed HPLC method was successfully employed to determine the concentration of FEX-HCl in the perfusate which was obtained after in situ intestinal perfusion study. 
The selection of HPLC was based on its ability to selectively detect and quantify FEX-HCl without interference from endogenous materials which may be eluted in the perfusate samples. Previous investigations highlighted that the maximum concentration of poloxamer407 that can impart stabilization and size reduction of cubosomal systems is 1.5% w/v, above this concentration no further size reduction can take place. 37 Accordingly, formulation containing poloxamer407 with peceol at a weight ratio of 3:7 (F2 cubosomes) was selected for in situ and in vivo evaluation. In vitro release studies of FEX-HCl encapsulated cubosomes revealed sustained drug release over 8 hours. This pattern was recorded for both F1 and F2 cubosomes. Release sustainability of cubosomes encapsulated drugs was previously reported by other investigators. 37 3554 FEX-HCl release from cubosomes formulation selected for in situ intestinal permeability investigation was monitored at pH values 6.8 and 7.4 for 2 hours. This allows for investigation of the effect of drug release on intestinal absorption. FEX-HCl release from F2 cubosomes revealed pH dependent release pattern. This was evidenced from the significantly reduced release efficiency values at pH values 6.8 and 7.4 compared to that recorded using 0.001N HCl as release medium. This can be explained taking into consideration the reported pH dependent solubility of FEX-HCl. 41 Reduced FEX-HCl solubility at the higher pH values increases its partitioning into cubosomal lipid bilayer with subsequent reduction of release efficiency. 38 The release kinetics of FEX-HCl from cubosomal formulations followed Higuchi model suggesting matrix diffusionbased release. This may be ascribed to the possible cubosomal lamellar structure. Similar release kinetics were recorded by other investigators from cubosomes. 42 Similar release rate was recorded from the tested cubosomal formulations employing 0.001N HCl as release medium. 
This is acceptable taking into consideration the composition of both formulations which differs only in relative proportions of peceol to poloxamer407. Both formulations were physically similar with particles of comparable size and it will be acceptable to have similar release pattern taking into consideration the recorded release kinetics which depended on the architecture of cubosomes. In situ rabbit intestinal diffusion technique was selected to monitor the efficacy of cubosomes for intestinal absorption enhancement. This selection depended on the reported advantages of such model. This strategy eliminates the effect of complicating factors such as food, drug dissolution and the variability of stomach residence time. In addition, the model allows preservation of the tissue viability with the rabbit intestine providing additional advantage of good correlation with human intestinal structure. Such strategy provides chance for deduction of a lot of information about the intestinal absorption, its pathways and limiting factors. 26,27,31,43,44 Perfusion of simple aqueous solution of FEX-HCl showed incomplete drug absorption from the tested intestinal segments. The recorded incomplete absorption of FEX-HCl from simple aqueous solution correlates with previous literature reports which categorized FEX-HCl as class III drug due to its poor permeation through biological membranes. 1,9,11 The recorded ARL values reflect site dependent absorption with poor absorption from the distal parts of the small intestine. The regional difference suggests possible contribution for the P-gp efflux transporter in limited intestinal absorption of FEX-HCl. This suggestion is based on the regional expression of P-gp in the intestine which is expected to be more abundant in the ileum followed by the jejunum. 28,45,46 The contribution of P-gp efflux to the limited intestinal absorption of FEX-HCl has been highlighted in literature reports. 
1,9,11 The correlation between water flux and absorptive clearance reflected dependence of the absorptive clearance on the water flux suggesting a role of paracellular pathway in the overall absorptive clearance of FEX-HCl. The existence of 100% paracellular absorption in the ileum is surprising taking into consideration its relatively large surface compared with the colon but this may be attributed to the effect of P-gp efflux transporters which minimize the transcellular absorption. These transporters are expressed to greater extent in the ileum. 28 Incorporation of FEX-HCl into cubosomes increased the intestinal absorption of the drug compared with the corresponding aqueous solution. Interestingly, the increase in the absorptive clearance was associated with noticeable increase in the contribution of the transcellular pathway in drug absorption from all segments. These findings suggest that drug loaded cubosomes permeate mainly via transcellular route. The suggested permeation pathways of cubosomal FEX-HCl is schematically illustrated in Figure 7. Another supposition depends on the ability of cubosomal components to fluidize the intestinal membrane with subsequent enhancement of membrane permeability. Enhanced trans-lymphatic transport is another possibility for augmented intestinal absorption from cubosomes. The later pathway may provide the benefit of bypassed hepatic metabolism. The Peceol which is the principle component of cubosomes was shown to increase intestinal membrane permeability via inhibition of P-gp drug efflux and/or augmenting trans-lymphatic transport of drugs. 48 The P-gp inhibitory effect was indicated from in vitro studies employing Caco-2 cell culture which proved down-regulation of P-gp expression in presence of peceol. 49,50 Moreover, poloxamer407 which is the second component of cubosomes is believed to impart membrane permeabilization in addition to its capacity to inhibit the CYP 450 enzymes. 
The later can reduce presystemic metabolism which can be shown in vivo. 51 The anti-inflammatory effect of FEX-HCl cubosomes was researched to verify the recorded enhanced in situ intestinal absorption using carrageenan provoked paw edema technique. Carrageenan induced edema formation was monitored after administration of plain water (negative control), FEX-HCl simple aqueous dispersion or cubosomal formulation. The negative control rats produced typical edema formation curve which was similar to that recorded by other researchers who employed the same. 32,34,52,53 Administration of a single dose FEX-HCl simple suspension was not effective. The recorded non-significant effect of FEX-HCl aqueous dispersion correlates with the recorded in situ intestinal perfusion data which reflected its incomplete absorption from the GIT. This implies poor bioavailability after oral administration which has been shown by other investigators who classified FEX-HCl as poorly permeable drug. 1,9 Poor permeability of FEX-HCl was also assisted by P-gp efflux which has been reflected in our in situ intestinal perfusion studies and highlighted in literature reports. 1,9,11 The recorded enhancement in the anti-inflammatory effect of FEX-HCl after cubosomal administration correlates with the recorded increase in the intestinal permeability from cubosomes. This can suggest that cubosomal dispersion is able to enhance drug absorption of poorly permeable drugs. Alternative mechanisms have been suggested for enhanced oral bioavailability from cubosomes. Being colloidal dispersion, cubosomes can traverse the intestine via lymphatic pathway bypassing the negative effects of pre-systemic disposition. 17,27,47 The effect of colloidal nature of cubosomes is further magnified by the nature of the primary components. For example, peceol is believed to inhibit P-gp efflux and augment trans-lymphatic transport of drug. 
Poloxamer can also contribute by permeabilization of intestinal membrane with additional inhibitory effect on CYP 450 enzymes. 51 These factors combine to explain the recorded increase in the in vivo efficacy of FEX-HCl after administration in cubosomal formulation. To understand further the mechanism of enhanced intestinal absorption of FEX-HCl from cubosomes, the intestinal absorption data were correlated to the amount of drug released at the same time points, determined under the same experimental conditions. The computed Pearson's correlation coefficient values were non-significant in all intestinal segments with negative correlation being noticed in ileum and colon segments. The results indicated that drug release is not the rate limiting factor in enhanced intestinal absorption of FEX-HCl from cubosomes. This is logical taking into consideration the poor permeability nature of FEX-HCl. These results suggest that the recorded enhancement can be mainly due to permeation of drug loaded cubosomes probably via trans-lymphatic pathway. However, this requires further investigations. Conclusion Cubosomes comprising glyceryl mono-oleate with poloxamer407 were successfully prepared as nanostructures with combined spherical and polygonal morphology. Fexofenadine HCl (FEX-HCl) undergoes site-dependent absorptive clearance with the permeability ranking as colon < duodenum < jejunum < ileum, suggesting its liability for intestinal efflux. Cubosomal encapsulation of FEX-HCl increased its intestinal permeability compared with the corresponding aqueous solution. The ability of cubosomes to enhance the intestinal permeability depends on their components' capacity to inhibit P-gp efflux transporters and fluidize intestinal membrane. Drug absorption from cubosomes favored the transcellular pathway. In vivo investigation of anti-inflammatory effect of FEX-HCl revealed cubosomal superiority over the aqueous drug dispersion. 
The study thus introduced cubosomes as a novel carrier for enhanced oral absorption of hydrophilic poorly permeable drugs.
from couchdb.client import Document
from couchdb.client import Row
from unittest.mock import MagicMock


def getRow(flaggedString, category, info):
    """Build a couchdb ``Row`` whose ``'doc'`` entry is a ``Document``
    carrying the three flag fields used by the tests.

    Args:
        flaggedString: text that was flagged.
        category: category label for the flag.
        info: free-form extra information.

    Returns:
        Row: a row with its ``'doc'`` key set to the populated document.
    """
    doc = Document()
    doc["flagged_string"] = flaggedString
    doc["category"] = category
    doc["info"] = info
    row = Row()
    row['doc'] = doc
    return row


def setupMocks(getDbMock, rows):
    """Wire ``getDbMock`` so the code under test sees a fake database.

    After this call, ``getDbMock()`` returns a mock DB whose ``view(...)``
    yields ``rows`` (as a one-shot iterator, matching couchdb's view API).

    Args:
        getDbMock: mock standing in for the get-database factory.
        rows: iterable of row objects the fake view should produce.
    """
    view_stub = MagicMock()
    view_stub.return_value = iter(rows)
    db_stub = MagicMock()
    db_stub.view = view_stub
    getDbMock.return_value = db_stub
import * as React from 'react'; import { Prompt } from "react-router"; import { FormikActions, FormikProps, FormikErrors } from 'formik'; import { History } from 'history'; import flat from 'flat'; import ListingFormNavContainer from './ListingFormNav.container'; import GeneralWrapper from 'components/GeneralWrapper'; import { ListingInput } from 'networking/listings'; import TabNavBar from 'components/TabNavBar'; interface Props { history: History; id: string; formikProps: FormikProps<ListingInput>; onSubmit: (values: ListingInput, actions: FormikActions<Object>) => void; setNextCrumb: (route?: string) => void; showAlert?: boolean; } const ListingFormNav = ({ formikProps, history, id, onSubmit, setNextCrumb, showAlert }: Props): JSX.Element => { const listingFormNavConfig = [ { title: 'Listing Info', to: `/host/listings/${id}/listing_info`, }, { title: 'Accommodations', to: `/host/listings/${id}/accommodations`, }, { title: 'Pricing & Availability', to: `/host/listings/${id}/pricing_availability`, }, { title: 'Check-in Details', to: `/host/listings/${id}/checkin_details`, }, ]; return ( <ListingFormNavContainer> <GeneralWrapper> <Prompt when={showAlert} message={!formikProps.isValid ? formatListingErrorsAlert(flat(formikProps.errors)) : 'Listing has unsaved changes. Are you sure you want to proceed?'}> </Prompt> <TabNavBar config={listingFormNavConfig} /> <a onClick={() => { if (!formikProps.isValid) { history.push('/host/listings'); } else { setNextCrumb(''); onSubmit(formikProps.values, formikProps); } }}> Save &amp; Exit </a> </GeneralWrapper> </ListingFormNavContainer> ); } export default ListingFormNav; function formatListingErrorsAlert(errors: FormikErrors<ListingInput>): string { return `Listing has unsaved changes due to the following errors:\n\n${Object.values(errors).join('\n').toString()}\n\nAre you sure you want to proceed?`; };
//Shade of Jindo class mob_shade_of_jindo : public CreatureScript { public: mob_shade_of_jindo() : CreatureScript("mob_shade_of_jindo") { } struct mob_shade_of_jindoAI : public ScriptedAI { mob_shade_of_jindoAI(Creature *c) : ScriptedAI(c) {} uint32 ShadowShock_Timer; void Reset() { ShadowShock_Timer = 1000; DoCast(me, SPELL_INVISIBLE, true); } void EnterCombat(Unit * ){} void UpdateAI (const uint32 diff) { if (ShadowShock_Timer <= diff) { DoCast(me->getVictim(), SPELL_SHADOWSHOCK); ShadowShock_Timer = 2000; } else ShadowShock_Timer -= diff; DoMeleeAttackIfReady(); } }; CreatureAI* GetAI(Creature* creature) const { return new mob_shade_of_jindoAI(creature); } }
"""Train a fully connected NN as the learned source term of a 2-D steady heat
equation, then embed the trained network in an iterative (Jacobi-style) PDE
solver and compare the converged fields against the test data.

Pipeline: read data / grid / I&BCs -> build the TF1 graph (heat_model_fn) ->
optionally train and checkpoint the NN -> optionally iterate the PDE to
convergence -> write Tecplot files, figures and per-case norms.
"""
# Third-party libraries
from time import perf_counter
import tensorflow as tf # 1.15
import numpy as np
import os
# Modules
from figures import fields_fig_out, cost_fig_out, dev_fig_out, one_dim_fig_out, q_fig_out, q_dev_fig_out, sep_q_fig_out
from inout import set_paths, write_tec_var
from data_loader import load_data
from pde_solution import heat_model_fn
from ibcs import set_ibcs, grid_gen
from nn import init_w_b_fnn

# Launch on gpu '0' or cpu '-1'
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
if tf.test.gpu_device_name():
    print('GPU device is employed')
else:
    print('CPU device is employed')

print('####################################################', '\n',
      '# SETTING INPUT PARAMETERS AND READING DATA ########', '\n',
      '####################################################', '\n')

# GENERAL PARAMETERS
l = 1.0 # Size of the domain [m]
n = 50 # Number nodes in the whole field (including ghost ones for BCs)

# PARAMETERS FOR ITERATIONS AND OUTPUT CONTROL
max_it = 10000 # Maximum iterations for PDE solution
out_freq = 25 # Output frequency during PDE solution
eps = 1.e-6 # Convergence criterion [K]

# NN HYPERPARAMETERS
lambd = 0.e-4 # Regularization parameter (0 disables the L2 penalty below)
learn_rate = 1.e-2 # Learning rate
lr_decay = 0.9999 # Learning rate decay parameter (applied once per epoch)
drop_rate = 0.e0 # Dropout rate in fully connected layers
epochs = 100 # Number of training epochs
mod_sav_hold = 90 # Save the NN only after this number of epochs
nn_out_freq = 100 # Frequency of NN information output
# NOTE(review): nn_out_freq == epochs, so with these settings the periodic
# screen output fires only at epoch 0 and the final epoch.

# FLAGS
is_data_ibc = True # Use data for I&BCs; otherwise - linear initial distribution
is_solve_pde = True # Solve PDEs using NN after training
is_plot_init = True # Plot I&BCs in the beginning
is_plot_split = False # Plot some of the split fields 3x3
is_train_nn = True # Train a NN
is_read_nn_bef = False # Read NN before training (if a pre-trained one exists)
is_read_nn_aft = True # Read NN after training or use last step one
is_src_plot = True # Plot source fields after training
is_tec_out = True # Output tecplot files during PDE solution

# PATHS TO THE FOLDERS
work_dir, data_dir, fig_dir, csv_dir, tec_dir, nn_dir = set_paths()

# DATA (fields plus the min/max used for (de)normalization)
data, q_max, q_min, t_max, t_min = load_data(n, data_dir)

# GRID (spacing h and node coordinates; *_split are per-patch coordinates)
h, x_split, y_split, x, y = grid_gen(n, l)

print('####################################################', '\n',
      '# SETTING AND OUTPUTTING I&BCs #####################', '\n',
      '####################################################', '\n')

# Set I&BCs
q_init, t_init, q_1, q_2, q_3 = set_ibcs(n, h, data.test.tar, data.test.inp, data.test.num_datasets, is_data_ibc, q_max, q_min)

# Tecplot files (one output stream per test case, kept open for the whole run)
res_plt_t_q = []
for k in range(data.test.num_datasets):
    # Open tecplot files
    res_plt_t_q.append(open(tec_dir + 'test/case_' + str(k+1) + '_t_q.plt', 'w'))
    # Tecplot files
    write_tec_var(n, t_init[k, :, :], q_init[k, :, :], y, x, 0, res_plt_t_q[k], 't', 'q')
    # Initial fields
    if is_plot_init:
        # Whole fields
        fields_fig_out(k, y, x, t_init[k, :, :], q_init[k, :, :], fig_dir + 'test/init/')
        # Separated sources
        sep_q_fig_out(k, y, x, q_1[k, :, :], q_2[k, :, :], q_3[k, :, :], fig_dir + 'test/init/sep/')

# Split fields 3x3 - for debugging (check correctness of splitting)
if is_plot_split:
    i = j = 0
    for k in range(0,(n-2)*(n-2)):
        if (k < (n-2)): # first row
        #if (k % (n-2) == 0): # first column
        #if ((k+1) % (n-2) == 0): # last column
        #if (k >= (n-2)*(n-3)): # last row
            fields_fig_out(k, y_split[j,:], x_split[i,:], data.train.inp[k, :, :], data.train.tar[k, :, :], fig_dir + 'train/split/')
            #fields_fig_out(k, y_split[j,:], x_split[i,:], data.validation.inp[k, :, :], data.validation.tar[k, :, :], fig_dir + 'valid/split/')
        j += 1
        if ((k+1) % (n-2) == 0) and (k > 0):
            i += 1
            j = 0

print('####################################################', '\n',
      '# TENSORFLOW GRAPH #################################', '\n',
      '####################################################', '\n')

# Start the tensorflow session
sess = tf.compat.v1.Session()

# Weights and biases initialization for nn
weights, biases = init_w_b_fnn()

# Initialize constants
hh_tf = tf.constant(value=h*h, dtype=tf.float64, shape=())
c025_tf = tf.constant(value=0.25, dtype=tf.float64, shape=())
c05_tf = tf.constant(value=0.5, dtype=tf.float64, shape=())

# Placeholders (3x3 stencils around each interior node)
t_inp_ph = tf.compat.v1.placeholder(tf.float64, shape=[None, 3, 3], name='t_inp_ph') # input temperature
t_tar_ph = tf.compat.v1.placeholder(tf.float64, shape=[None, 3, 3], name='t_tar_ph') # target temperature
rate_ph = tf.compat.v1.placeholder(tf.float64, shape=(), name='rate_ph') # dropout rate

# Update temperature: one Jacobi-style update with the NN-predicted source
t_new, q_nn, q_ex, l2_q = heat_model_fn(t_inp_ph, rate_ph, c025_tf, hh_tf, weights, biases, q_max, q_min, t_max, t_min)

# L2 regularization
reg = lambd*(tf.nn.l2_loss(weights['h1']) + tf.nn.l2_loss(weights['h2']) + \
             tf.nn.l2_loss(weights['h3']) + tf.nn.l2_loss(weights['h4']) + \
             tf.nn.l2_loss(weights['h5']))

# Cost function: residual of the heat-equation update vs the stencil center.
# NOTE(review): only t_inp_ph enters the cost; t_tar_ph is fed below but does
# not appear in the loss — confirm it is intentionally unused.
cost_train = tf.nn.l2_loss(t_new - tf.reshape(t_inp_ph[:, 1:-1, 1:-1], (-1, 1))) + reg
cost = tf.nn.l2_loss(t_new - tf.reshape(t_inp_ph[:, 1:-1, 1:-1], (-1, 1)))

# Adam optimizer (captures the cost_train *tensor*; the name is reused later)
optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=learn_rate, beta1=0.9, beta2=0.999, epsilon=1.e-7, use_locking=False, name='Adam').minimize(cost_train)

# Saver
saver = tf.compat.v1.train.Saver()

# Initialize state to ICs
sess.run(tf.compat.v1.global_variables_initializer())

# NN TRAINING
if is_train_nn:
    if is_read_nn_bef:
        print('####################################################', '\n',
              '# READING THE SAVED NN #############################', '\n',
              '####################################################', '\n')
        saver = tf.compat.v1.train.import_meta_graph(nn_dir + 'fnn.meta')
        saver.restore(sess, nn_dir + 'fnn')

    print('####################################################', '\n',
          '# NN TRAINING ######################################', '\n',
          '####################################################', '\n')

    best_epoch = 0
    best_cost = 1.e6
    best_l2_q = 1.e6
    # NOTE: from here on cost_train is a NumPy history array; the TF tensor of
    # the same name defined above is already captured by the optimizer.
    cost_train = np.zeros(epochs, dtype=np.float64)
    l2_q_train = np.zeros(epochs, dtype=np.float64)
    cost_valid = np.zeros(epochs, dtype=np.float64)
    l2_q_valid = np.zeros(epochs, dtype=np.float64)
    batch_num = 10
    # Train NN
    for i in range(epochs):
        for i_b in range(batch_num):
            # Train
            # NOTE(review): batch_train_y is fetched but never used, and the
            # target feed reuses batch_train_x — verify this is the intended
            # self-consistency training and not a typo.
            batch_train_x, batch_train_y = data.train.next_batch(int(data.train.num_datasets/batch_num))
            fd_train = {t_inp_ph: batch_train_x, t_tar_ph: batch_train_x, rate_ph: drop_rate}
            sess.run(optimizer, feed_dict=fd_train)
        # Evaluate training cost (dropout disabled: rate 0.0)
        fd_evalu = {t_inp_ph: data.train.inp[:, :, :], t_tar_ph: data.train.inp[:, :, :], rate_ph: 0.0}
        cost_train[i] = sess.run(cost, feed_dict=fd_evalu)
        l2_q_train[i] = sess.run(l2_q, feed_dict=fd_evalu)
        # Evaluate validation cost
        fd_valid = {t_inp_ph: data.validation.inp[:, :, :], t_tar_ph: data.validation.inp[:, :, :], rate_ph: 0.0}
        cost_valid[i] = sess.run(cost, feed_dict=fd_valid)
        l2_q_valid[i] = sess.run(l2_q, feed_dict=fd_valid)
        # Track the best validation cost; checkpoint only late in training
        if (cost_valid[i] < best_cost):
            # Print info
            best_cost = cost_valid[i]
            best_l2_q = l2_q_valid[i]
            best_epoch = i
            # Save nn
            if (i > mod_sav_hold):
                print('Saving the model. Epoch = ', i)
                saver.save(sess, nn_dir + 'fnn')
        # Screen output
        if i % nn_out_freq == 0 or i==(epochs-1):
            print ("Epoch:", '%04d' % (i+1), "Training cost =", "{:.12f}".format(cost_train[i]))
            print ("      ", "Validation cost =", "{:.12f}".format(cost_valid[i]))
            print ("      ", "Training L2 norm =", "{:.12f}".format(l2_q_train[i]))
            print ("      ", "Validation L2 norm =", "{:.12f}".format(l2_q_valid[i]))
        # Learning rate decay
        # NOTE(review): this mutates the Python float only; the optimizer was
        # built with the original learn_rate value — confirm the decay is
        # expected to have no effect on the graph.
        learn_rate *= lr_decay

    # Information on the best epoch
    print ("Best epoch = ", best_epoch)
    print ("Best validation cost = ", best_cost)
    print ("Validation L2 norm at that epoch = ", best_l2_q)
    print ("Training cost at that epoch = ", cost_train[best_epoch])
    print ("Training L2 norm at that epoch = ", l2_q_train[best_epoch])
    # Plot cost and RMSE
    cost_fig_out(epochs, cost_train, cost_valid, l2_q_train, l2_q_valid, fig_dir + 'epochs/')

if is_read_nn_aft:
    print('####################################################', '\n',
          '# READING THE SAVED NN #############################', '\n',
          '####################################################', '\n')
    saver = tf.compat.v1.train.import_meta_graph(nn_dir + 'fnn.meta')
    saver.restore(sess, nn_dir + 'fnn')

if is_src_plot:
    print('####################################################', '\n',
          '# PLOTTING SOURCES FIELDS ##########################', '\n',
          '####################################################', '\n')
    fd_evalu = {t_inp_ph: data.train.inp[:, :, :], rate_ph: 0.0}
    # NN-predicted source, exact source and updated temperature, flattened to
    # (num_datasets, -1) for the deviation plots
    q_nn_plot = q_nn.eval(session=sess, feed_dict=fd_evalu)
    q_nn_plot = np.reshape(q_nn_plot, (data.train.num_datasets,-1))
    q_ex_plot = sess.run(q_ex, feed_dict=fd_evalu)
    q_ex_plot = np.reshape(q_ex_plot, (data.train.num_datasets,-1))
    q_split_plot = np.reshape(data.train.tar[:, 1:-1, 1:-1], (data.train.num_datasets,-1))
    q_split_plot = q_split_plot*(q_max - q_min) + q_min # denormalize targets
    t_new_plot = sess.run(t_new, feed_dict=fd_evalu)
    t_new_plot = np.reshape(t_new_plot, (data.train.num_datasets,-1))
    t_split_plot = np.reshape(data.train.inp[:, 1:-1, 1:-1], (data.train.num_datasets,-1))
    q_dev_fig_out(1, q_nn_plot, q_split_plot, fig_dir + 'epochs/q/dev/')
    q_dev_fig_out(2, q_ex_plot, q_split_plot, fig_dir + 'epochs/q/dev/')
    q_dev_fig_out(3, t_new_plot, t_split_plot, fig_dir + 'epochs/t/dev/')

if is_solve_pde:
    print('####################################################', '\n',
          '# SOLUTION OF PDE ##################################', '\n',
          '####################################################', '\n')

    # Initialize variables (the full temperature field, including ghost nodes)
    t_pde = tf.Variable(initial_value=t_init, dtype=tf.float64)
    # Update temperature (dropout hard-coded to 0.0 for inference)
    t_new_pde_, q_nn_pde_, q_ex_pde_, l2_q_pde = heat_model_fn(t_pde, 0.0, c025_tf, hh_tf, weights, biases, q_max, q_min, t_max, t_min)
    t_new_pde = tf.reshape(t_new_pde_, (-1, (n-2), (n-2)))
    q_nn_pde = tf.reshape(q_nn_pde_, (-1, (n-2), (n-2)))
    q_ex_pde = tf.reshape(q_ex_pde_, (-1, (n-2), (n-2)))
    # Update step: write the new interior field back into the variable
    step_temp = tf.group(t_pde[:, 1:-1, 1:-1].assign(t_new_pde))
    # Convergence
    abs_er = tf.abs(t_new_pde - t_pde[:, 1:-1, 1:-1])
    # Initialize state to initial conditions
    # NOTE: tf.initialize_variables is deprecated in TF1.15 in favour of
    # tf.compat.v1.variables_initializer.
    sess.run(tf.initialize_variables([t_pde]))
    # Arrays for errors and cost
    abs_er_max = np.zeros(shape=(max_it, data.test.num_datasets), dtype=np.float64)
    # Time iterations: fixed-point iterate until max error < eps or max_it
    start_time_it = perf_counter()
    it = 0
    max_er = 1.e6
    while (it < max_it) and (max_er > eps):
        # PDE solution
        step_temp.run(session=sess)
        # Error
        # NOTE(review): abs_er is re-evaluated inside the loop for every test
        # case (num_datasets graph runs per iteration) — a single sess.run
        # outside the k-loop would give the same result.
        for k in range(data.test.num_datasets):
            abs_er_max[it, k] = np.max(sess.run(abs_er)[k, :, :])
        max_er = np.max(abs_er_max[it, :])
        # Next iteration
        it += 1
        # Information
        if (it % out_freq == 0):
            print('--------------------------------------------', '\n',
                  'Iteration =', it, '\n',
                  'Max error = ', max_er, '\n',
                  'L2(q) = ', sess.run(l2_q_pde))
            # Tecplot files
            if is_tec_out:
                for k in range(data.test.num_datasets):
                    write_tec_var(n-2, sess.run(t_pde[k, 1:-1, 1:-1]), sess.run(q_nn_pde[k, :, :]), x[1:-1], y[1:-1], it, res_plt_t_q[k], 'p', 'q')
    # Timer
    end_time_it = perf_counter()
    print('--------------------------------------------', '\n',
          'SOLUTION OF PDE IS FINISHED. TIME CONSUMED (S) =', end_time_it-start_time_it, '\n',
          'NUMBER OF ITERATIONS =', it)

    print('####################################################', '\n',
          '# OUTPUT RESULTS ###################################', '\n',
          '####################################################', '\n')

    # Last step values
    t_fin = sess.run(t_pde)
    q_fin = sess.run(q_nn_pde)

    # Deviations for all datasets
    dev_fig_out(np.reshape(t_fin, (n*n*data.test.num_datasets)), np.reshape(data.test.inp, (n*n*data.test.num_datasets)), fig_dir + 'test/t_dev/')
    q_dev_fig_out(1000, np.reshape(q_fin, ((n-2)*(n-2)*data.test.num_datasets)), np.reshape(data.test.tar[:,1:-1,1:-1]*(q_max-q_min) + q_min, ((n-2)*(n-2)*data.test.num_datasets)), fig_dir + 'test/q_dev/')

    for k in range(data.test.num_datasets):
        # Tecplot files
        write_tec_var(n-2, t_fin[k, 1:-1, 1:-1], q_fin[k, :, :], x[1:-1], y[1:-1], it, res_plt_t_q[k], 'p', 'q')
        # Close tecplot files
        res_plt_t_q[k].close()
        # Norms (max absolute deviation of temperature and source per case)
        t_norm = np.amax(np.abs(t_fin[k, :, :] - data.test.inp[k, :, :]))
        q_norm = np.amax(np.abs(q_fin[k, :, :] - data.test.tar[k, 1:-1, 1:-1]*(q_max-q_min) + q_min))
        # NOTE(review): these two names shadow the builtins max() and min().
        max = np.amax(q_fin)
        min = np.amin(q_fin)
        norms = np.array([t_norm, q_norm, max, min])
        np.savetxt(csv_dir + 'case_' + str(k+1) + '_norms.dat', norms, delimiter = " ")
        # Fields
        #fields_fig_out(k, x[1:-1], y[1:-1], t_fin[k, 1:-1, 1:-1], q_fin[k, :, :], fig_dir + 'test/res/fields/')
        q_fig_out(k, x[1:-1], y[1:-1], q_fin[k, :, :], data.test.tar[k, 1:-1, 1:-1]*(q_max-q_min) + q_min, fig_dir + 'test/res/q/')
        q_fig_out(k, x[1:-1], y[1:-1], t_fin[k, 1:-1, 1:-1], data.test.inp[k, 1:-1, 1:-1], fig_dir + 'test/res/t/')
        # 1D results
        one_dim_fig_out(k, it, abs_er_max, eps, fig_dir + 'test/res/1D/')

# Close Tensorflow session
sess.close()
def p2p_download_end(self, data_size):
    """Record the completion of one peer-to-peer download.

    Decrements the in-flight P2P exchange counter, samples the current
    download count into the ``p2p_downloading`` time series at the present
    simulation time, and accumulates the downloaded byte total.

    Args:
        data_size: number of bytes transferred by the finished download.
    """
    timestamp = self.now()
    self.p2p_downloaded_data += data_size
    self.current_p2p_exchanges -= 1
    # NOTE(review): the series is fed current_downloading rather than
    # current_p2p_exchanges — presumably intentional, but worth confirming
    # against the other recorders in this class.
    self.p2p_downloading.insert_or_update(timestamp, self.current_downloading)
#include <stdint.h> #include <stddef.h> #include "libc.h" /* * Compares two strings `s1` and `s2` and returns the diffence between the first * characters that differ. */ int strcmp(const char *s1, const char *s2) { // The index of the current byte size_t i; // The length of `s1` size_t l1; // The length of `s2` size_t l2; i = 0; l1 = strlen(s1); l2 = strlen(s2); while (i < l1 && i < l2 && s1[i] == s2[i]) ++i; return (((unsigned char *) s1)[i] - ((unsigned char *) s2)[i]); }