content
stringlengths
7
2.61M
<gh_stars>0 import { Component, ElementRef } from '@angular/core'; import { Moment } from 'moment'; import { AbstractSelectorComponent } from '../common/abstract-select.component'; import { UiDateTimeState } from '../common/datetime.state'; import { IDate } from '../common/datetime.interface'; @Component({ selector: 'ui-hour-selector', template: ` <div class="ui-datetime-container"> <ul class="hours-of-day ui-datetime-dates"> <li *ngFor="let dateElement of dateListing; let index = index;" class="hour-of-day ui-datetime-date" (mousedown)="selectDate($event, dateElement, index)" [ngClass]="{ 'selected': dateElement.selected, 'active': dateElement.active, 'disabled': !enabled || dateElement.disabled }"> {{dateElement.text}} </li> </ul> </div> ` }) export class UiHourSelectorComponent extends AbstractSelectorComponent { // ------------------------------------------------------------------------- // Constructor // ------------------------------------------------------------------------- constructor(state: UiDateTimeState) { super(state, 'hour'); this.stepsPerRow = 3; this.stepsPerCol = 1; this.perdiodIncrement = 1; } // ------------------------------------------------------------------------- // Public Methods // ------------------------------------------------------------------------- // Override super method generateDateListing(): IDate[] { this.start = this.value.hour(this.value.hour() < 12 ? 0 : 12); this.end = this.start.clone().hour(this.start.hour() + 12); let setSelected: boolean = false; const dateElements: IDate[] = []; for (let i = 1; i < 13; i++) { let current: Moment = this.start.clone().add(i, 'hour'); let dateElement: IDate = { mode: this.displayMode, text: current.format('hh'), date: current, selected: false, active: false, disabled: false }; // set 'disabled' attribute if (!this.isSelectable(current)) { dateElement.disabled = true; } // set 'selected' attribute if (!setSelected && this.isSame(this.value, current)) { dateElement.selected = true; setSelected = true; if (this.focusIndex === -3) { this.focusIndex = i-1; } } dateElements.push(dateElement); } return dateElements; } }
Q: How to encourage New Users (and help the Moderators)? Related to the Moderators Pro-Tempore Nomination thread... and more of an observation than a question... Notwithstanding the need to Encouraging new users through comments and refraining from swift closing/rapid/multiple down-voting it is important that ALL site members (with the pre-requisite reputation) support the working of the site by Editing (rarely Voting Down) poor answers and questions, and Flagging for attention those posts that need it. The more constructive posts that site users make, the less moderator activity needs to be invoked. So far, we're doing OK :) We don't need to be moderators to help with the running of the site. A: Andrew, I note that you have not (yet) asked a question on the main site. When you do, I am sure that you will appreciate the sort of tolerance of new posters that is being discussed in Encouraging new users through comments and refraining from swift closing/rapid/multiple down-voting When people are unsure whether or not to downvote, edit or flag a question, they should consider not only the (highly desirable) aim of ensuring quality in the product but also the need to build a community that will deliver the product we all want. A little forbearance will go a long way in encouraging new members to stay long enough to make worthwhile contributions. A: Andrew, are you suggesting that people aren't working to improve questions? Perhaps you could support this with some examples. Otherwise, I'm not sure why you've posted this. Like @Fortiter, I would prefer to see people working to improve questions that are asked before down-voting them if at all possible. Yes, there are always going to be questions that aren't salvageable, perhaps because they're so far off topic that they don't qualify for the site, or because the questioner isn't willing to work on them with the help of others to improve them. I still believe strongly in helping and encouraging people first whenever possible. A: I don't think Andrew was trying to imply that people were slacking. Stack Exchange Q&A sites work a little differently than traditional forums. Most forum posts are just random snapshots of the Internet. They're a specific moment in time, and those posts are forever frozen with all their flaws, vague phrasing, typos, grammatical errors, the mood of the people at the time, and so forth. On Stack Exchange, unlike the forums, every user can participate in site moderation, through activities such as editing, voting, commenting, voting to close and reopen, editing tag wikis, and participating in reviews. Additionally, since the public beta, Stack Exchange has seen a lot of new users who are new to Stack Exchange, and who are used to the forums, and who haven't yet learned that they too can participate in these things. Therefore, I don't think that Andrew's post was targeted at anyone specifically or intended to say there was a significant lack of participation, just that more is better. Remember, these meta posts are public; anyone can read them, even users who don't even have an account on the main site. Therefore, the message may not be targeted at you specifically. ;)
/** * @author auto create * @version */ public class GetBaselineStatusResponse extends AcsResponse { private Boolean success; private String errorCode; private String errorMessage; private Integer httpStatusCode; private String requestId; private Data data; public Boolean getSuccess() { return this.success; } public void setSuccess(Boolean success) { this.success = success; } public String getErrorCode() { return this.errorCode; } public void setErrorCode(String errorCode) { this.errorCode = errorCode; } public String getErrorMessage() { return this.errorMessage; } public void setErrorMessage(String errorMessage) { this.errorMessage = errorMessage; } public Integer getHttpStatusCode() { return this.httpStatusCode; } public void setHttpStatusCode(Integer httpStatusCode) { this.httpStatusCode = httpStatusCode; } public String getRequestId() { return this.requestId; } public void setRequestId(String requestId) { this.requestId = requestId; } public Data getData() { return this.data; } public void setData(Data data) { this.data = data; } public static class Data { private String baselineName; private Long baselineId; private Long bizdate; private String owner; private Long expTime; private Long finishTime; private Long endCast; private Long slaTime; private Integer priority; private Long projectId; private Float buffer; private String status; private String finishStatus; private Integer inGroupId; private LastInstance lastInstance; private BlockInstance blockInstance; public String getBaselineName() { return this.baselineName; } public void setBaselineName(String baselineName) { this.baselineName = baselineName; } public Long getBaselineId() { return this.baselineId; } public void setBaselineId(Long baselineId) { this.baselineId = baselineId; } public Long getBizdate() { return this.bizdate; } public void setBizdate(Long bizdate) { this.bizdate = bizdate; } public String getOwner() { return this.owner; } public void setOwner(String owner) { this.owner = owner; } public Long getExpTime() { return this.expTime; } public void setExpTime(Long expTime) { this.expTime = expTime; } public Long getFinishTime() { return this.finishTime; } public void setFinishTime(Long finishTime) { this.finishTime = finishTime; } public Long getEndCast() { return this.endCast; } public void setEndCast(Long endCast) { this.endCast = endCast; } public Long getSlaTime() { return this.slaTime; } public void setSlaTime(Long slaTime) { this.slaTime = slaTime; } public Integer getPriority() { return this.priority; } public void setPriority(Integer priority) { this.priority = priority; } public Long getProjectId() { return this.projectId; } public void setProjectId(Long projectId) { this.projectId = projectId; } public Float getBuffer() { return this.buffer; } public void setBuffer(Float buffer) { this.buffer = buffer; } public String getStatus() { return this.status; } public void setStatus(String status) { this.status = status; } public String getFinishStatus() { return this.finishStatus; } public void setFinishStatus(String finishStatus) { this.finishStatus = finishStatus; } public Integer getInGroupId() { return this.inGroupId; } public void setInGroupId(Integer inGroupId) { this.inGroupId = inGroupId; } public LastInstance getLastInstance() { return this.lastInstance; } public void setLastInstance(LastInstance lastInstance) { this.lastInstance = lastInstance; } public BlockInstance getBlockInstance() { return this.blockInstance; } public void setBlockInstance(BlockInstance blockInstance) { this.blockInstance = 
blockInstance; } public static class LastInstance { private Long instanceId; private String status; private Long projectId; private String owner; private Long nodeId; private Long finishTime; private Long endCast; private String nodeName; public Long getInstanceId() { return this.instanceId; } public void setInstanceId(Long instanceId) { this.instanceId = instanceId; } public String getStatus() { return this.status; } public void setStatus(String status) { this.status = status; } public Long getProjectId() { return this.projectId; } public void setProjectId(Long projectId) { this.projectId = projectId; } public String getOwner() { return this.owner; } public void setOwner(String owner) { this.owner = owner; } public Long getNodeId() { return this.nodeId; } public void setNodeId(Long nodeId) { this.nodeId = nodeId; } public Long getFinishTime() { return this.finishTime; } public void setFinishTime(Long finishTime) { this.finishTime = finishTime; } public Long getEndCast() { return this.endCast; } public void setEndCast(Long endCast) { this.endCast = endCast; } public String getNodeName() { return this.nodeName; } public void setNodeName(String nodeName) { this.nodeName = nodeName; } } public static class BlockInstance { private Long instanceId; private String status; private Long projectId; private String owner; private Long nodeId; private Long finishTime; private Long endCast; private String nodeName; public Long getInstanceId() { return this.instanceId; } public void setInstanceId(Long instanceId) { this.instanceId = instanceId; } public String getStatus() { return this.status; } public void setStatus(String status) { this.status = status; } public Long getProjectId() { return this.projectId; } public void setProjectId(Long projectId) { this.projectId = projectId; } public String getOwner() { return this.owner; } public void setOwner(String owner) { this.owner = owner; } public Long getNodeId() { return this.nodeId; } public void setNodeId(Long nodeId) { this.nodeId = nodeId; } public Long getFinishTime() { return this.finishTime; } public void setFinishTime(Long finishTime) { this.finishTime = finishTime; } public Long getEndCast() { return this.endCast; } public void setEndCast(Long endCast) { this.endCast = endCast; } public String getNodeName() { return this.nodeName; } public void setNodeName(String nodeName) { this.nodeName = nodeName; } } } @Override public GetBaselineStatusResponse getInstance(UnmarshallerContext context) { return GetBaselineStatusResponseUnmarshaller.unmarshall(this, context); } @Override public boolean checkShowJsonItemName() { return false; } }
A neural prediction of multi-sensor systems In actual engineering a typical problem concerns the prediction (classification)of successive states of a real world system. The state is often characterized by several measures related to a multi-sensor array. We propose in the paper a clustering approach to the automatic determination of significant zones in the mulitdimensional space where data can be represented and by which the information about the characteristic system state can be classified. Using the approach we will obtain multidimensional time series, which will be validated by considering a particular application concerning the prediction of the vehicular traffic flow.
def close_round(a: util.number, prec: int = None) -> util.number: if a is None: return 0 if type(a) == complex: return complex(close_round(a.real, prec), close_round(a.imag, prec)) if type(a) == int: return a if type(a) != float: return a if "e" in str(a): sig, exp = str(a).split("e") sig = float(sig) rnd = round(sig, prec) if is_close(rnd, sig): sig = rnd return float(str(sig) + "e" + exp) else: rnd = round(a, prec) if is_close(rnd, a): return rnd return a
import type { SidebarConfig } from '@vuepress/theme-default' export const getStarted: SidebarConfig = { text: 'iOS Guide', children: [ '/get-started/', ], } export const guides: SidebarConfig = { text: 'Guides', children: [ '/saving-blobs/', '/resigning-apps/', '/blocking-jailbreak-detection/', { text: 'FutureRestore', link:'/futurerestore/', }, ], } export const troubleshooting: SidebarConfig = { text: 'Troubleshooting', children: [ '/troubleshooting/', '/faq/', '/types-of-jailbreak/', { text: 'Discord', link: 'https://discord.gg/jb' }, ], } export const en: SidebarConfig = { '/': [ getStarted, { text: 'Jailbreaking', children: [ '/installing-taurine/', '/installing-odyssey/', '/installing-chimera/', '/installing-odysseyra1n/', '/using-odysseyn1x/', ], }, { text: 'Package Managers', children: [ { text: 'An explanation', link: '/package-managers/' }, '/using-sileo/', '/using-cydia/', '/recommended-repos/', ], }, guides, troubleshooting, ], '/get-started/iphone': [ getStarted, { text: 'iPhone', children: [ { text: 'iPhone 13', link: '/get-started/iphone/13/', }, { text: 'iPhone 12', link: '/get-started/iphone/12/', }, { text: 'iPhone SE (2020)', link: '/get-started/iphone/se-2/', }, { text: 'iPhone 11', link: '/get-started/iphone/11/', }, { text: 'iPhone XS', link: '/get-started/iphone/xs/', }, { text: 'iPhone XR', link: '/get-started/iphone/xr/', }, { text: 'iPhone X', link: '/get-started/iphone/x/', }, { text: 'iPhone 8', link: '/get-started/iphone/8/', }, { text: 'iPhone 7', link: '/get-started/iphone/7/', }, { text: 'iPhone SE', link: '/get-started/iphone/se/', }, { text: 'iPhone 6S', link: '/get-started/iphone/6s/', }, { text: 'iPhone 6', link: '/get-started/iphone/6/', }, { text: 'iPhone 5S', link: '/get-started/iphone/5s/', }, { text: 'iPhone 5C', link: '/get-started/iphone/5c/', }, { text: 'iPhone 5', link: '/get-started/iphone/5/', }, { text: 'iPhone 4S', link: '/get-started/iphone/4s/', }, { text: 'iPhone 4', link: '/get-started/iphone/4/', }, { text: 'iPhone 3GS', link: '/get-started/iphone/3gs/', }, { text: 'iPhone 3G', link: '/get-started/iphone/3g/', }, { text: 'iPhone 2G', link: '/get-started/iphone/2g/', }, ], }, ], '/get-started/ipad': [ getStarted, { text: 'iPad', children: [ { text: 'iPad 9', link: '/get-started/ipad/9/', }, { text: 'iPad 8', link: '/get-started/ipad/8/', }, { text: 'iPad 7', link: '/get-started/ipad/7/', }, { text: 'iPad 6', link: '/get-started/ipad/6/', }, { text: 'iPad 5', link: '/get-started/ipad/5/', }, { text: 'iPad 4', link: '/get-started/ipad/4/', }, { text: 'iPad 3', link: '/get-started/ipad/3/', }, { text: 'iPad 2', link: '/get-started/ipad/2/', }, { text: 'iPad', link: '/get-started/ipad/1/', }, ], }, { text: 'iPad mini', children: [ { text: 'iPad mini 6', link: '/get-started/ipad/mini-6/', }, { text: 'iPad mini 5', link: '/get-started/ipad/mini-5/', }, { text: 'iPad mini 4', link: '/get-started/ipad/mini-4/', }, { text: 'iPad mini 3', link: '/get-started/ipad/mini-3/', }, { text: 'iPad mini 2', link: '/get-started/ipad/mini-2/', }, { text: 'iPad mini', link: '/get-started/ipad/mini/', }, ], }, { text: 'iPad Pro', children: [ { text: 'iPad Pro 5', link: '/get-started/ipad/pro-5/', }, { text: 'iPad Pro 4', link: '/get-started/ipad/pro-4/', }, { text: 'iPad Pro 3', link: '/get-started/ipad/pro-3/', }, { text: 'iPad Pro 2', link: '/get-started/ipad/pro-2/', }, { text: 'iPad Pro', link: '/get-started/ipad/pro/', }, ], }, { text: 'iPad Air', children: [ { text: 'iPad Air 4', link: '/get-started/ipad/air-4/', }, { text: 'iPad Air 3', link: 
'/get-started/ipad/air-3/', }, { text: 'iPad Air 2', link: '/get-started/ipad/air-2/', }, { text: 'iPad Air', link: '/get-started/ipad/air/', }, ], }, ], '/get-started/ipod': [ getStarted, { text: 'iPod', children: [ { text: 'iPod Touch 7', link: '/get-started/ipod/7/', }, { text: 'iPod Touch 6', link: '/get-started/ipod/6/', }, { text: 'iPod Touch 5', link: '/get-started/ipod/5/', }, { text: 'iPod Touch 4', link: '/get-started/ipod/4/', }, { text: 'iPod Touch 3', link: '/get-started/ipod/3/', }, { text: 'iPod Touch 2', link: '/get-started/ipod/2/', }, { text: 'iPod Touch', link: '/get-started/ipod/1/', }, ], }, ], '/installing-taurine/': [ getStarted, { text: 'Taurine', children: [ '/installing-taurine/', '/installing-taurine/using-sileo/', ], }, guides, troubleshooting, ], '/installing-odyssey/': [ getStarted, { text: 'Odyssey', children: [ '/installing-odyssey/', '/installing-odyssey/using-sileo/', ], }, guides, troubleshooting, ], '/updating-to-12-5-5/': [ getStarted, { text: 'Chimera', children: [ '/updating-to-12-5-5/', '/installing-chimera/', '/installing-chimera/using-sileo/', ], }, guides, troubleshooting, ], '/updating-to-12-5-4-(blobless)/': [ getStarted, { text: 'Chimera', children: [ { text: 'Updating to 12.5.4', link: '/updating-to-12-5-4-(blobless)/', }, '/installing-chimera/', '/installing-chimera/using-sileo/', ], }, guides, troubleshooting, ], '/installing-chimera/': [ getStarted, { text: 'Chimera', children: [ '/installing-chimera/', '/installing-chimera/using-sileo/', ], }, guides, troubleshooting, ], '/installing-odysseyra1n/': [ getStarted, { text: 'Odysseyra1n', children: [ { text: 'Installing Odysseyra1n', link: '/installing-odysseyra1n/', children: [ { text: 'macOS', link: '/installing-odysseyra1n/macos/', }, { text: 'Linux', link: '/installing-odysseyra1n/linux/', }, ], }, '/installing-odysseyra1n/using-sileo/', ], }, guides, troubleshooting, ], '/installing-odysseyra1n-a9x/': [ getStarted, { text: 'Odysseyra1n (A9X)', children: [ { text: 'Installing Odysseyra1n (A9X)', link: '/installing-odysseyra1n-a9x/', children: [ { text: 'macOS', link: '/installing-odysseyra1n-a9x/macos/', }, { text: 'Linux', link: '/installing-odysseyra1n-a9x/linux/', }, ], }, '/installing-odysseyra1n-a9x/using-sileo/', ], }, guides, troubleshooting, ], '/using-odysseyn1x/': [ getStarted, { text: 'Odysseyn1x', children: [ '/using-odysseyn1x/', '/using-odysseyn1x/using-sileo/', ], }, guides, troubleshooting, ], '/using-odysseyn1x-a9x/': [ getStarted, { text: 'Odysseyn1x (A9X)', children: [ '/using-odysseyn1x-a9x/', '/using-odysseyn1x-a9x/using-sileo/', ], }, guides, troubleshooting, ], '/installing-unc0ver/': [ getStarted, { text: 'unc0ver', children: [ '/installing-unc0ver/', '/installing-unc0ver/using-cydia/', ], }, guides, troubleshooting, ], '/installing-unc0ver-fugu14/': [ getStarted, { text: 'unc0ver (Fugu14)', children: [ '/installing-unc0ver-fugu14/', '/installing-unc0ver-fugu14/using-cydia/', ], }, guides, troubleshooting, ], '/installing-electra/': [ getStarted, { text: 'Electra', children: [ '/installing-electra/', '/installing-electra/using-sileo/', ], }, guides, troubleshooting, ], '/installing-doubleh3lix/': [ getStarted, { text: 'doubleh3lix', children: [ '/installing-doubleh3lix/', '/installing-doubleh3lix/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-10-3-3/': [ getStarted, { text: 'Meridian', children: [ '/updating-to-10-3-3/', '/using-meridian/', '/using-meridian/using-cydia/', ], }, guides, troubleshooting, ], '/using-meridian/': [ getStarted, { text: 
'Meridian', children: [ '/using-meridian/', '/using-meridian/using-cydia/', ], }, guides, troubleshooting, ], '/installing-pangu933/': [ getStarted, { text: 'Pangu933', children: [ '/installing-pangu933/', '/installing-pangu933/using-cydia/', ], }, guides, troubleshooting, ], '/installing-kok3shi/': [ getStarted, { text: 'Kok3shi', children: [ '/installing-kok3shi/', '/installing-kok3shi/using-cydia/', ], }, guides, troubleshooting, ], '/installing-pangu7/': [ getStarted, { text: 'Pangu7', children: [ '/installing-pangu7/', '/installing-pangu7/using-cydia/', ], }, guides, troubleshooting, ], '/installing-evasi0n7/': [ getStarted, { text: 'Evasi0n7', children: [ '/installing-evasi0n7/', '/installing-evasi0n7/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-10-3-4/': [ getStarted, { text: 'h3lix', children: [ '/updating-to-10-3-4/', '/installing-h3lix/', '/installing-h3lix/using-cydia/', ], }, guides, troubleshooting, ], '/installing-h3lix/': [ getStarted, { text: 'h3lix', children: [ '/installing-h3lix/', '/installing-h3lix/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-9-3-5/': [ getStarted, { text: 'Phœnix', children: [ '/updating-to-9-3-5/', '/installing-phoenix/', '/installing-phoenix/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-9-3-5-ipsw/': [ getStarted, { text: 'Phœnix', children: [ { text: 'Updating to 9.3.5', link: '/updating-to-9-3-5-ipsw/', }, '/installing-phoenix/', '/installing-phoenix/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-9-3-6/': [ getStarted, { text: 'Phœnix', children: [ '/updating-to-9-3-6/', '/installing-phoenix/', '/installing-phoenix/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-9-3-6-ipsw/': [ getStarted, { text: 'Phœnix', children: [ { text: 'Updating to 9.3.6', link: '/updating-to-9-3-6-ipsw/', }, '/installing-phoenix/', '/installing-phoenix/using-cydia/', ], }, guides, troubleshooting, ], '/installing-phoenix/': [ getStarted, { text: 'Phœnix', children: [ '/installing-phoenix/', '/installing-phoenix/using-cydia/', ], }, guides, troubleshooting, ], '/installing-homedepot/': [ getStarted, { text: 'HomeDepot', children: [ '/installing-homedepot/', '/installing-homedepot/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-8-4-1/': [ getStarted, { text: 'EtasonJB', children: [ '/updating-to-8-4-1/', '/installing-etasonjb/', '/installing-etasonjb/using-cydia/', ], }, guides, troubleshooting, ], '/installing-etasonjb/': [ getStarted, { text: 'EtasonJB', children: [ '/installing-etasonjb/', '/installing-etasonjb/using-cydia/', ], }, guides, troubleshooting, ], '/updating-to-6-1-3/': [ getStarted, { text: 'p0sixspwn', children: [ '/updating-to-6-1-3/', '/installing-p0sixspwn/', '/installing-p0sixspwn/using-cydia/', ], }, guides, troubleshooting, ], '/installing-p0sixspwn/': [ getStarted, { text: 'p0sixspwn', children: [ '/installing-p0sixspwn/', '/installing-p0sixspwn/using-cydia/', ], }, guides, troubleshooting, ], '/installing-daibutsu/': [ getStarted, { text: 'p0sixspwn', children: [ '/installing-p0sixspwn/', '/installing-p0sixspwn/using-cydia/', ], }, guides, troubleshooting, ], '/using-jailbreakme-saffron/': [ getStarted, { text: 'JailbreakMe Saffron', children: [ '/using-jailbreakme-saffron/', '/using-jailbreakme-saffron/using-cydia/', ], }, guides, troubleshooting, ], '/using-jailbreakme-star/': [ getStarted, { text: 'JailbreakMe Star', children: [ '/using-jailbreakme-star/', '/using-jailbreakme-star/using-cydia/', ], }, guides, troubleshooting, ], }
<reponame>aurthurm/my-site # Generated by Django 2.1.1 on 2018-09-15 18:15 import ckeditor.fields from django.db import migrations, models import django.db.models.deletion import home.models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='HomePageHeadings', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('services_title', models.CharField(max_length=255, verbose_name='Services Title')), ('teaching_title', models.CharField(max_length=255, verbose_name='Teaching Title')), ('technologies_title', models.CharField(max_length=255, verbose_name='Technologies Title')), ], ), migrations.CreateModel( name='ServicesEntry', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255, verbose_name='Services Title')), ('content', ckeditor.fields.RichTextField()), ], ), migrations.CreateModel( name='Subject', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255, verbose_name='Subjet Title')), ], ), migrations.CreateModel( name='TeachingEntry', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255, verbose_name='Teaching Title')), ('content', ckeditor.fields.RichTextField()), ('tag', models.CharField(max_length=255, verbose_name='tag')), ('category', models.CharField(max_length=255, verbose_name='category')), ('subject', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='home.Subject')), ], ), migrations.CreateModel( name='TechTools', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255, verbose_name='Image Title')), ('image', models.ImageField(blank=True, help_text='Used for illustration.', upload_to=home.models.image_upload_to_dispatcher, verbose_name='image')), ], ), ]
// BuyCreatorCoin delegates an amount to a validator and associates a post func (k Keeper) PerformBuyCreatorCoin( ctx sdk.Context, username string, creator sdk.AccAddress, buyer sdk.AccAddress, valAddr sdk.ValAddress, amount sdk.Int) error { coin := sdk.NewCoin(fmt.Sprintf("cc/%s/%s", username, creator.String()), amount) stake, found, err := k.GetStake(ctx, 0, curatingtypes.PostID{}, buyer) if err != nil { return err } amt := amount if found { amt = stake.Amount.Add(amount) valAddr, err = sdk.ValAddressFromBech32(stake.Validator) if err != nil { return err } } validator, found := k.stakingKeeper.GetValidator(ctx, valAddr) if !found { return stakingtypes.ErrNoValidatorFound } stake = types.NewStake(0, curatingtypes.PostID{}, buyer, valAddr, amt) k.SetStake(ctx, buyer, stake) _, err = k.stakingKeeper.Delegate(ctx, buyer, amount, stakingtypes.Unbonded, validator, true) if err != nil { return err } if err := k.bankKeeper.MintCoins( ctx, types.ModuleName, sdk.NewCoins(coin), ); err != nil { return err } if err := k.bankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, buyer, sdk.NewCoins(coin)); err != nil { panic( fmt.Sprintf( "unable to send coins from module to account despite previously minting coins to module account: %v", err), ) } ctx.EventManager().EmitEvents(sdk.Events{ sdk.NewEvent( types.EventTypeBuyCreatorCoin, sdk.NewAttribute(types.AttributeKeyUsername, username), sdk.NewAttribute(types.AttributeKeyCreator, creator.String()), sdk.NewAttribute(types.AttributeKeyBuyer, buyer.String()), sdk.NewAttribute(types.AttributeKeyValidator, valAddr.String()), sdk.NewAttribute(types.AttributeKeyAmount, amount.String()), ), }) return nil }
The decision will lead to the creation of 2.15 lakh additional seats in Central Educational Institutions in the next two years, sources said. Out of these, 1,18,983 seats will be created in 2019-20 and another 95,783 in 2020-21. The 10 per cent reservations to EWS, a path breaking move of the Narendra Modi government, has led to the sanction of Rs 4,315.15 crore to be approved for the 158 Central Educational Institutions (CEIs) for its implementation. The cabinet approval came days after the first round of polling in the Lok Sabha elections. Sources said the Ministry of Human Resource Development (MHRD) had issued instructions in January to all the CEIs to provide 10 per cent reservation to EWS without adversely affecting the proportionate reservations for Scheduled Castes/Scheduled Tribes and Socially and Economically Backward Classes and also not reducing seat availability in the General category. The 10 per cent reservation for EWS, approved in January when the 103rd Constitutional Amendment Bill was posted in Parliament is already facing judicial scrutiny.
import math from typing import TYPE_CHECKING, List, Optional, Text, Union import numpy as np from pandas import DataFrame from shapely.geometry import Point from pymove.utils.constants import ( DIST_TO_PREV, EARTH_RADIUS, GEOMETRY, LATITUDE, LONGITUDE, SPEED_TO_PREV, TIME_TO_PREV, ) if TYPE_CHECKING: from pymove.core.dask import DaskMoveDataFrame from pymove.core.pandas import PandasMoveDataFrame def lat_meters(lat: float) -> float: """ Transform latitude degree to meters. Parameters ---------- lat : float This represent latitude value. Returns ------- float Represents the corresponding latitude value in meters. Examples -------- Latitude in Fortaleza: -3.8162973555 >>> from pymove.utils.conversions import lat_meters >>> lat_meters(-3.8162973555) 110826.6722516857 """ rlat = float(lat) * math.pi / 180 # meter per degree Latitude meters_lat = ( 111132.92 - 559.82 * math.cos(2 * rlat) + 1.175 * math.cos(4 * rlat) ) # meter per degree Longitude meters_lgn = 111412.84 * math.cos(rlat) - 93.5 * math.cos(3 * rlat) meters = (meters_lat + meters_lgn) / 2 return meters def meters_to_eps( radius_meters: float, earth_radius: Optional[float] = EARTH_RADIUS ) -> float: """ Converts radius in meters to eps Parameters ---------- radius_meters : float radius in meters earth_radius : float, optional radius of the earth in the location, by default EARTH_RADIUS Returns ------- float radius in eps """ return radius_meters / earth_radius def list_to_str(input_list: List, delimiter: Optional[Text] = ',') -> Text: """ Concatenates list elements, joining them by the separator specified by the parameter "delimiter". Parameters ---------- input_list : list List with elements to be joined. delimiter : str, optional The separator used between elements, by default ','. Returns ------- str Returns a string, resulting from concatenation of list elements, separeted by the delimiter. """ return delimiter.join( [x if isinstance(x, str) else repr(x) for x in input_list] ) def list_to_csv_str(input_list: List) -> Text: """ Concatenates the elements of the list, joining them by ",". Parameters ---------- input_list : list List with elements to be joined. Returns ------- str Returns a string, resulting from concatenation of list elements, separeted by ",". Example ------- >>> from pymove import conversions >>> a = [1, 2, 3, 4, 5] >>> conversions.list_to_csv_str(a) '1 1:2 2:3 3:4 4:5' """ return list_to_str(input_list) def list_to_svm_line(original_list: List) -> Text: """ Concatenates list elements in consecutive element pairs. Parameters ---------- original_list : list The elements to be joined Returns ------- str Returns a string, resulting from concatenation of list elements in consecutive element pairs, separeted by " ". Example ------- >>> from pymove import conversions >>> a = [1, 2, 3, 4, 5] >>> conversions.list_to_svm_line(a) '1 1:2 2:3 3:4 4:5' """ list_size = len(original_list) svm_line = '%s ' % original_list[0] for i in range(1, list_size): svm_line += '%s:%s ' % (i, original_list[i]) return svm_line.rstrip() def lon_to_x_spherical(lon: float) -> float: """ Convert longitude to X EPSG:3857 WGS 84/Pseudo-Mercator. Parameters ---------- lon : float Represents longitude. Returns ------- float X offset from your original position in meters. 
Examples -------- >>> from pymove import conversions >>> conversions.lon_to_x_spherical(-38.501597 ) -4285978.17 References ---------- https://epsg.io/transform """ return 6378137 * np.radians(lon) def lat_to_y_spherical(lat: float) -> float: """ Convert latitude to Y EPSG:3857 WGS 84/Pseudo-Mercator. Parameters ---------- lat : float Represents latitude. Returns ------- float Y offset from your original position in meters. Examples -------- >>> from pymove import conversions >>> conversions.lat_to_y_spherical(-3.797864) -423086.2213610324 References ---------- https://epsg.io/transform """ return 6378137 * np.log(np.tan(np.pi / 4 + np.radians(lat) / 2.0)) def x_to_lon_spherical(x: float) -> float: """ Convert X EPSG:3857 WGS 84 / Pseudo-Mercator to longitude. Parameters ---------- x : float X offset from your original position in meters. Returns ------- float Represents longitude. Examples -------- >>> from pymove import conversions >>> conversions.x_to_lon_spherical(-4285978.17) -38.501597 References ---------- https://epsg.io/transform """ return np.degrees(x / 6378137.0) def y_to_lat_spherical(y: float) -> float: """ Convert Y EPSG:3857 WGS 84 / Pseudo-Mercator to latitude. Parameters ---------- y : float Y offset from your original position in meters. Returns ------- float Represents latitude. Examples -------- >>> from pymove import conversions >>> conversions.y2_lat_spherical(-423086.22) -3.797864 References ---------- https://epsg.io/transform """ return np.degrees(np.arctan(np.sinh(y / 6378137.0))) def geometry_points_to_lat_and_lon( move_data: DataFrame, geometry_label: Optional[Text] = GEOMETRY, drop_geometry: Optional[bool] = True, inplace: Optional[bool] = True ) -> DataFrame: """ Converts the geometry column to latitude and longitude columns (named 'lat' and 'lon'), removing geometries that are not of the Point type. Parameters ---------- move_data : DataFrame Input trajectory data. geometry: str, optional Represents column name of the geometry column, by default GEOMETRY drop_geometry: bool, optional Option to drop the geometry column, by default True inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] move_data = move_data[ move_data[geometry_label].map(type) == Point ] move_data[LONGITUDE] = move_data[geometry_label].map(lambda p: p.x) move_data[LATITUDE] = move_data[geometry_label].map(lambda q: q.y) if drop_geometry: move_data.drop(geometry_label, axis=1, inplace=True) if not inplace: return move_data def lat_and_lon_decimal_degrees_to_decimal( move_data: DataFrame, latitude: Optional[Text] = LATITUDE, longitude: Optional[Text] = LONGITUDE ) -> DataFrame: """ Converts latitude and longitude format from decimal degrees to decimal format. Parameters ---------- move_data : DataFrame Input trajectory data. 
latitude: str, optional Represents column name of the latitude column, by default LATITUDE longitude: str, optional Represents column name of the longitude column, by default LONGITUDE Returns ------- DataFrame A new dataframe with the converted feature """ def _decimal_degree_to_decimal(row): if (row[latitude][-1:] == 'N'): row[latitude] = float(row[latitude][:-1]) else: row[latitude] = float(row[latitude][:-1]) * -1 if (row[longitude][-1:] == 'E'): row[longitude] = float(row[longitude][:-1]) else: row[longitude] = float(row[longitude][:-1]) * -1 return row return move_data.apply(_decimal_degree_to_decimal, axis=1) def ms_to_kmh( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_speed: Optional[Text] = SPEED_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[bool] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in ms, in label_speed column to kmh. Parameters ---------- move_data : DataFrame Input trajectory data. label_speed : str, optional Represents column name of speed, by default SPEED_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_speed not in move_data: move_data.generate_dist_time_speed_features() move_data[label_speed] = move_data[label_speed].apply( lambda row: row * 3.6 ) if new_label is not None: move_data.rename(columns={label_speed: new_label}, inplace=True) if not inplace: return move_data def kmh_to_ms( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_speed: Optional[Text] = SPEED_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in kmh, in label_speed column to ms. Parameters ---------- move_data : DataFame Input trajectory data. label_speed : str, optional Represents column name of speed, by default SPEED_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_speed not in move_data: move_data.generate_dist_time_speed_features() ms_to_kmh(move_data, label_speed) move_data[label_speed] = move_data[label_speed].apply( lambda row: row / 3.6 ) if new_label is not None: move_data.rename(columns={label_speed: new_label}, inplace=True) if not inplace: return move_data def meters_to_kilometers( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_distance: Optional[Text] = DIST_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in meters, in label_distance column to kilometers. Parameters ---------- move_data : DataFame Input trajectory data. 
label_distance : str, optional Represents column name of speed, by default DIST_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_distance not in move_data: move_data.generate_dist_time_speed_features() move_data[label_distance] = move_data[label_distance].apply( lambda row: row / 1000 ) if new_label is not None: move_data.rename(columns={label_distance: new_label}, inplace=True) if not inplace: return move_data def kilometers_to_meters( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_distance: Optional[Text] = DIST_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in kilometers, in label_distance column to meters. Parameters ---------- move_data : DataFame Input trajectory data. label_distance : str, optional Represents column name of speed, by default DIST_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_distance not in move_data: move_data.generate_dist_time_speed_features() meters_to_kilometers(move_data, label_distance) move_data[label_distance] = move_data[label_distance].apply( lambda row: row * 1000 ) if new_label is not None: move_data.rename(columns={label_distance: new_label}, inplace=True) if not inplace: return move_data def seconds_to_minutes( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_time: Optional[Text] = TIME_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in seconds, in label_distance column to minutes. Parameters ---------- move_data : DataFame Input trajectory data. label_time : str, optional Represents column name of speed, by default TIME_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_time not in move_data: move_data.generate_dist_time_speed_features() move_data[label_time] = move_data[label_time].apply( lambda row: row / 60.0 ) if new_label is not None: move_data.rename(columns={label_time: new_label}, inplace=True) if not inplace: return move_data def minute_to_seconds( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_time: Optional[Text] = TIME_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in minutes, in label_distance column to seconds. Parameters ---------- move_data : DataFame Input trajectory data. 
label_time : str, optional Represents column name of speed, by default TIME_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_time not in move_data: move_data.generate_dist_time_speed_features() seconds_to_minutes(move_data, label_time) move_data['time_to_prev'] = move_data['time_to_prev'].apply( lambda row: row * 60.0 ) if new_label is not None: move_data.rename(columns={label_time: new_label}, inplace=True) if not inplace: return move_data def minute_to_hours( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_time: Optional[Text] = TIME_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in minutes, in label_distance column to hours. Parameters ---------- move_data : DataFame Input trajectory data. label_time : str, optional Represents column name of speed, by default TIME_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_time not in move_data: move_data.generate_dist_time_speed_features() seconds_to_minutes(move_data, label_time) move_data[label_time] = move_data[label_time].apply( lambda row: row / 60.0 ) if new_label is not None: move_data.rename(columns={label_time: new_label}, inplace=True) if not inplace: return move_data def hours_to_minute( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_time: Optional[Text] = TIME_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in hours, in label_distance column to minute. Parameters ---------- move_data : DataFame Input trajectory data. label_time : str, optional Represents column name of speed, by default TIME_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_time not in move_data: move_data.generate_dist_time_speed_features() seconds_to_hours(move_data, label_time) move_data[label_time] = move_data[label_time].apply( lambda row: row * 60.0 ) if new_label is not None: move_data.rename(columns={label_time: new_label}, inplace=True) if not inplace: return move_data def seconds_to_hours( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_time: Optional[Text] = TIME_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in seconds, in label_distance column to hours. Parameters ---------- move_data : DataFame Input trajectory data. 
label_time : str, optional Represents column name of speed, by default TIME_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_time not in move_data: move_data.generate_dist_time_speed_features() move_data[label_time] = move_data[label_time].apply( lambda row: row / 3600.0 ) if new_label is not None: move_data.rename(columns={label_time: new_label}, inplace=True) if not inplace: return move_data def hours_to_seconds( move_data: Union['PandasMoveDataFrame', 'DaskMoveDataFrame'], label_time: Optional[Text] = TIME_TO_PREV, new_label: Optional[Text] = None, inplace: Optional[Text] = True, ) -> Optional[Union['PandasMoveDataFrame', 'DaskMoveDataFrame']]: """ Convert values, in hours, in label_distance column to seconds. Parameters ---------- move_data : DataFame Input trajectory data. label_time : str, optional Represents column name of speed, by default TIME_TO_PREV new_label: str, optional Represents a new column that will contain the conversion result, by default None inplace: bool, optional Whether the operation will be done in the original dataframe, by default True Returns ------- DataFrame A new dataframe with the converted feature or None """ if not inplace: move_data = move_data[:] if label_time not in move_data: move_data.generate_dist_time_speed_features() seconds_to_hours(move_data, label_time) move_data[label_time] = move_data[label_time].apply( lambda row: row * 3600.0 ) if new_label is not None: move_data.rename(columns={label_time: new_label}, inplace=True) if not inplace: return move_data
Once again the elitists on Capitol Hill have set their own agenda above that of the constituency. The Saturday vote on the government-run healthcare bill, should be noted as a blatant disregard for the wants and needs of the citizen. That seems to be setting the stage for “taxation, without proper representation“. The ever-increasing cost of such legislation can do nothing good for the citizenry. The government-run rationing of healthcare for seniors and veterans, an over-reaching tax hike for small business, and forced coverage with a criminal mandate is just the base of my concern. The very aspect of such a government reach is surely to be seen as an act against the Constitution. The Constitution is a platform for our very existence, as a nation. Without such accord, we would fall to a dismal standing. In all the writings of this glorious document, I am still not able to understand where the document sets the accordance of the right to healthcare. Further, I can see no legitimacy for such a government-mandated policy. I have taken upon myself to read the Constitution, and it's precedence among the founding of our Republic. Now, let it be known, I am not a constitutional scholar. Yet, it is found to be educational for the citizen to read the Constitution. I encourage all to enlighten themselves with such readings. My children are fully aware of our founding documents, and are encouraged to read and ask questions about such. It can only secure your status as a citizen. As well as a duty and responsibility for all Americans. As to the constitutionality of government-run healthcare, I cannot find it. I would like to know what the the constituency thinks on this matter. What do you think? Is it constitutional? If so, why and how? The great State of Indiana has been burdened by the agenda of the elitists on Capitol Hill, as well as their own State Assembly. The blame has to be laid upon those of guilt, and a measure of that blame can be laid squarely with our local government. Our State Assembly has been nowhere to be seen while the sovereignty of our rights, as a state, has been disseminated. Where are they now? Have they bothered to ask if you needed support from the state level? No, of course not. They use the excuse of, “the federal mandate overtakes that of the state”. Well, all I ask for is representation. Will they stand up and defend the Indiana Constitution? My door has not had a knock. Nor has my phone rang. The support of the people cannot be seen as coming from our own representatives of the state. They should be polling the constituency on what the needs and wants are. Am I right? We pay our taxes, now we want representation. If our very own legislators aren't there to support our needs and wants, then where is it to come from? As Citizens' Sovereign, we are the legitimate convening authority. Such authority is inherent in both the U S Constitution and the Indiana Constitution. U S Constitution: Amendment I Congress shall make no law respecting an establishment of religion, or prohibiting the free exercise thereof; or abridging the freedom of speech, or of the press; or the right of the people peaceably to assemble, and to petition the government for a redress of grievances. Amendment IX The enumeration in the Constitution, of certain rights, shall not be construed to deny or disparage others retained by the people. 
Amendment X The powers not delegated to the United States by the Constitution, nor prohibited by it to the states, are reserved to the states respectively, or to the people. Indiana Constitution: ARTICLE 1. Bill of Rights. Section 1. WE DECLARE, That all people are created equal; that they are endowed by their CREATOR with certain inalienable rights; that among these are life, liberty, and the pursuit of happiness; that all power is inherent in the people; and that all free governments are, and of right ought to be, founded on their authority, and instituted for their peace, safety, and well-being. For the advancement of these ends, the people have, at all times, an indefeasible right to alter and reform their government. Section 2. All people shall be secured in the natural right to worship ALMIGHTY GOD, according to the dictates of their own consciences. Section 3. No law shall, in any case whatever, control the free exercise and enjoyment of religious opinions, or interfere with the rights of conscience. It can be found especially intriguing that Section One of the Indiana Constitution makes statement of “…an indefeasible right to alter and reform their government.” An amazing statement if taken as to the very definition of indefeasible. “Not capable of being annulled, voided, or undone.” That very meaning could be a designation of American Citizen. Am I wrong? If yes, then how so? It is time to set the stage for a return to our own sovereignty, and therefore the proof shown to our elected officials. This proof is the right of authority to that of the Citizen Sovereign. The duties and responsibilities of our elected officials are to be enforced. The enforcement of such duties will be shown by a simple vote. That vote will set the mandate. This vote must be shown at all levels of government. The very aspect of our founding was the need for local representation. Thus, through such legation, the people show strong. Again, I must thank all readers for taking the time to ponder my thoughts on this matter. Remember that I am just one man, and it is all “One Man's Opinion”. “REFORM, REFORM, REFORM!”
Study on the Correction of Saturated SAR Data In this paper, the saturation characteristic of saturated SAR raw data is discussed and a new method based on optimal quantization theory to restore the raw data is proposed. A flow chart shows the procession of the algorithm. Experimental results show that the performances of radar signal processing using the restored data are better than those of traditional means using the raw data.
#include <boost/test/unit_test.hpp> #include <cmath> #include "coordinates.hpp" #include "equal_json.hpp" #include "fixture.hpp" #include "engine/api/flatbuffers/fbresult_generated.h" #include "osrm/coordinate.hpp" #include "osrm/engine_config.hpp" #include "osrm/exception.hpp" #include "osrm/json_container.hpp" #include "osrm/osrm.hpp" #include "osrm/route_parameters.hpp" #include "osrm/status.hpp" osrm::Status run_route_json(const osrm::OSRM &osrm, const osrm::RouteParameters &params, osrm::json::Object &json_result, bool use_json_only_api) { if (use_json_only_api) { return osrm.Route(params, json_result); } osrm::engine::api::ResultT result = osrm::json::Object(); auto rc = osrm.Route(params, result); json_result = result.get<osrm::json::Object>(); return rc; } BOOST_AUTO_TEST_SUITE(route) void test_route_same_coordinates_fixture(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params; params.steps = true; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); // unset snapping dependent hint for (auto &itr : json_result.values["waypoints"].get<json::Array>().values) { // Hint values aren't stable, so blank it out itr.get<json::Object>().values["hint"] = ""; // Round value to 6 decimal places for double comparison later itr.get<json::Object>().values["distance"] = round(itr.get<json::Object>().values["distance"].get<json::Number>().value * 1000000); } const auto location = json::Array{{{7.437070}, {43.749248}}}; json::Object reference{ {{"code", "Ok"}, {"waypoints", json::Array{{json::Object{{{"name", "<NAME>"}, {"location", location}, {"distance", round(0.137249 * 1000000)}, {"hint", ""}}}, json::Object{{{"name", "<NAME>"}, {"location", location}, {"distance", round(0.137249 * 1000000)}, {"hint", ""}}}}}}, {"routes", json::Array{{json::Object{ {{"distance", 0.}, {"duration", 0.}, {"weight", 0.}, {"weight_name", "routability"}, {"geometry", "yw_jGupkl@??"}, {"legs", json::Array{{json::Object{ {{"distance", 0.}, {"duration", 0.}, {"weight", 0.}, {"summary", "Bou<NAME>"}, {"steps", json::Array{{{json::Object{{{"duration", 0.}, {"distance", 0.}, {"weight", 0.}, {"geometry", "yw_jGupkl@??"}, {"name", "<NAME>"}, {"mode", "driving"}, {"driving_side", "right"}, {"maneuver", json::Object{{ {"location", location}, {"bearing_before", 0}, {"bearing_after", 238}, {"type", "depart"}, }}}, {"intersections", json::Array{{json::Object{ {{"location", location}, {"bearings", json::Array{{238}}}, {"entry", json::Array{{json::True()}}}, {"out", 0}}}}}}}}}, json::Object{{{"duration", 0.}, {"distance", 0.}, {"weight", 0.}, {"geometry", "yw_jGupkl@"}, {"name", "<NAME>"}, {"mode", "driving"}, {"driving_side", "right"}, {"maneuver", json::Object{{{"location", location}, {"bearing_before", 238}, {"bearing_after", 0}, {"type", "arrive"}}}}, {"intersections", json::Array{{json::Object{ {{"location", location}, {"bearings", json::Array{{58}}}, {"entry", json::Array{{json::True()}}}, {"in", 0}}}}}} }}}}}}}}}}}}}}}}}; CHECK_EQUAL_JSON(reference, json_result); } BOOST_AUTO_TEST_CASE(test_route_same_coordinates_fixture_old_api) { test_route_same_coordinates_fixture(true); } BOOST_AUTO_TEST_CASE(test_route_same_coordinates_fixture_new_api) { test_route_same_coordinates_fixture(false); } void test_route_same_coordinates(bool use_json_only_api) { auto osrm = 
getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params; params.steps = true; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); const auto code = json_result.values.at("code").get<json::String>().value; BOOST_CHECK_EQUAL(code, "Ok"); const auto &waypoints = json_result.values.at("waypoints").get<json::Array>().values; BOOST_CHECK(waypoints.size() == params.coordinates.size()); for (const auto &waypoint : waypoints) { const auto &waypoint_object = waypoint.get<json::Object>(); // nothing can be said about name, empty or contains name of the street const auto name = waypoint_object.values.at("name").get<json::String>().value; BOOST_CHECK(((void)name, true)); const auto location = waypoint_object.values.at("location").get<json::Array>().values; const auto longitude = location[0].get<json::Number>().value; const auto latitude = location[1].get<json::Number>().value; BOOST_CHECK(longitude >= -180. && longitude <= 180.); BOOST_CHECK(latitude >= -90. && latitude <= 90.); const auto hint = waypoint_object.values.at("hint").get<json::String>().value; BOOST_CHECK(!hint.empty()); } const auto &routes = json_result.values.at("routes").get<json::Array>().values; BOOST_REQUIRE_GT(routes.size(), 0); for (const auto &route : routes) { const auto &route_object = route.get<json::Object>(); const auto distance = route_object.values.at("distance").get<json::Number>().value; BOOST_CHECK_EQUAL(distance, 0); const auto duration = route_object.values.at("duration").get<json::Number>().value; BOOST_CHECK_EQUAL(duration, 0); // geometries=polyline by default const auto geometry = route_object.values.at("geometry").get<json::String>().value; BOOST_CHECK(!geometry.empty()); const auto &legs = route_object.values.at("legs").get<json::Array>().values; BOOST_CHECK(!legs.empty()); for (const auto &leg : legs) { const auto &leg_object = leg.get<json::Object>(); const auto distance = leg_object.values.at("distance").get<json::Number>().value; BOOST_CHECK_EQUAL(distance, 0); const auto duration = leg_object.values.at("duration").get<json::Number>().value; BOOST_CHECK_EQUAL(duration, 0); // nothing can be said about summary, empty or contains human readable summary const auto summary = leg_object.values.at("summary").get<json::String>().value; BOOST_CHECK(((void)summary, true)); const auto &steps = leg_object.values.at("steps").get<json::Array>().values; BOOST_CHECK(!steps.empty()); std::size_t step_count = 0; for (const auto &step : steps) { const auto &step_object = step.get<json::Object>(); const auto distance = step_object.values.at("distance").get<json::Number>().value; BOOST_CHECK_EQUAL(distance, 0); const auto duration = step_object.values.at("duration").get<json::Number>().value; BOOST_CHECK_EQUAL(duration, 0); // geometries=polyline by default const auto geometry = step_object.values.at("geometry").get<json::String>().value; BOOST_CHECK(!geometry.empty()); // nothing can be said about name, empty or contains way name const auto name = step_object.values.at("name").get<json::String>().value; BOOST_CHECK(((void)name, true)); // nothing can be said about mode, contains mode of transportation const auto mode = step_object.values.at("mode").get<json::String>().value; BOOST_CHECK(!name.empty()); const auto &maneuver = 
step_object.values.at("maneuver").get<json::Object>().values; const auto type = maneuver.at("type").get<json::String>().value; BOOST_CHECK(!type.empty()); const auto &intersections = step_object.values.at("intersections").get<json::Array>().values; for (auto &intersection : intersections) { const auto &intersection_object = intersection.get<json::Object>().values; const auto location = intersection_object.at("location").get<json::Array>().values; const auto longitude = location[0].get<json::Number>().value; const auto latitude = location[1].get<json::Number>().value; BOOST_CHECK(longitude >= -180. && longitude <= 180.); BOOST_CHECK(latitude >= -90. && latitude <= 90.); const auto &bearings = intersection_object.at("bearings").get<json::Array>().values; BOOST_CHECK(!bearings.empty()); const auto &entries = intersection_object.at("entry").get<json::Array>().values; BOOST_CHECK(bearings.size() == entries.size()); for (const auto &bearing : bearings) BOOST_CHECK(0. <= bearing.get<json::Number>().value && bearing.get<json::Number>().value <= 360.); if (step_count > 0) { const auto in = intersection_object.at("in").get<json::Number>().value; BOOST_CHECK(in < bearings.size()); } if (step_count + 1 < steps.size()) { const auto out = intersection_object.at("out").get<json::Number>().value; BOOST_CHECK(out < bearings.size()); } } // modifier is optional // TODO(daniel-j-h): // exit is optional // TODO(daniel-j-h): ++step_count; } } } } BOOST_AUTO_TEST_CASE(test_route_same_coordinates_old_api) { test_route_same_coordinates(true); } BOOST_AUTO_TEST_CASE(test_route_same_coordinates_new_api) { test_route_same_coordinates(false); } void test_route_same_coordinates_no_waypoints(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params; params.skip_waypoints = true; params.steps = true; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); const auto code = json_result.values.at("code").get<json::String>().value; BOOST_CHECK_EQUAL(code, "Ok"); BOOST_CHECK(json_result.values.find("waypoints") == json_result.values.end()); const auto &routes = json_result.values.at("routes").get<json::Array>().values; BOOST_REQUIRE_GT(routes.size(), 0); for (const auto &route : routes) { const auto &route_object = route.get<json::Object>(); const auto distance = route_object.values.at("distance").get<json::Number>().value; BOOST_CHECK_EQUAL(distance, 0); const auto duration = route_object.values.at("duration").get<json::Number>().value; BOOST_CHECK_EQUAL(duration, 0); // geometries=polyline by default const auto geometry = route_object.values.at("geometry").get<json::String>().value; BOOST_CHECK(!geometry.empty()); const auto &legs = route_object.values.at("legs").get<json::Array>().values; BOOST_CHECK(!legs.empty()); // The rest of legs contents is verified by test_route_same_coordinates } } BOOST_AUTO_TEST_CASE(test_route_same_coordinates_no_waypoints_old_api) { test_route_same_coordinates_no_waypoints(true); } BOOST_AUTO_TEST_CASE(test_route_same_coordinates_no_waypoints_new_api) { test_route_same_coordinates_no_waypoints(false); } void test_route_response_for_locations_in_small_component(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; const auto locations = 
get_locations_in_small_component(); RouteParameters params; params.coordinates.push_back(locations.at(0)); params.coordinates.push_back(locations.at(1)); params.coordinates.push_back(locations.at(2)); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); const auto code = json_result.values.at("code").get<json::String>().value; BOOST_CHECK_EQUAL(code, "Ok"); const auto &waypoints = json_result.values.at("waypoints").get<json::Array>().values; BOOST_CHECK_EQUAL(waypoints.size(), params.coordinates.size()); for (const auto &waypoint : waypoints) { const auto &waypoint_object = waypoint.get<json::Object>(); const auto location = waypoint_object.values.at("location").get<json::Array>().values; const auto longitude = location[0].get<json::Number>().value; const auto latitude = location[1].get<json::Number>().value; BOOST_CHECK(longitude >= -180. && longitude <= 180.); BOOST_CHECK(latitude >= -90. && latitude <= 90.); } } BOOST_AUTO_TEST_CASE(test_route_response_for_locations_in_small_component_old_api) { test_route_response_for_locations_in_small_component(true); } BOOST_AUTO_TEST_CASE(test_route_response_for_locations_in_small_component_new_api) { test_route_response_for_locations_in_small_component(false); } void test_route_response_for_locations_in_big_component(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; const auto locations = get_locations_in_big_component(); RouteParameters params; params.coordinates.push_back(locations.at(0)); params.coordinates.push_back(locations.at(1)); params.coordinates.push_back(locations.at(2)); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); const auto code = json_result.values.at("code").get<json::String>().value; BOOST_CHECK_EQUAL(code, "Ok"); const auto &waypoints = json_result.values.at("waypoints").get<json::Array>().values; BOOST_CHECK_EQUAL(waypoints.size(), params.coordinates.size()); for (const auto &waypoint : waypoints) { const auto &waypoint_object = waypoint.get<json::Object>(); const auto location = waypoint_object.values.at("location").get<json::Array>().values; const auto longitude = location[0].get<json::Number>().value; const auto latitude = location[1].get<json::Number>().value; BOOST_CHECK(longitude >= -180. && longitude <= 180.); BOOST_CHECK(latitude >= -90. 
&& latitude <= 90.); } } BOOST_AUTO_TEST_CASE(test_route_response_for_locations_in_big_component_old_api) { test_route_response_for_locations_in_big_component(true); } BOOST_AUTO_TEST_CASE(test_route_response_for_locations_in_big_component_new_api) { test_route_response_for_locations_in_big_component(false); } void test_route_response_for_locations_across_components(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; const auto big_component = get_locations_in_big_component(); const auto small_component = get_locations_in_small_component(); RouteParameters params; params.coordinates.push_back(small_component.at(0)); params.coordinates.push_back(big_component.at(0)); params.coordinates.push_back(small_component.at(1)); params.coordinates.push_back(big_component.at(1)); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); const auto code = json_result.values.at("code").get<json::String>().value; BOOST_CHECK_EQUAL(code, "Ok"); const auto &waypoints = json_result.values.at("waypoints").get<json::Array>().values; BOOST_CHECK_EQUAL(waypoints.size(), params.coordinates.size()); for (const auto &waypoint : waypoints) { const auto &waypoint_object = waypoint.get<json::Object>(); const auto location = waypoint_object.values.at("location").get<json::Array>().values; const auto longitude = location[0].get<json::Number>().value; const auto latitude = location[1].get<json::Number>().value; BOOST_CHECK(longitude >= -180. && longitude <= 180.); BOOST_CHECK(latitude >= -90. && latitude <= 90.); } } BOOST_AUTO_TEST_CASE(test_route_response_for_locations_across_components_old_api) { test_route_response_for_locations_across_components(true); } BOOST_AUTO_TEST_CASE(test_route_response_for_locations_across_components_new_api) { test_route_response_for_locations_across_components(false); } void test_route_user_disables_generating_hints(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params; params.steps = true; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); params.generate_hints = false; json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); for (auto waypoint : json_result.values["waypoints"].get<json::Array>().values) BOOST_CHECK_EQUAL(waypoint.get<json::Object>().values.count("hint"), 0); } BOOST_AUTO_TEST_CASE(test_route_user_disables_generating_hints_old_api) { test_route_user_disables_generating_hints(true); } BOOST_AUTO_TEST_CASE(test_route_user_disables_generating_hints_new_api) { test_route_user_disables_generating_hints(false); } void speed_annotation_matches_duration_and_distance(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params; params.annotations_type = RouteParameters::AnnotationsType::Duration | RouteParameters::AnnotationsType::Distance | RouteParameters::AnnotationsType::Speed; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); const auto &routes = json_result.values["routes"].get<json::Array>().values; const auto &legs = 
routes[0].get<json::Object>().values.at("legs").get<json::Array>().values; const auto &annotation = legs[0].get<json::Object>().values.at("annotation").get<json::Object>(); const auto &speeds = annotation.values.at("speed").get<json::Array>().values; const auto &durations = annotation.values.at("duration").get<json::Array>().values; const auto &distances = annotation.values.at("distance").get<json::Array>().values; int length = speeds.size(); BOOST_CHECK_EQUAL(length, 1); for (int i = 0; i < length; i++) { auto speed = speeds[i].get<json::Number>().value; auto duration = durations[i].get<json::Number>().value; auto distance = distances[i].get<json::Number>().value; auto calc = std::round(distance / duration * 10.) / 10.; BOOST_CHECK_EQUAL(speed, std::isnan(calc) ? 0 : calc); // Because we route from/to the same location, all annotations should be 0; BOOST_CHECK_EQUAL(speed, 0); BOOST_CHECK_EQUAL(distance, 0); BOOST_CHECK_EQUAL(duration, 0); } } BOOST_AUTO_TEST_CASE(speed_annotation_matches_duration_and_distance_old_api) { speed_annotation_matches_duration_and_distance(true); } BOOST_AUTO_TEST_CASE(speed_annotation_matches_duration_and_distance_new_api) { speed_annotation_matches_duration_and_distance(false); } void test_manual_setting_of_annotations_property(bool use_json_only_api) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params{}; params.annotations = true; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); json::Object json_result; const auto rc = run_route_json(osrm, params, json_result, use_json_only_api); BOOST_CHECK(rc == Status::Ok); const auto code = json_result.values.at("code").get<json::String>().value; BOOST_CHECK_EQUAL(code, "Ok"); auto annotations = json_result.values["routes"] .get<json::Array>() .values[0] .get<json::Object>() .values["legs"] .get<json::Array>() .values[0] .get<json::Object>() .values["annotation"] .get<json::Object>() .values; BOOST_CHECK_EQUAL(annotations.size(), 6); } BOOST_AUTO_TEST_CASE(test_manual_setting_of_annotations_property_old_api) { test_manual_setting_of_annotations_property(true); } BOOST_AUTO_TEST_CASE(test_manual_setting_of_annotations_property_new_api) { test_manual_setting_of_annotations_property(false); } BOOST_AUTO_TEST_CASE(test_route_serialize_fb) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params; params.steps = true; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); engine::api::ResultT result = flatbuffers::FlatBufferBuilder(); const auto rc = osrm.Route(params, result); BOOST_CHECK(rc == Status::Ok); auto &fb_result = result.get<flatbuffers::FlatBufferBuilder>(); auto fb = engine::api::fbresult::GetFBResult(fb_result.GetBufferPointer()); BOOST_CHECK(!fb->error()); BOOST_CHECK(fb->waypoints() != nullptr); const auto waypoints = fb->waypoints(); BOOST_CHECK(waypoints->size() == params.coordinates.size()); for (const auto waypoint : *waypoints) { const auto longitude = waypoint->location()->longitude(); const auto latitude = waypoint->location()->latitude(); BOOST_CHECK(longitude >= -180. && longitude <= 180.); BOOST_CHECK(latitude >= -90. 
&& latitude <= 90.); BOOST_CHECK(!waypoint->hint()->str().empty()); } BOOST_CHECK(fb->routes() != nullptr); const auto routes = fb->routes(); BOOST_REQUIRE_GT(routes->size(), 0); for (const auto route : *routes) { BOOST_CHECK_EQUAL(route->distance(), 0); BOOST_CHECK_EQUAL(route->duration(), 0); const auto &legs = route->legs(); BOOST_CHECK(legs->size() > 0); for (const auto leg : *legs) { BOOST_CHECK_EQUAL(leg->distance(), 0); BOOST_CHECK_EQUAL(leg->duration(), 0); BOOST_CHECK(leg->steps() != nullptr); const auto steps = leg->steps(); BOOST_CHECK(steps->size() > 0); std::size_t step_count = 0; for (const auto step : *steps) { BOOST_CHECK_EQUAL(step->distance(), 0); BOOST_CHECK_EQUAL(step->duration(), 0); BOOST_CHECK(step->maneuver() != nullptr); BOOST_CHECK(step->intersections() != nullptr); const auto intersections = step->intersections(); for (auto intersection : *intersections) { const auto longitude = intersection->location()->longitude(); const auto latitude = intersection->location()->latitude(); BOOST_CHECK(longitude >= -180. && longitude <= 180.); BOOST_CHECK(latitude >= -90. && latitude <= 90.); BOOST_CHECK(intersection->bearings() != nullptr); const auto bearings = intersection->bearings(); BOOST_CHECK(bearings->size() > 0); for (const auto bearing : *bearings) BOOST_CHECK(0. <= bearing && bearing <= 360.); if (step_count > 0) { BOOST_CHECK(intersection->in_bearing() < bearings->size()); } if (step_count + 1 < steps->size()) { BOOST_CHECK(intersection->out_bearing() < bearings->size()); } } ++step_count; } } } } BOOST_AUTO_TEST_CASE(test_route_serialize_fb_skip_waypoints) { auto osrm = getOSRM(OSRM_TEST_DATA_DIR "/ch/monaco.osrm"); using namespace osrm; RouteParameters params; params.skip_waypoints = true; params.steps = true; params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); params.coordinates.push_back(get_dummy_location()); engine::api::ResultT result = flatbuffers::FlatBufferBuilder(); const auto rc = osrm.Route(params, result); BOOST_CHECK(rc == Status::Ok); auto &fb_result = result.get<flatbuffers::FlatBufferBuilder>(); auto fb = engine::api::fbresult::GetFBResult(fb_result.GetBufferPointer()); BOOST_CHECK(!fb->error()); BOOST_CHECK(fb->waypoints() == nullptr); BOOST_CHECK(fb->routes() != nullptr); const auto routes = fb->routes(); BOOST_REQUIRE_GT(routes->size(), 0); for (const auto route : *routes) { BOOST_CHECK_EQUAL(route->distance(), 0); BOOST_CHECK_EQUAL(route->duration(), 0); const auto &legs = route->legs(); BOOST_CHECK(legs->size() > 0); // Rest of the content is verified by test_route_serialize_fb } } BOOST_AUTO_TEST_SUITE_END()
// Package clause and imports added so the excerpt compiles; the package name is assumed.
package curry

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestCurry tests the curry concept.
func TestCurry(t *testing.T) {
	upper := curry("arg", func(arg string) (string, error) {
		return strings.ToUpper(arg), nil
	})
	assert.NotNil(t, upper)

	asUppercase, err := upper()
	assert.NoError(t, err)
	assert.Equal(t, "ARG", asUppercase)
}
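The test above exercises a curry helper that is not part of this excerpt. What follows is a minimal sketch of what it could look like, assuming it does nothing more than bind the string argument into a zero-argument thunk; the signature is inferred from the call site.

// Hypothetical companion file for the test above.
package curry

// curry binds arg to fn and returns a thunk that performs the
// (possibly failing) transformation only when invoked.
func curry(arg string, fn func(string) (string, error)) func() (string, error) {
	return func() (string, error) {
		return fn(arg)
	}
}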
A HILARIOUS internet gallery shows exactly why you should never give your mobile phone number to a neighbour.

When it comes to annoying neighbours, we’ve all been there. Whether they’re too loud or partial to a strange request, an unfortunate neighbour can cause all sorts of grief.

While some of us are blessed with perfect people next door, others aren’t quite as fortunate – and their texts prove exactly why you might want to think twice about sharing your number with a neighbour.

From noisy gardeners to early-morning vacuumers, these neighbours have seriously annoyed those living next door – and unfortunately they’d given them their mobile number first.

Captions from the gallery: “Hoover would put up with this?”, “Do you want them to cook it too?”, “Can of worms - open.”

Previously, we revealed the most hilarious messages that have been sent to the wrong number and the funniest text messages sent by grandparents.
// web/src/pages/Blog/EditBlogPage/EditBlogPage.tsx

import EditBlogCell from 'src/components/Blog/EditBlogCell'

type BlogPageProps = {
  id: number
}

const EditBlogPage = ({ id }: BlogPageProps) => {
  return <EditBlogCell id={id} />
}

export default EditBlogPage
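EditBlogPage expects the router to supply a numeric id. In a Redwood app a page like this is mounted in web/src/Routes.tsx. The sketch below is illustrative: the path and route name are assumptions, while the {id:Int} parameter syntax is Redwood's standard way of coercing the URL segment to a number. Redwood normally auto-imports pages referenced in routes; the explicit import is shown only to keep the sketch self-contained.

// web/src/Routes.tsx (illustrative excerpt; path and route name are assumed)
import { Router, Route } from '@redwoodjs/router'

import EditBlogPage from 'src/pages/Blog/EditBlogPage/EditBlogPage'

const Routes = () => {
  return (
    <Router>
      {/* {id:Int} converts the matched URL segment to a number before it reaches the page */}
      <Route path="/blog/{id:Int}/edit" page={EditBlogPage} name="editBlog" />
    </Router>
  )
}

export default Routes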
1,484 clones of 104 strains, representing 11 genera of the family Enterobacteriaceae and having different levels of antilysozyme activity, were tested for the presence of this activity. Heterogeneity in the level of antilysozyme activity was noted within the populations of 53.9% of the strains under study. In 13.4% of the strains, clones having no antilysozyme activity were detected. Conversely, among Escherichia, Hafnia and Enterobacter strains without antilysozyme activity, individual clones possessing such activity were detected; these clones constituted 3-17% of the total number of clones studied.
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.xml.breadcrumbs;

import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.EditorSettingsExternalizable;
import org.jetbrains.annotations.NotNull;

class ToggleBreadcrumbsSettingsAction extends ToggleBreadcrumbsAction {

  static final class ShowAbove extends ToggleBreadcrumbsSettingsAction {
    ShowAbove() {
      super(true, true);
    }
  }

  static final class ShowBelow extends ToggleBreadcrumbsSettingsAction {
    ShowBelow() {
      super(true, false);
    }
  }

  static final class HideBoth extends ToggleBreadcrumbsSettingsAction {
    HideBoth() {
      super(false, false);
    }
  }

  private final boolean show;
  private final boolean above;

  private ToggleBreadcrumbsSettingsAction(boolean show, boolean above) {
    this.show = show;
    this.above = above;
  }

  @Override
  public boolean isSelected(@NotNull AnActionEvent event) {
    boolean selected = super.isSelected(event);
    if (show && selected) {
      // "show above"/"show below" counts as selected only if the placement matches too
      return above == EditorSettingsExternalizable.getInstance().isBreadcrumbsAbove();
    }
    return !show && !selected;
  }

  @Override
  public void setSelected(@NotNull AnActionEvent event, boolean selected) {
    Editor editor = findEditor(event);
    // clear any per-editor override before applying the global setting
    boolean modified = editor != null && BreadcrumbsForceShownSettings.setForcedShown(null, editor);
    EditorSettingsExternalizable settings = EditorSettingsExternalizable.getInstance();
    if (settings.setBreadcrumbsShown(show)) modified = true;
    if (show) {
      if (settings.setBreadcrumbsAbove(above)) modified = true;
      String languageID = findLanguageID(event);
      if (languageID != null && settings.setBreadcrumbsShownFor(languageID, true)) modified = true;
    }
    if (modified) {
      UISettings.getInstance().fireUISettingsChanged();
    }
  }
}
import { Command } from 'discord-akairo';
import { Message, MessageEmbed } from 'discord.js';

export default class InviteCommand extends Command {
  public constructor() {
    super('invite', {
      aliases: ['invite'],
      description: { content: 'Get the bot\'s invite link' },
      category: 'myCommands',
      ratelimit: 0,
    });
  }

  public async exec(message: Message): Promise<Message> {
    const embed = new MessageEmbed()
      .setAuthor(`${this.client.user.username} | Invite`)
      .setDescription(`Hey **${message.author.username}**! You can invite me [here](https://discord.com/oauth2/authorize?client_id=723414837975318540&permissions=8&scope=bot) \n\nI hope you like the bot!`)
      .setColor('RANDOM');

    return message.util.send(embed);
  }
}
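For message.util.send to work in the command above, discord-akairo's CommandUtil must be enabled on the command handler. Below is a minimal sketch of the client wiring this command assumes; the commands directory, prefix, and owner ID are illustrative placeholders.

import { AkairoClient, CommandHandler } from 'discord-akairo';

class BotClient extends AkairoClient {
  public commandHandler: CommandHandler = new CommandHandler(this, {
    directory: './commands/', // assumed folder holding InviteCommand and the rest
    prefix: '!',              // illustrative prefix
    commandUtil: true,        // required for message.util in exec()
  });

  public constructor() {
    super({ ownerID: 'YOUR_USER_ID' }); // placeholder owner ID
    this.commandHandler.loadAll();
  }
}

new BotClient().login(process.env.DISCORD_TOKEN);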
# akshay-raj-m/newfile
x = 10
y = 22
c = x + y
print(c)  # prints 32


def foo(x):
    print(x - 1)


foo(3)  # prints 2
package com.smilegatemegaport.coupon.controller;

import com.smilegatemegaport.coupon.domain.dto.CouponRequest;
import com.smilegatemegaport.coupon.domain.dto.CouponResponse;
import com.smilegatemegaport.coupon.domain.entity.Coupon;
import com.smilegatemegaport.coupon.exception.CouponException;
import com.smilegatemegaport.coupon.service.CouponService;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.data.web.config.PageableHandlerMethodArgumentResolverCustomizer;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.*;

@RestController
@RequestMapping("/api/v1/coupon")
@RequiredArgsConstructor
public class CouponController {

    private final CouponService couponService;

    @PostMapping(consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseStatus(HttpStatus.OK)
    public void issueCoupon(@RequestBody CouponRequest couponRequest) {
        couponService.issueCoupon(couponRequest.getPhoneNumber());
    }

    @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseStatus(HttpStatus.OK)
    public Page<Coupon> getCoupons(Pageable pageable) {
        return couponService.getCoupons(pageable);
    }

    // Translate domain-level coupon failures (e.g. duplicate issue) into 409 responses
    @ExceptionHandler
    public ResponseEntity<CouponResponse> handleException(CouponException e) {
        return ResponseEntity.status(HttpStatus.CONFLICT).body(e.toCouponResponse());
    }

    // Makes ?page=1 refer to the first page instead of Spring Data's default 0-based index
    @Component
    public static class CustomConfig implements PageableHandlerMethodArgumentResolverCustomizer {
        @Override
        public void customize(PageableHandlerMethodArgumentResolver pr) {
            pr.setOneIndexedParameters(true);
        }
    }
}
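One quick way to exercise the issue endpoint is a web-layer test. The sketch below is a hypothetical @WebMvcTest: the JSON field name is inferred from CouponRequest.getPhoneNumber(), and the sample phone number is made up.

package com.smilegatemegaport.coupon.controller;

import com.smilegatemegaport.coupon.service.CouponService;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@WebMvcTest(CouponController.class)
class CouponControllerTest {

    @Autowired
    private MockMvc mockMvc;

    // Mocked so the web layer can be tested without the real service
    @MockBean
    private CouponService couponService;

    @Test
    void issueCouponReturnsOk() throws Exception {
        mockMvc.perform(post("/api/v1/coupon")
                .contentType(MediaType.APPLICATION_JSON)
                .content("{\"phoneNumber\": \"010-1234-5678\"}")) // sample number, made up
            .andExpect(status().isOk());
    }
}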
<gh_stars>0 /* * CDDL HEADER START * * The contents of this file are subject to the terms of the * Common Development and Distribution License, Version 1.0 only * (the "License"). You may not use this file except in compliance * with the License. * * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE * or http://www.opensolaris.org/os/licensing. * See the License for the specific language governing permissions * and limitations under the License. * * When distributing Covered Code, include this CDDL HEADER in each * file and include the License file at usr/src/OPENSOLARIS.LICENSE. * If applicable, add the following below this CDDL HEADER, with the * fields enclosed by brackets "[]" replaced with your own identifying * information: Portions Copyright [yyyy] [name of copyright owner] * * CDDL HEADER END */ /* * Copyright 2004 Sun Microsystems, Inc. All rights reserved. * Use is subject to license terms. */ /* * hci1394_ixl_comp.c * Isochronous IXL Compiler. * The compiler converts the general hardware independent IXL command * blocks into OpenHCI DMA descriptors. */ #include <sys/kmem.h> #include <sys/types.h> #include <sys/conf.h> #include <sys/ddi.h> #include <sys/sunddi.h> #include <sys/tnf_probe.h> #include <sys/1394/h1394.h> #include <sys/1394/ixl1394.h> #include <sys/1394/adapters/hci1394.h> /* compiler allocation size for DMA descriptors. 8000 is 500 descriptors */ #define HCI1394_IXL_PAGESIZE 8000 /* invalid opcode */ #define IXL1394_OP_INVALID (0 | IXL1394_OPTY_OTHER) /* * maximum number of interrupts permitted for a single context in which * the context does not advance to the next DMA descriptor. Interrupts are * triggered by 1) hardware completing a DMA descriptor block which has the * interrupt (i) bits set, 2) a cycle_inconsistent interrupt, or 3) a cycle_lost * interrupt. Once the max is reached, the HCI1394_IXL_INTR_NOADV error is * returned. 
*/ int hci1394_ixl_max_noadv_intrs = 8; static void hci1394_compile_ixl_init(hci1394_comp_ixl_vars_t *wvp, hci1394_state_t *soft_statep, hci1394_iso_ctxt_t *ctxtp, ixl1394_command_t *ixlp); static void hci1394_compile_ixl_endup(hci1394_comp_ixl_vars_t *wvp); static void hci1394_parse_ixl(hci1394_comp_ixl_vars_t *wvp, ixl1394_command_t *ixlp); static void hci1394_finalize_all_xfer_desc(hci1394_comp_ixl_vars_t *wvp); static void hci1394_finalize_cur_xfer_desc(hci1394_comp_ixl_vars_t *wvp); static void hci1394_bld_recv_pkt_desc(hci1394_comp_ixl_vars_t *wvp); static void hci1394_bld_recv_buf_ppb_desc(hci1394_comp_ixl_vars_t *wvp); static void hci1394_bld_recv_buf_fill_desc(hci1394_comp_ixl_vars_t *wvp); static void hci1394_bld_xmit_pkt_desc(hci1394_comp_ixl_vars_t *wvp); static void hci1394_bld_xmit_buf_desc(hci1394_comp_ixl_vars_t *wvp); static void hci1394_bld_xmit_hdronly_nopkt_desc(hci1394_comp_ixl_vars_t *wvp); static int hci1394_bld_dma_mem_desc_blk(hci1394_comp_ixl_vars_t *wvp, caddr_t *dma_descpp, uint32_t *dma_desc_bound); static void hci1394_set_xmit_pkt_hdr(hci1394_comp_ixl_vars_t *wvp); static void hci1394_set_xmit_skip_mode(hci1394_comp_ixl_vars_t *wvp); static void hci1394_set_xmit_storevalue_desc(hci1394_comp_ixl_vars_t *wvp); static int hci1394_set_next_xfer_buf(hci1394_comp_ixl_vars_t *wvp, uint32_t bufp, uint16_t size); static int hci1394_flush_end_desc_check(hci1394_comp_ixl_vars_t *wvp, uint32_t count); static int hci1394_flush_hci_cache(hci1394_comp_ixl_vars_t *wvp); static uint32_t hci1394_alloc_storevalue_dma_mem(hci1394_comp_ixl_vars_t *wvp); static hci1394_xfer_ctl_t *hci1394_alloc_xfer_ctl(hci1394_comp_ixl_vars_t *wvp, uint32_t dmacnt); static void *hci1394_alloc_dma_mem(hci1394_comp_ixl_vars_t *wvp, uint32_t size, uint32_t *dma_bound); static boolean_t hci1394_is_opcode_valid(uint16_t ixlopcode); /* * FULL LIST OF ACCEPTED IXL COMMAND OPCOCDES: * Receive Only: Transmit Only: * IXL1394_OP_RECV_PKT_ST IXL1394_OP_SEND_PKT_WHDR_ST * IXL1394_OP_RECV_PKT IXL1394_OP_SEND_PKT_ST * IXL1394_OP_RECV_BUF IXL1394_OP_SEND_PKT * IXL1394_OP_SET_SYNCWAIT IXL1394_OP_SEND_BUF * IXL1394_OP_SEND_HDR_ONLY * Receive or Transmit: IXL1394_OP_SEND_NO_PKT * IXL1394_OP_CALLBACK IXL1394_OP_SET_TAGSYNC * IXL1394_OP_LABEL IXL1394_OP_SET_SKIPMODE * IXL1394_OP_JUMP IXL1394_OP_STORE_TIMESTAMP */ /* * hci1394_compile_ixl() * Top level ixl compiler entry point. Scans ixl and builds openHCI 1.0 * descriptor blocks in dma memory. */ int hci1394_compile_ixl(hci1394_state_t *soft_statep, hci1394_iso_ctxt_t *ctxtp, ixl1394_command_t *ixlp, int *resultp) { hci1394_comp_ixl_vars_t wv; /* working variables used throughout */ ASSERT(soft_statep != NULL); ASSERT(ctxtp != NULL); TNF_PROBE_0_DEBUG(hci1394_compile_ixl_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* Initialize compiler working variables */ hci1394_compile_ixl_init(&wv, soft_statep, ctxtp, ixlp); /* * First pass: * Parse ixl commands, building desc blocks, until end of IXL * linked list. */ hci1394_parse_ixl(&wv, ixlp); /* * Second pass: * Resolve all generated descriptor block jump and skip addresses. * Set interrupt enable in descriptor blocks which have callback * operations in their execution scope. (Previously store_timesamp * operations were counted also.) Set interrupt enable in descriptor * blocks which were introduced by an ixl label command. 
*/ if (wv.dma_bld_error == 0) { hci1394_finalize_all_xfer_desc(&wv); } /* Endup: finalize and cleanup ixl compile, return result */ hci1394_compile_ixl_endup(&wv); *resultp = wv.dma_bld_error; if (*resultp != 0) { TNF_PROBE_0_DEBUG(hci1394_compile_ixl_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_FAILURE); } else { TNF_PROBE_0_DEBUG(hci1394_compile_ixl_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_SUCCESS); } } /* * hci1394_compile_ixl_init() * Initialize the isoch context structure associated with the IXL * program, and initialize the temporary working variables structure. */ static void hci1394_compile_ixl_init(hci1394_comp_ixl_vars_t *wvp, hci1394_state_t *soft_statep, hci1394_iso_ctxt_t *ctxtp, ixl1394_command_t *ixlp) { TNF_PROBE_0_DEBUG(hci1394_compile_ixl_init_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* initialize common recv/xmit compile values */ wvp->soft_statep = soft_statep; wvp->ctxtp = ctxtp; /* init/clear ctxtp values */ ctxtp->dma_mem_execp = 0; ctxtp->dma_firstp = NULL; ctxtp->dma_last_time = 0; ctxtp->xcs_firstp = NULL; ctxtp->ixl_exec_depth = 0; ctxtp->ixl_execp = NULL; ctxtp->ixl_firstp = ixlp; ctxtp->default_skipxferp = NULL; /* * the context's max_noadv_intrs is set here instead of in isoch init * because the default is patchable and would only be picked up this way */ ctxtp->max_noadv_intrs = hci1394_ixl_max_noadv_intrs; /* init working variables */ wvp->xcs_firstp = NULL; wvp->xcs_currentp = NULL; wvp->dma_firstp = NULL; wvp->dma_currentp = NULL; wvp->dma_bld_error = 0; wvp->ixl_io_mode = ctxtp->ctxt_flags; wvp->ixl_cur_cmdp = NULL; wvp->ixl_cur_xfer_stp = NULL; wvp->ixl_cur_labelp = NULL; wvp->ixl_xfer_st_cnt = 0; /* count of xfer start commands found */ wvp->xfer_state = XFER_NONE; /* none, pkt, buf, skip, hdronly */ wvp->xfer_hci_flush = 0; /* updateable - xfer, jump, set */ wvp->xfer_pktlen = 0; wvp->xfer_bufcnt = 0; wvp->descriptors = 0; /* START RECV ONLY SECTION */ wvp->ixl_setsyncwait_cnt = 0; /* START XMIT ONLY SECTION */ wvp->ixl_settagsync_cmdp = NULL; wvp->ixl_setskipmode_cmdp = NULL; wvp->default_skipmode = ctxtp->default_skipmode; /* nxt,self,stop,jmp */ wvp->default_skiplabelp = ctxtp->default_skiplabelp; wvp->default_skipxferp = NULL; wvp->skipmode = ctxtp->default_skipmode; wvp->skiplabelp = NULL; wvp->skipxferp = NULL; wvp->default_tag = ctxtp->default_tag; wvp->default_sync = ctxtp->default_sync; wvp->storevalue_bufp = hci1394_alloc_storevalue_dma_mem(wvp); wvp->storevalue_data = 0; wvp->xmit_pkthdr1 = 0; wvp->xmit_pkthdr2 = 0; /* END XMIT ONLY SECTION */ TNF_PROBE_0_DEBUG(hci1394_compile_ixl_init_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_compile_ixl_endup() * This routine is called just before the main hci1394_compile_ixl() exits. 
* It checks for errors and performs the appropriate cleanup, or it rolls any * relevant info from the working variables struct into the context structure */ static void hci1394_compile_ixl_endup(hci1394_comp_ixl_vars_t *wvp) { ixl1394_command_t *ixl_exec_stp; hci1394_idma_desc_mem_t *dma_nextp; int err; TNF_PROBE_0_DEBUG(hci1394_compile_ixl_endup_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* error if no descriptor blocks found in ixl & created in dma memory */ if ((wvp->dma_bld_error == 0) && (wvp->ixl_xfer_st_cnt == 0)) { TNF_PROBE_1(hci1394_compile_ixl_endup_nodata_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_ENO_DATA_PKTS: prog has no data packets"); wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS; } /* if no errors yet, find the first IXL command that's a transfer cmd */ if (wvp->dma_bld_error == 0) { err = hci1394_ixl_find_next_exec_xfer(wvp->ctxtp->ixl_firstp, NULL, &ixl_exec_stp); /* error if a label<->jump loop, or no xfer */ if ((err == DDI_FAILURE) || (ixl_exec_stp == NULL)) { TNF_PROBE_1(hci1394_compile_ixl_endup_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_ENO_DATA_PKTS: loop or no xfer detected"); wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS; } } /* Sync all the DMA descriptor buffers */ dma_nextp = wvp->ctxtp->dma_firstp; while (dma_nextp != NULL) { err = ddi_dma_sync(dma_nextp->mem.bi_dma_handle, (off_t)dma_nextp->mem.bi_kaddr, dma_nextp->mem.bi_length, DDI_DMA_SYNC_FORDEV); if (err != DDI_SUCCESS) { wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR; TNF_PROBE_1(hci1394_compile_ixl_endup_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_INTERNAL_ERROR: dma_sync() failed"); break; } /* advance to next dma memory descriptor */ dma_nextp = dma_nextp->dma_nextp; } /* * If error, cleanup and return. delete all allocated xfer_ctl structs * and all dma descriptor page memory and its dma memory blocks too. */ if (wvp->dma_bld_error != 0) { wvp->ctxtp->xcs_firstp = (void *)wvp->xcs_firstp; wvp->ctxtp->dma_firstp = wvp->dma_firstp; hci1394_ixl_cleanup(wvp->soft_statep, wvp->ctxtp); TNF_PROBE_0_DEBUG(hci1394_compile_ixl_endup_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* can only get to here if the first ixl transfer command is found */ /* set required processing vars into ctxtp struct */ wvp->ctxtp->default_skipxferp = wvp->default_skipxferp; wvp->ctxtp->dma_mem_execp = 0; /* * the transfer command's compiler private xfer_ctl structure has the * appropriate bound address */ wvp->ctxtp->dma_mem_execp = (uint32_t)((hci1394_xfer_ctl_t *) ixl_exec_stp->compiler_privatep)->dma[0].dma_bound; wvp->ctxtp->xcs_firstp = (void *)wvp->xcs_firstp; wvp->ctxtp->dma_firstp = wvp->dma_firstp; wvp->ctxtp->dma_last_time = 0; wvp->ctxtp->ixl_exec_depth = 0; wvp->ctxtp->ixl_execp = NULL; /* compile done */ TNF_PROBE_0_DEBUG(hci1394_compile_ixl_endup_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_parse_ixl() * Scan IXL program and build ohci DMA descriptor blocks in dma memory. * * Parse/process succeeding ixl commands until end of IXL linked list is * reached. Evaluate ixl syntax and build (xmit or recv) descriptor * blocks. To aid execution time evaluation of current location, enable * status recording on each descriptor block built. * On xmit, set sync & tag bits. On recv, optionally set wait for sync bit. 
*/ static void hci1394_parse_ixl(hci1394_comp_ixl_vars_t *wvp, ixl1394_command_t *ixlp) { ixl1394_command_t *ixlnextp = ixlp; /* addr of next ixl cmd */ ixl1394_command_t *ixlcurp = NULL; /* addr of current ixl cmd */ uint16_t ixlopcode = 0; /* opcode of currnt ixl cmd */ uint32_t pktsize; uint32_t pktcnt; TNF_PROBE_0_DEBUG(hci1394_parse_ixl_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* follow ixl links until reach end or find error */ while ((ixlnextp != NULL) && (wvp->dma_bld_error == 0)) { /* set this command as the current ixl command */ wvp->ixl_cur_cmdp = ixlcurp = ixlnextp; ixlnextp = ixlcurp->next_ixlp; ixlopcode = ixlcurp->ixl_opcode; /* init compiler controlled values in current ixl command */ ixlcurp->compiler_privatep = NULL; ixlcurp->compiler_resv = 0; /* error if xmit/recv mode not appropriate for current cmd */ if ((((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0) && ((ixlopcode & IXL1394_OPF_ONRECV) == 0)) || (((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) == 0) && ((ixlopcode & IXL1394_OPF_ONXMIT) == 0))) { /* check if command op failed because it was invalid */ if (hci1394_is_opcode_valid(ixlopcode) != B_TRUE) { TNF_PROBE_3(hci1394_parse_ixl_bad_opcode_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_BAD_IXL_OPCODE", tnf_opaque, ixl_commandp, ixlcurp, tnf_opaque, ixl_opcode, ixlopcode); wvp->dma_bld_error = IXL1394_EBAD_IXL_OPCODE; } else { TNF_PROBE_3(hci1394_parse_ixl_mode_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EWRONG_XR_CMD_MODE: " "invalid ixlop in mode", tnf_uint, io_mode, wvp->ixl_io_mode, tnf_opaque, ixl_opcode, ixlopcode); wvp->dma_bld_error = IXL1394_EWRONG_XR_CMD_MODE; } continue; } /* * if ends xfer flag set, finalize current xfer descriptor * block build */ if ((ixlopcode & IXL1394_OPF_ENDSXFER) != 0) { /* finalize any descriptor block build in progress */ hci1394_finalize_cur_xfer_desc(wvp); if (wvp->dma_bld_error != 0) { continue; } } /* * now process based on specific opcode value */ switch (ixlopcode) { case IXL1394_OP_RECV_BUF: case IXL1394_OP_RECV_BUF_U: { ixl1394_xfer_buf_t *cur_xfer_buf_ixlp; cur_xfer_buf_ixlp = (ixl1394_xfer_buf_t *)ixlcurp; /* * In packet-per-buffer mode: * This ixl command builds a collection of xfer * descriptor blocks (size/pkt_size of them) each to * recv a packet whose buffer size is pkt_size and * whose buffer ptr is (pktcur*pkt_size + bufp) * * In buffer fill mode: * This ixl command builds a single xfer descriptor * block to recv as many packets or parts of packets * as can fit into the buffer size specified * (pkt_size is not used). 
*/ /* set xfer_state for new descriptor block build */ wvp->xfer_state = XFER_BUF; /* set this ixl command as current xferstart command */ wvp->ixl_cur_xfer_stp = ixlcurp; /* * perform packet-per-buffer checks * (no checks needed when in buffer fill mode) */ if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) == 0) { /* the packets must use the buffer exactly */ pktsize = cur_xfer_buf_ixlp->pkt_size; pktcnt = 0; if (pktsize != 0) { pktcnt = cur_xfer_buf_ixlp->size / pktsize; } if ((pktcnt == 0) || ((pktsize * pktcnt) != cur_xfer_buf_ixlp->size)) { TNF_PROBE_3(hci1394_parse_ixl_rat_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EPKTSIZE_RATIO", tnf_int, buf_size, cur_xfer_buf_ixlp->size, tnf_int, pkt_size, pktsize); wvp->dma_bld_error = IXL1394_EPKTSIZE_RATIO; continue; } } /* * set buffer pointer & size into first xfer_bufp * and xfer_size */ if (hci1394_set_next_xfer_buf(wvp, cur_xfer_buf_ixlp->ixl_buf.ixldmac_addr, cur_xfer_buf_ixlp->size) != DDI_SUCCESS) { /* wvp->dma_bld_error is set by above call */ continue; } break; } case IXL1394_OP_RECV_PKT_ST: case IXL1394_OP_RECV_PKT_ST_U: { ixl1394_xfer_pkt_t *cur_xfer_pkt_ixlp; cur_xfer_pkt_ixlp = (ixl1394_xfer_pkt_t *)ixlcurp; /* error if in buffer fill mode */ if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) != 0) { TNF_PROBE_1(hci1394_parse_ixl_mode_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EWRONG_XR_CMD_MODE: " "RECV_PKT_ST used in BFFILL mode"); wvp->dma_bld_error = IXL1394_EWRONG_XR_CMD_MODE; continue; } /* set xfer_state for new descriptor block build */ /* set this ixl command as current xferstart command */ wvp->xfer_state = XFER_PKT; wvp->ixl_cur_xfer_stp = ixlcurp; /* * set buffer pointer & size into first xfer_bufp * and xfer_size */ if (hci1394_set_next_xfer_buf(wvp, cur_xfer_pkt_ixlp->ixl_buf.ixldmac_addr, cur_xfer_pkt_ixlp->size) != DDI_SUCCESS) { /* wvp->dma_bld_error is set by above call */ continue; } break; } case IXL1394_OP_RECV_PKT: case IXL1394_OP_RECV_PKT_U: { ixl1394_xfer_pkt_t *cur_xfer_pkt_ixlp; cur_xfer_pkt_ixlp = (ixl1394_xfer_pkt_t *)ixlcurp; /* error if in buffer fill mode */ if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) != 0) { TNF_PROBE_1(hci1394_parse_ixl_mode_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EWRONG_XR_CMD_MODE: " "RECV_PKT_ST used in BFFILL mode"); wvp->dma_bld_error = IXL1394_EWRONG_XR_CMD_MODE; continue; } /* error if xfer_state not xfer pkt */ if (wvp->xfer_state != XFER_PKT) { TNF_PROBE_1(hci1394_parse_ixl_misplacercv_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMISPLACED_RECV: " "RECV_PKT without RECV_PKT_ST"); wvp->dma_bld_error = IXL1394_EMISPLACED_RECV; continue; } /* * save xfer start cmd ixl ptr in compiler_privatep * field of this cmd */ ixlcurp->compiler_privatep = (void *) wvp->ixl_cur_xfer_stp; /* * save pkt index [1-n] in compiler_resv field of * this cmd */ ixlcurp->compiler_resv = wvp->xfer_bufcnt; /* * set buffer pointer & size into next xfer_bufp * and xfer_size */ if (hci1394_set_next_xfer_buf(wvp, cur_xfer_pkt_ixlp->ixl_buf.ixldmac_addr, cur_xfer_pkt_ixlp->size) != DDI_SUCCESS) { /* wvp->dma_bld_error is set by above call */ continue; } /* * set updateable xfer cache flush eval flag if * updateable opcode */ if ((ixlopcode & IXL1394_OPF_UPDATE) != 0) { wvp->xfer_hci_flush |= UPDATEABLE_XFER; } break; } case IXL1394_OP_SEND_BUF: case IXL1394_OP_SEND_BUF_U: { ixl1394_xfer_buf_t *cur_xfer_buf_ixlp; cur_xfer_buf_ixlp = (ixl1394_xfer_buf_t *)ixlcurp; /* * These send_buf commands build a 
collection of xmit * descriptor blocks (size/pkt_size of them) each to * xfer a packet whose buffer size is pkt_size and whose * buffer pt is (pktcur*pkt_size + bufp). (ptr and size * are adjusted if they have header form of ixl cmd) */ /* set xfer_state for new descriptor block build */ wvp->xfer_state = XFER_BUF; /* set this ixl command as current xferstart command */ wvp->ixl_cur_xfer_stp = ixlcurp; /* the packets must use the buffer exactly,else error */ pktsize = cur_xfer_buf_ixlp->pkt_size; pktcnt = 0; if (pktsize != 0) { pktcnt = cur_xfer_buf_ixlp->size / pktsize; } if ((pktcnt == 0) || ((pktsize * pktcnt) != cur_xfer_buf_ixlp->size)) { TNF_PROBE_3(hci1394_parse_ixl_rat_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EPKTSIZE_RATIO", tnf_int, buf_size, cur_xfer_buf_ixlp->size, tnf_int, pkt_size, pktsize); wvp->dma_bld_error = IXL1394_EPKTSIZE_RATIO; continue; } /* set buf ptr & size into 1st xfer_bufp & xfer_size */ if (hci1394_set_next_xfer_buf(wvp, cur_xfer_buf_ixlp->ixl_buf.ixldmac_addr, cur_xfer_buf_ixlp->size) != DDI_SUCCESS) { /* wvp->dma_bld_error is set by above call */ continue; } break; } case IXL1394_OP_SEND_PKT_ST: case IXL1394_OP_SEND_PKT_ST_U: { ixl1394_xfer_pkt_t *cur_xfer_pkt_ixlp; cur_xfer_pkt_ixlp = (ixl1394_xfer_pkt_t *)ixlcurp; /* set xfer_state for new descriptor block build */ /* set this ixl command as current xferstart command */ wvp->xfer_state = XFER_PKT; wvp->ixl_cur_xfer_stp = ixlcurp; /* * set buffer pointer & size into first xfer_bufp and * xfer_size */ if (hci1394_set_next_xfer_buf(wvp, cur_xfer_pkt_ixlp->ixl_buf.ixldmac_addr, cur_xfer_pkt_ixlp->size) != DDI_SUCCESS) { /* wvp->dma_bld_error is set by above call */ continue; } break; } case IXL1394_OP_SEND_PKT_WHDR_ST: case IXL1394_OP_SEND_PKT_WHDR_ST_U: { ixl1394_xfer_pkt_t *cur_xfer_pkt_ixlp; cur_xfer_pkt_ixlp = (ixl1394_xfer_pkt_t *)ixlcurp; /* set xfer_state for new descriptor block build */ /* set this ixl command as current xferstart command */ wvp->xfer_state = XFER_PKT; wvp->ixl_cur_xfer_stp = ixlcurp; /* * buffer size must be at least 4 (must include header), * else error */ if (cur_xfer_pkt_ixlp->size < 4) { TNF_PROBE_2(hci1394_parse_ixl_hdr_missing_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EPKT_HDR_MISSING", tnf_int, pkt_size, cur_xfer_pkt_ixlp->size); wvp->dma_bld_error = IXL1394_EPKT_HDR_MISSING; continue; } /* * set buffer and size(excluding header) into first * xfer_bufp and xfer_size */ if (hci1394_set_next_xfer_buf(wvp, cur_xfer_pkt_ixlp->ixl_buf.ixldmac_addr + 4, cur_xfer_pkt_ixlp->size - 4) != DDI_SUCCESS) { /* wvp->dma_bld_error is set by above call */ continue; } break; } case IXL1394_OP_SEND_PKT: case IXL1394_OP_SEND_PKT_U: { ixl1394_xfer_pkt_t *cur_xfer_pkt_ixlp; cur_xfer_pkt_ixlp = (ixl1394_xfer_pkt_t *)ixlcurp; /* error if xfer_state not xfer pkt */ if (wvp->xfer_state != XFER_PKT) { TNF_PROBE_1(hci1394_parse_ixl_misplacesnd_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMISPLACED_SEND: SEND_PKT " "without SEND_PKT_ST"); wvp->dma_bld_error = IXL1394_EMISPLACED_SEND; continue; } /* * save xfer start cmd ixl ptr in compiler_privatep * field of this cmd */ ixlcurp->compiler_privatep = (void *) wvp->ixl_cur_xfer_stp; /* * save pkt index [1-n] in compiler_resv field of this * cmd */ ixlcurp->compiler_resv = wvp->xfer_bufcnt; /* * set buffer pointer & size into next xfer_bufp * and xfer_size */ if (hci1394_set_next_xfer_buf(wvp, cur_xfer_pkt_ixlp->ixl_buf.ixldmac_addr, cur_xfer_pkt_ixlp->size) != DDI_SUCCESS) { /* 
wvp->dma_bld_error is set by above call */ continue; } /* * set updateable xfer cache flush eval flag if * updateable opcode */ if ((ixlopcode & IXL1394_OPF_UPDATE) != 0) { wvp->xfer_hci_flush |= UPDATEABLE_XFER; } break; } case IXL1394_OP_SEND_HDR_ONLY: /* set xfer_state for new descriptor block build */ wvp->xfer_state = XMIT_HDRONLY; /* set this ixl command as current xferstart command */ wvp->ixl_cur_xfer_stp = ixlcurp; break; case IXL1394_OP_SEND_NO_PKT: /* set xfer_state for new descriptor block build */ wvp->xfer_state = XMIT_NOPKT; /* set this ixl command as current xferstart command */ wvp->ixl_cur_xfer_stp = ixlcurp; break; case IXL1394_OP_JUMP: case IXL1394_OP_JUMP_U: { ixl1394_jump_t *cur_jump_ixlp; cur_jump_ixlp = (ixl1394_jump_t *)ixlcurp; /* * verify label indicated by IXL1394_OP_JUMP is * actually an IXL1394_OP_LABEL or NULL */ if ((cur_jump_ixlp->label != NULL) && (cur_jump_ixlp->label->ixl_opcode != IXL1394_OP_LABEL)) { TNF_PROBE_3(hci1394_parse_ixl_jumplabel_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EJUMP_NOT_TO_LABEL", tnf_opaque, jumpixl_commandp, ixlcurp, tnf_opaque, jumpto_ixl, cur_jump_ixlp->label); wvp->dma_bld_error = IXL1394_EJUMP_NOT_TO_LABEL; continue; } break; } case IXL1394_OP_LABEL: /* * save current ixl label command for xfer cmd * finalize processing */ wvp->ixl_cur_labelp = ixlcurp; /* set initiating label flag to cause cache flush */ wvp->xfer_hci_flush |= INITIATING_LBL; break; case IXL1394_OP_CALLBACK: case IXL1394_OP_CALLBACK_U: case IXL1394_OP_STORE_TIMESTAMP: /* * these commands are accepted during compile, * processed during execution (interrupt handling) * No further processing is needed here. */ break; case IXL1394_OP_SET_SKIPMODE: case IXL1394_OP_SET_SKIPMODE_U: /* * Error if already have a set skipmode cmd for * this xfer */ if (wvp->ixl_setskipmode_cmdp != NULL) { TNF_PROBE_2(hci1394_parse_ixl_dup_set_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EDUPLICATE_SET_CMD:" " duplicate set skipmode", tnf_opaque, ixl_commandp, ixlcurp); wvp->dma_bld_error = IXL1394_EDUPLICATE_SET_CMD; continue; } /* save skip mode ixl command and verify skipmode */ wvp->ixl_setskipmode_cmdp = (ixl1394_set_skipmode_t *) ixlcurp; if ((wvp->ixl_setskipmode_cmdp->skipmode != IXL1394_SKIP_TO_NEXT) && (wvp->ixl_setskipmode_cmdp->skipmode != IXL1394_SKIP_TO_SELF) && (wvp->ixl_setskipmode_cmdp->skipmode != IXL1394_SKIP_TO_STOP) && (wvp->ixl_setskipmode_cmdp->skipmode != IXL1394_SKIP_TO_LABEL)) { TNF_PROBE_3(hci1394_parse_ixl_dup_set_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL EBAD_SKIPMODE", tnf_opaque, ixl_commandp, ixlcurp, tnf_int, skip, wvp->ixl_setskipmode_cmdp->skipmode); wvp->dma_bld_error = IXL1394_EBAD_SKIPMODE; continue; } /* * if mode is IXL1394_SKIP_TO_LABEL, verify label * references an IXL1394_OP_LABEL */ if ((wvp->ixl_setskipmode_cmdp->skipmode == IXL1394_SKIP_TO_LABEL) && ((wvp->ixl_setskipmode_cmdp->label == NULL) || (wvp->ixl_setskipmode_cmdp->label->ixl_opcode != IXL1394_OP_LABEL))) { TNF_PROBE_3(hci1394_parse_ixl_jump_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EJUMP_NOT_TO_LABEL", tnf_opaque, jumpixl_commandp, ixlcurp, tnf_opaque, jumpto_ixl, wvp->ixl_setskipmode_cmdp->label); wvp->dma_bld_error = IXL1394_EJUMP_NOT_TO_LABEL; continue; } /* * set updateable set cmd cache flush eval flag if * updateable opcode */ if ((ixlopcode & IXL1394_OPF_UPDATE) != 0) { wvp->xfer_hci_flush |= UPDATEABLE_SET; } break; case IXL1394_OP_SET_TAGSYNC: case IXL1394_OP_SET_TAGSYNC_U: 
/* * is an error if already have a set tag and sync cmd * for this xfer */ if (wvp->ixl_settagsync_cmdp != NULL) { TNF_PROBE_2(hci1394_parse_ixl_dup_set_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EDUPLICATE_SET_CMD:" " duplicate set tagsync", tnf_opaque, ixl_commandp, ixlcurp); wvp->dma_bld_error = IXL1394_EDUPLICATE_SET_CMD; continue; } /* save ixl command containing tag and sync values */ wvp->ixl_settagsync_cmdp = (ixl1394_set_tagsync_t *)ixlcurp; /* * set updateable set cmd cache flush eval flag if * updateable opcode */ if ((ixlopcode & IXL1394_OPF_UPDATE) != 0) { wvp->xfer_hci_flush |= UPDATEABLE_SET; } break; case IXL1394_OP_SET_SYNCWAIT: /* * count ixl wait-for-sync commands since last * finalize ignore multiple occurrences for same xfer * command */ wvp->ixl_setsyncwait_cnt++; break; default: /* error - unknown/unimplemented ixl command */ TNF_PROBE_3(hci1394_parse_ixl_bad_opcode_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_BAD_IXL_OPCODE", tnf_opaque, ixl_commandp, ixlcurp, tnf_opaque, ixl_opcode, ixlopcode); wvp->dma_bld_error = IXL1394_EBAD_IXL_OPCODE; continue; } } /* while */ /* finalize any last descriptor block build */ wvp->ixl_cur_cmdp = NULL; if (wvp->dma_bld_error == 0) { hci1394_finalize_cur_xfer_desc(wvp); } TNF_PROBE_0_DEBUG(hci1394_parse_ixl_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_finalize_all_xfer_desc() * Pass 2: Scan IXL resolving all dma descriptor jump and skip addresses. * * Set interrupt enable on first descriptor block associated with current * xfer IXL command if current IXL xfer was introduced by an IXL label cmnd. * * Set interrupt enable on last descriptor block associated with current xfer * IXL command if any callback ixl commands are found on the execution path * between the current and the next xfer ixl command. (Previously, this * applied to store timestamp ixl commands, as well.) */ static void hci1394_finalize_all_xfer_desc(hci1394_comp_ixl_vars_t *wvp) { ixl1394_command_t *ixlcurp; /* current ixl command */ ixl1394_command_t *ixlnextp; /* next ixl command */ ixl1394_command_t *ixlexecnext; hci1394_xfer_ctl_t *xferctl_curp; hci1394_xfer_ctl_t *xferctl_nxtp; hci1394_desc_t *hcidescp; ddi_acc_handle_t acc_hdl; uint32_t temp; uint32_t dma_execnext_addr; uint32_t dma_skiplabel_addr; uint32_t dma_skip_addr; uint32_t callback_cnt; uint16_t repcnt; uint16_t ixlopcode; int ii; int err; TNF_PROBE_0_DEBUG(hci1394_finalize_all_xfer_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* * If xmit mode and if default skipmode is skip to label - * follow exec path starting at default skipmode label until * find the first ixl xfer command which is to be executed. * Set its address into default_skipxferp. 
*/ if (((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) == 0) && (wvp->ctxtp->default_skipmode == IXL1394_SKIP_TO_LABEL)) { err = hci1394_ixl_find_next_exec_xfer(wvp->default_skiplabelp, NULL, &wvp->default_skipxferp); if (err == DDI_FAILURE) { TNF_PROBE_2(hci1394_finalize_all_xfer_desc_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_ENO_DATA_PKTS: label<->jump loop detected " "for skiplabel default w/no xfers", tnf_opaque, skipixl_cmdp, wvp->default_skiplabelp); TNF_PROBE_0_DEBUG(hci1394_finalize_all_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS; return; } } /* set first ixl cmd */ ixlnextp = wvp->ctxtp->ixl_firstp; /* follow ixl links until reach end or find error */ while ((ixlnextp != NULL) && (wvp->dma_bld_error == 0)) { /* set this command as the current ixl command */ ixlcurp = ixlnextp; ixlnextp = ixlcurp->next_ixlp; /* get command opcode removing unneeded update flag */ ixlopcode = ixlcurp->ixl_opcode & ~IXL1394_OPF_UPDATE; /* * Scan for next ixl xfer start command (including this one), * along ixl link path. Once xfer command found, find next IXL * xfer cmd along execution path and fill in branch address of * current xfer command. If is composite ixl xfer command, first * link forward branch dma addresses of each descriptor block in * composite, until reach final one then set its branch address * to next execution path xfer found. Next determine skip mode * and fill in skip address(es) appropriately. */ /* skip to next if not xfer start ixl command */ if (((ixlopcode & IXL1394_OPF_ISXFER) == 0) || ((ixlopcode & IXL1394_OPTY_MASK) == 0)) { continue; } /* * get xfer_ctl structure and composite repeat count for current * IXL xfer cmd */ xferctl_curp = (hci1394_xfer_ctl_t *)ixlcurp->compiler_privatep; repcnt = xferctl_curp->cnt; /* * if initiated by an IXL label command, set interrupt enable * flag into last component of first descriptor block of * current IXL xfer cmd */ if ((xferctl_curp->ctl_flags & XCTL_LABELLED) != 0) { hcidescp = (hci1394_desc_t *) xferctl_curp->dma[0].dma_descp; acc_hdl = xferctl_curp->dma[0].dma_buf->bi_handle; temp = ddi_get32(acc_hdl, &hcidescp->hdr); temp |= DESC_INTR_ENBL; ddi_put32(acc_hdl, &hcidescp->hdr, temp); } /* find next xfer IXL cmd by following execution path */ err = hci1394_ixl_find_next_exec_xfer(ixlcurp->next_ixlp, &callback_cnt, &ixlexecnext); /* if label<->jump loop detected, return error */ if (err == DDI_FAILURE) { wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS; TNF_PROBE_2(hci1394_finalize_all_xfer_desc_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_ENO_DATA_PKTS: label<->jump loop detected " "w/no xfers", tnf_opaque, ixl_cmdp, ixlcurp->next_ixlp); continue; } /* link current IXL's xfer_ctl to next xfer IXL on exec path */ xferctl_curp->execp = ixlexecnext; /* * if callbacks have been seen during execution path scan, * set interrupt enable flag into last descriptor of last * descriptor block of current IXL xfer cmd */ if (callback_cnt != 0) { hcidescp = (hci1394_desc_t *) xferctl_curp->dma[repcnt - 1].dma_descp; acc_hdl = xferctl_curp->dma[repcnt - 1].dma_buf->bi_handle; temp = ddi_get32(acc_hdl, &hcidescp->hdr); temp |= DESC_INTR_ENBL; ddi_put32(acc_hdl, &hcidescp->hdr, temp); } /* * obtain dma bound addr of next exec path IXL xfer command, * if any */ dma_execnext_addr = 0; if (ixlexecnext != NULL) { xferctl_nxtp = (hci1394_xfer_ctl_t *) ixlexecnext->compiler_privatep; dma_execnext_addr = xferctl_nxtp->dma[0].dma_bound; } else { /* * If this is last 
descriptor (next == NULL), then * make sure the interrupt bit is enabled. This * way we can ensure that we are notified when the * descriptor chain processing has come to an end. */ hcidescp = (hci1394_desc_t *) xferctl_curp->dma[repcnt - 1].dma_descp; acc_hdl = xferctl_curp->dma[repcnt - 1].dma_buf->bi_handle; temp = ddi_get32(acc_hdl, &hcidescp->hdr); temp |= DESC_INTR_ENBL; ddi_put32(acc_hdl, &hcidescp->hdr, temp); } /* * set jump address of final cur IXL xfer cmd to addr next * IXL xfer cmd */ hcidescp = (hci1394_desc_t *) xferctl_curp->dma[repcnt - 1].dma_descp; acc_hdl = xferctl_curp->dma[repcnt - 1].dma_buf->bi_handle; ddi_put32(acc_hdl, &hcidescp->branch, dma_execnext_addr); /* * if a composite object, forward link initial jump * dma addresses */ for (ii = 0; ii < repcnt - 1; ii++) { hcidescp = (hci1394_desc_t *) xferctl_curp->dma[ii].dma_descp; acc_hdl = xferctl_curp->dma[ii].dma_buf->bi_handle; ddi_put32(acc_hdl, &hcidescp->branch, xferctl_curp->dma[ii + 1].dma_bound); } /* * fill in skip address(es) for all descriptor blocks belonging * to current IXL xfer command; note:skip addresses apply only * to xmit mode commands */ if ((ixlopcode & IXL1394_OPF_ONXMIT) != 0) { /* first obtain and set skip mode information */ wvp->ixl_setskipmode_cmdp = xferctl_curp->skipmodep; hci1394_set_xmit_skip_mode(wvp); /* * if skip to label,init dma bound addr to be * 1st xfer cmd after label */ dma_skiplabel_addr = 0; if ((wvp->skipmode == IXL1394_SKIP_TO_LABEL) && (wvp->skipxferp != NULL)) { xferctl_nxtp = (hci1394_xfer_ctl_t *) wvp->skipxferp->compiler_privatep; dma_skiplabel_addr = xferctl_nxtp->dma[0].dma_bound; } /* * set skip addrs for each descriptor blk at this * xfer start IXL cmd */ for (ii = 0; ii < repcnt; ii++) { switch (wvp->skipmode) { case IXL1394_SKIP_TO_LABEL: /* set dma bound address - label */ dma_skip_addr = dma_skiplabel_addr; break; case IXL1394_SKIP_TO_NEXT: /* set dma bound address - next */ if (ii < repcnt - 1) { dma_skip_addr = xferctl_curp-> dma[ii + 1].dma_bound; } else { dma_skip_addr = dma_execnext_addr; } break; case IXL1394_SKIP_TO_SELF: /* set dma bound address - self */ dma_skip_addr = xferctl_curp->dma[ii].dma_bound; break; case IXL1394_SKIP_TO_STOP: default: /* set dma bound address - stop */ dma_skip_addr = 0; break; } /* * determine address of first descriptor of * current descriptor block by adjusting addr of * last descriptor of current descriptor block */ hcidescp = ((hci1394_desc_t *) xferctl_curp->dma[ii].dma_descp); acc_hdl = xferctl_curp->dma[ii].dma_buf->bi_handle; /* * adjust by count of descriptors in this desc * block not including the last one (size of * descriptor) */ hcidescp -= ((xferctl_curp->dma[ii].dma_bound & DESC_Z_MASK) - 1); /* * adjust further if the last descriptor is * double sized */ if (ixlopcode == IXL1394_OP_SEND_HDR_ONLY) { hcidescp++; } /* * now set skip address into first descriptor * of descriptor block */ ddi_put32(acc_hdl, &hcidescp->branch, dma_skip_addr); } /* for */ } /* if */ } /* while */ TNF_PROBE_0_DEBUG(hci1394_finalize_all_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_finalize_cur_xfer_desc() * Build the openHCI descriptor for a packet or buffer based on info * currently collected into the working vars struct (wvp). After some * checks, this routine dispatches to the appropriate descriptor block * build (bld) routine for the packet or buf type. 
*/ static void hci1394_finalize_cur_xfer_desc(hci1394_comp_ixl_vars_t *wvp) { uint16_t ixlopcode; uint16_t ixlopraw; TNF_PROBE_0_DEBUG(hci1394_finalize_cur_xfer_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* extract opcode from current IXL cmd (if any) */ if (wvp->ixl_cur_cmdp != NULL) { ixlopcode = wvp->ixl_cur_cmdp->ixl_opcode; ixlopraw = ixlopcode & ~IXL1394_OPF_UPDATE; } else { ixlopcode = ixlopraw = IXL1394_OP_INVALID; } /* * if no xfer descriptor block being built, perform validity checks */ if (wvp->xfer_state == XFER_NONE) { /* * error if being finalized by IXL1394_OP_LABEL or * IXL1394_OP_JUMP or if at end, and have an unapplied * IXL1394_OP_SET_TAGSYNC, IXL1394_OP_SET_SKIPMODE or * IXL1394_OP_SET_SYNCWAIT */ if ((ixlopraw == IXL1394_OP_JUMP) || (ixlopraw == IXL1394_OP_LABEL) || (wvp->ixl_cur_cmdp == NULL) || (wvp->ixl_cur_cmdp->next_ixlp == NULL)) { if ((wvp->ixl_settagsync_cmdp != NULL) || (wvp->ixl_setskipmode_cmdp != NULL) || (wvp->ixl_setsyncwait_cnt != 0)) { wvp->dma_bld_error = IXL1394_EUNAPPLIED_SET_CMD; TNF_PROBE_2( hci1394_finalize_cur_xfer_desc_set_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_UNAPPLIED_SET_CMD: " "orphaned set (no associated packet)", tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp); TNF_PROBE_0_DEBUG( hci1394_finalize_cur_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } } /* error if finalize is due to updateable jump cmd */ if (ixlopcode == IXL1394_OP_JUMP_U) { wvp->dma_bld_error = IXL1394_EUPDATE_DISALLOWED; TNF_PROBE_2(hci1394_finalize_cur_xfer_desc_upd_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EUPDATE_DISALLOWED: jumpU w/out pkt", tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp); TNF_PROBE_0_DEBUG(hci1394_finalize_cur_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } TNF_PROBE_0_DEBUG(hci1394_finalize_cur_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* no error, no xfer */ return; } /* * finalize current xfer descriptor block being built */ /* count IXL xfer start command for descriptor block being built */ wvp->ixl_xfer_st_cnt++; /* * complete setting of cache flush evaluation flags; flags will already * have been set by updateable set cmds and non-start xfer pkt cmds */ /* now set cache flush flag if current xfer start cmnd is updateable */ if ((wvp->ixl_cur_xfer_stp->ixl_opcode & IXL1394_OPF_UPDATE) != 0) { wvp->xfer_hci_flush |= UPDATEABLE_XFER; } /* * also set cache flush flag if xfer being finalized by * updateable jump cmd */ if ((ixlopcode == IXL1394_OP_JUMP_U) != 0) { wvp->xfer_hci_flush |= UPDATEABLE_JUMP; } /* * Determine if cache flush required before building next descriptor * block. If xfer pkt command and any cache flush flags are set, * hci flush needed. * If buffer or special xfer command and xfer command is updateable or * an associated set command is updateable, hci flush is required now. * If a single-xfer buffer or special xfer command is finalized by * updateable jump command, hci flush is required now. * Note: a cache flush will be required later, before the last * descriptor block of a multi-xfer set of descriptor blocks is built, * if this (non-pkt) xfer is finalized by an updateable jump command. 
*/ if (wvp->xfer_hci_flush != 0) { if (((wvp->ixl_cur_xfer_stp->ixl_opcode & IXL1394_OPTY_XFER_PKT_ST) != 0) || ((wvp->xfer_hci_flush & (UPDATEABLE_XFER | UPDATEABLE_SET | INITIATING_LBL)) != 0)) { if (hci1394_flush_hci_cache(wvp) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG( hci1394_finalize_cur_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } } } /* * determine which kind of descriptor block to build based on * xfer state - hdr only, skip cycle, pkt or buf. */ switch (wvp->xfer_state) { case XFER_PKT: if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0) { hci1394_bld_recv_pkt_desc(wvp); } else { hci1394_bld_xmit_pkt_desc(wvp); } break; case XFER_BUF: if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0) { if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) != 0) { hci1394_bld_recv_buf_fill_desc(wvp); } else { hci1394_bld_recv_buf_ppb_desc(wvp); } } else { hci1394_bld_xmit_buf_desc(wvp); } break; case XMIT_HDRONLY: case XMIT_NOPKT: hci1394_bld_xmit_hdronly_nopkt_desc(wvp); break; default: /* internal compiler error */ TNF_PROBE_2(hci1394_finalize_cur_xfer_desc_internal_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_INTERNAL_ERROR: invalid state", tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp); wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR; } /* return if error */ if (wvp->dma_bld_error != 0) { TNF_PROBE_0_DEBUG(hci1394_finalize_cur_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } /* * if was finalizing IXL jump cmd, set compiler_privatep to * cur xfer IXL cmd */ if (ixlopraw == IXL1394_OP_JUMP) { wvp->ixl_cur_cmdp->compiler_privatep = (void *)wvp->ixl_cur_xfer_stp; } /* if cur xfer IXL initiated by IXL label cmd, set flag in xfer_ctl */ if (wvp->ixl_cur_labelp != NULL) { ((hci1394_xfer_ctl_t *) (wvp->ixl_cur_xfer_stp->compiler_privatep))->ctl_flags |= XCTL_LABELLED; wvp->ixl_cur_labelp = NULL; } /* * set any associated IXL set skipmode cmd into xfer_ctl of * cur xfer IXL cmd */ if (wvp->ixl_setskipmode_cmdp != NULL) { ((hci1394_xfer_ctl_t *) (wvp->ixl_cur_xfer_stp->compiler_privatep))->skipmodep = wvp->ixl_setskipmode_cmdp; } /* set no current xfer start cmd */ wvp->ixl_cur_xfer_stp = NULL; /* set no current set tag&sync, set skipmode or set syncwait commands */ wvp->ixl_settagsync_cmdp = NULL; wvp->ixl_setskipmode_cmdp = NULL; wvp->ixl_setsyncwait_cnt = 0; /* set no currently active descriptor blocks */ wvp->descriptors = 0; /* reset total packet length and buffers count */ wvp->xfer_pktlen = 0; wvp->xfer_bufcnt = 0; /* reset flush cache evaluation flags */ wvp->xfer_hci_flush = 0; /* set no xmit descriptor block being built */ wvp->xfer_state = XFER_NONE; TNF_PROBE_0_DEBUG(hci1394_finalize_cur_xfer_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_bld_recv_pkt_desc() * Used to create the openHCI dma descriptor block(s) for a receive packet. */ static void hci1394_bld_recv_pkt_desc(hci1394_comp_ixl_vars_t *wvp) { hci1394_xfer_ctl_t *xctlp; caddr_t dma_descp; uint32_t dma_desc_bound; uint32_t wait_for_sync; uint32_t ii; hci1394_desc_t *wv_descp; /* shorthand to local descrpt */ TNF_PROBE_0_DEBUG(hci1394_bld_recv_pkt_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* * is error if number of descriptors to be built exceeds maximum * descriptors allowed in a descriptor block. 
*/ if ((wvp->descriptors + wvp->xfer_bufcnt) > HCI1394_DESC_MAX_Z) { wvp->dma_bld_error = IXL1394_EFRAGMENT_OFLO; TNF_PROBE_3(hci1394_bld_recv_pkt_desc_fragment_oflo_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EFRAGMENT_OFLO", tnf_opaque, ixl_commandp, wvp->ixl_cur_xfer_stp, tnf_int, frag_count, wvp->descriptors + wvp->xfer_bufcnt); TNF_PROBE_0_DEBUG(hci1394_bld_recv_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* allocate an xfer_ctl struct, including 1 xfer_ctl_dma struct */ if ((xctlp = hci1394_alloc_xfer_ctl(wvp, 1)) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_2(hci1394_bld_recv_pkt_desc_mem_alloc_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: for xfer_ctl", tnf_opaque, ixl_commandp, wvp->ixl_cur_xfer_stp); TNF_PROBE_0_DEBUG(hci1394_bld_recv_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* * save xfer_ctl struct addr in compiler_privatep of * current IXL xfer cmd */ wvp->ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp; /* * if enabled, set wait for sync flag in first descriptor of * descriptor block */ if (wvp->ixl_setsyncwait_cnt > 0) { wvp->ixl_setsyncwait_cnt = 1; wait_for_sync = DESC_W_ENBL; } else { wait_for_sync = DESC_W_DSABL; } /* create descriptor block for this recv packet (xfer status enabled) */ for (ii = 0; ii < wvp->xfer_bufcnt; ii++) { wv_descp = &wvp->descriptor_block[wvp->descriptors]; if (ii == (wvp->xfer_bufcnt - 1)) { HCI1394_INIT_IR_PPB_ILAST(wv_descp, DESC_HDR_STAT_ENBL, DESC_INTR_DSABL, wait_for_sync, wvp->xfer_size[ii]); } else { HCI1394_INIT_IR_PPB_IMORE(wv_descp, wait_for_sync, wvp->xfer_size[ii]); } wv_descp->data_addr = wvp->xfer_bufp[ii]; wv_descp->branch = 0; wv_descp->status = (wvp->xfer_size[ii] << DESC_ST_RESCOUNT_SHIFT) & DESC_ST_RESCOUNT_MASK; wvp->descriptors++; } /* allocate and copy descriptor block to dma memory */ if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG(hci1394_bld_recv_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above function call */ return; } /* * set dma addrs into xfer_ctl structure (unbound addr (kernel virtual) * is last component) */ xctlp->dma[0].dma_bound = dma_desc_bound; xctlp->dma[0].dma_descp = dma_descp + (wvp->xfer_bufcnt - 1) * sizeof (hci1394_desc_t); xctlp->dma[0].dma_buf = &wvp->dma_currentp->mem; TNF_PROBE_0_DEBUG(hci1394_bld_recv_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_bld_recv_buf_ppb_desc() * Used to create the openHCI dma descriptor block(s) for a receive buf * in packet per buffer mode. 
*/ static void hci1394_bld_recv_buf_ppb_desc(hci1394_comp_ixl_vars_t *wvp) { hci1394_xfer_ctl_t *xctlp; ixl1394_xfer_buf_t *local_ixl_cur_xfer_stp; caddr_t dma_descp; uint32_t dma_desc_bound; uint32_t pktsize; uint32_t pktcnt; uint32_t wait_for_sync; uint32_t ii; hci1394_desc_t *wv_descp; /* shorthand to local descriptor */ TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_ppb_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); local_ixl_cur_xfer_stp = (ixl1394_xfer_buf_t *)wvp->ixl_cur_xfer_stp; /* determine number and size of pkt desc blocks to create */ pktsize = local_ixl_cur_xfer_stp->pkt_size; pktcnt = local_ixl_cur_xfer_stp->size / pktsize; /* allocate an xfer_ctl struct including pktcnt xfer_ctl_dma structs */ if ((xctlp = hci1394_alloc_xfer_ctl(wvp, pktcnt)) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_2(hci1394_bld_recv_buf_ppb_desc_mem_alloc_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: for xfer_ctl", tnf_opaque, ixl_commandp, wvp->ixl_cur_xfer_stp); TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_ppb_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* * save xfer_ctl struct addr in compiler_privatep of * current IXL xfer cmd */ local_ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp; /* * if enabled, set wait for sync flag in first descriptor in * descriptor block */ if (wvp->ixl_setsyncwait_cnt > 0) { wvp->ixl_setsyncwait_cnt = 1; wait_for_sync = DESC_W_ENBL; } else { wait_for_sync = DESC_W_DSABL; } /* create first descriptor block for this recv packet */ /* consists of one descriptor and xfer status is enabled */ wv_descp = &wvp->descriptor_block[wvp->descriptors]; HCI1394_INIT_IR_PPB_ILAST(wv_descp, DESC_HDR_STAT_ENBL, DESC_INTR_DSABL, wait_for_sync, pktsize); wv_descp->data_addr = local_ixl_cur_xfer_stp->ixl_buf.ixldmac_addr; wv_descp->branch = 0; wv_descp->status = (pktsize << DESC_ST_RESCOUNT_SHIFT) & DESC_ST_RESCOUNT_MASK; wvp->descriptors++; /* useful debug trace info - IXL command, and packet count and size */ TNF_PROBE_3_DEBUG(hci1394_bld_recv_buf_ppb_desc_recv_buf_info, HCI1394_TNF_HAL_INFO_ISOCH, "", tnf_opaque, ixl_commandp, wvp->ixl_cur_xfer_stp, tnf_int, pkt_count, pktcnt, tnf_int, pkt_size, pktsize); /* * generate as many contiguous descriptor blocks as there are * recv pkts */ for (ii = 0; ii < pktcnt; ii++) { /* if about to create last descriptor block */ if (ii == (pktcnt - 1)) { /* check and perform any required hci cache flush */ if (hci1394_flush_end_desc_check(wvp, ii) != DDI_SUCCESS) { TNF_PROBE_1_DEBUG( hci1394_bld_recv_buf_ppb_desc_fl_error, HCI1394_TNF_HAL_INFO_ISOCH, "", tnf_int, for_ii, ii); TNF_PROBE_0_DEBUG( hci1394_bld_recv_buf_ppb_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } } /* allocate and copy descriptor block to dma memory */ if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_ppb_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } /* * set dma addrs into xfer_ctl struct (unbound addr (kernel * virtual) is last component (descriptor)) */ xctlp->dma[ii].dma_bound = dma_desc_bound; xctlp->dma[ii].dma_descp = dma_descp; xctlp->dma[ii].dma_buf = &wvp->dma_currentp->mem; /* advance buffer ptr by pktsize in descriptor block */ wvp->descriptor_block[wvp->descriptors - 1].data_addr += pktsize; } TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_ppb_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_bld_recv_buf_fill_desc() * Used to 
create the openHCI dma descriptor block(s) for a receive buf * in buffer fill mode. */ static void hci1394_bld_recv_buf_fill_desc(hci1394_comp_ixl_vars_t *wvp) { hci1394_xfer_ctl_t *xctlp; caddr_t dma_descp; uint32_t dma_desc_bound; uint32_t wait_for_sync; ixl1394_xfer_buf_t *local_ixl_cur_xfer_stp; TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_fill_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); local_ixl_cur_xfer_stp = (ixl1394_xfer_buf_t *)wvp->ixl_cur_xfer_stp; /* allocate an xfer_ctl struct including 1 xfer_ctl_dma structs */ if ((xctlp = hci1394_alloc_xfer_ctl(wvp, 1)) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_2(hci1394_bld_recv_buf_fill_desc_mem_alloc_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: xfer_ctl", tnf_opaque, ixl_commandp, wvp->ixl_cur_xfer_stp); TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_fill_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* * save xfer_ctl struct addr in compiler_privatep of * current IXL xfer cmd */ local_ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp; /* * if enabled, set wait for sync flag in first descriptor of * descriptor block */ if (wvp->ixl_setsyncwait_cnt > 0) { wvp->ixl_setsyncwait_cnt = 1; wait_for_sync = DESC_W_ENBL; } else { wait_for_sync = DESC_W_DSABL; } /* * create descriptor block for this buffer fill mode recv command which * consists of one descriptor with xfer status enabled */ HCI1394_INIT_IR_BF_IMORE(&wvp->descriptor_block[wvp->descriptors], DESC_INTR_DSABL, wait_for_sync, local_ixl_cur_xfer_stp->size); wvp->descriptor_block[wvp->descriptors].data_addr = local_ixl_cur_xfer_stp->ixl_buf.ixldmac_addr; wvp->descriptor_block[wvp->descriptors].branch = 0; wvp->descriptor_block[wvp->descriptors].status = (local_ixl_cur_xfer_stp->size << DESC_ST_RESCOUNT_SHIFT) & DESC_ST_RESCOUNT_MASK; wvp->descriptors++; /* check and perform any required hci cache flush */ if (hci1394_flush_end_desc_check(wvp, 0) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_fill_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } /* allocate and copy descriptor block to dma memory */ if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_fill_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } /* * set dma addrs into xfer_ctl structure (unbound addr (kernel virtual) * is last component. */ xctlp->dma[0].dma_bound = dma_desc_bound; xctlp->dma[0].dma_descp = dma_descp; xctlp->dma[0].dma_buf = &wvp->dma_currentp->mem; TNF_PROBE_0_DEBUG(hci1394_bld_recv_buf_fill_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_bld_xmit_pkt_desc() * Used to create the openHCI dma descriptor block(s) for a transmit packet. */ static void hci1394_bld_xmit_pkt_desc(hci1394_comp_ixl_vars_t *wvp) { hci1394_xfer_ctl_t *xctlp; hci1394_output_more_imm_t *wv_omi_descp; /* shorthand to local descrp */ hci1394_desc_t *wv_descp; /* shorthand to local descriptor */ caddr_t dma_descp; /* dma bound memory for descriptor */ uint32_t dma_desc_bound; uint32_t ii; TNF_PROBE_0_DEBUG(hci1394_bld_xmit_pkt_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* * is error if number of descriptors to be built exceeds maximum * descriptors allowed in a descriptor block. Add 2 for the overhead * of the OMORE-Immediate. 
*/ if ((wvp->descriptors + 2 + wvp->xfer_bufcnt) > HCI1394_DESC_MAX_Z) { wvp->dma_bld_error = IXL1394_EFRAGMENT_OFLO; TNF_PROBE_3(hci1394_bld_xmit_pkt_desc_fragment_oflo_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EFRAGMENT_OFLO", tnf_opaque, ixl_commandp, wvp->ixl_cur_xfer_stp, tnf_int, frag_count, wvp->descriptors + 2 + wvp->xfer_bufcnt); TNF_PROBE_0_DEBUG(hci1394_bld_xmit_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* is error if total packet length exceeds 0xFFFF */ if (wvp->xfer_pktlen > 0xFFFF) { wvp->dma_bld_error = IXL1394_EPKTSIZE_MAX_OFLO; TNF_PROBE_3(hci1394_bld_xmit_pkt_desc_packet_oflo_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EPKTSIZE_MAX_OFLO", tnf_opaque, ixl_commandp, wvp->ixl_cur_xfer_stp, tnf_int, total_pktlen, wvp->xfer_pktlen); TNF_PROBE_0_DEBUG(hci1394_bld_xmit_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* allocate an xfer_ctl struct, including 1 xfer_ctl_dma struct */ if ((xctlp = hci1394_alloc_xfer_ctl(wvp, 1)) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_2(hci1394_bld_xmit_pkt_desc_mem_alloc_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: for xfer_ctl", tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp); TNF_PROBE_0_DEBUG(hci1394_bld_xmit_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* * save xfer_ctl struct addr in compiler_privatep of * current IXL xfer cmd */ wvp->ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp; /* generate values for the xmit pkt hdrs */ hci1394_set_xmit_pkt_hdr(wvp); /* * xmit pkt starts with an output more immediate, * a double sized hci1394_desc */ wv_omi_descp = (hci1394_output_more_imm_t *) (&wvp->descriptor_block[wvp->descriptors]); HCI1394_INIT_IT_OMORE_IMM(wv_omi_descp); wv_omi_descp->data_addr = 0; wv_omi_descp->branch = 0; wv_omi_descp->status = 0; wv_omi_descp->q1 = wvp->xmit_pkthdr1; wv_omi_descp->q2 = wvp->xmit_pkthdr2; wv_omi_descp->q3 = 0; wv_omi_descp->q4 = 0; wvp->descriptors += 2; /* * create the required output more hci1394_desc descriptor, then create * an output last hci1394_desc descriptor with xfer status enabled */ for (ii = 0; ii < wvp->xfer_bufcnt; ii++) { wv_descp = &wvp->descriptor_block[wvp->descriptors]; if (ii == (wvp->xfer_bufcnt - 1)) { HCI1394_INIT_IT_OLAST(wv_descp, DESC_HDR_STAT_ENBL, DESC_INTR_DSABL, wvp->xfer_size[ii]); } else { HCI1394_INIT_IT_OMORE(wv_descp, wvp->xfer_size[ii]); } wv_descp->data_addr = wvp->xfer_bufp[ii]; wv_descp->branch = 0; wv_descp->status = 0; wvp->descriptors++; } /* allocate and copy descriptor block to dma memory */ if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG(hci1394_bld_xmit_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } /* * set dma addrs into xfer_ctl structure (unbound addr (kernel virtual) * is last component (descriptor)) */ xctlp->dma[0].dma_bound = dma_desc_bound; xctlp->dma[0].dma_descp = dma_descp + (wvp->xfer_bufcnt + 1) * sizeof (hci1394_desc_t); xctlp->dma[0].dma_buf = &wvp->dma_currentp->mem; TNF_PROBE_0_DEBUG(hci1394_bld_xmit_pkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_bld_xmit_buf_desc() * Used to create the openHCI dma descriptor blocks for a transmit buffer. 
*/ static void hci1394_bld_xmit_buf_desc(hci1394_comp_ixl_vars_t *wvp) { hci1394_xfer_ctl_t *xctlp; ixl1394_xfer_buf_t *local_ixl_cur_xfer_stp; hci1394_output_more_imm_t *wv_omi_descp; /* shorthand to local descrp */ hci1394_desc_t *wv_descp; /* shorthand to local descriptor */ caddr_t dma_descp; uint32_t dma_desc_bound; uint32_t pktsize; uint32_t pktcnt; uint32_t ii; TNF_PROBE_0_DEBUG(hci1394_bld_xmit_buf_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); local_ixl_cur_xfer_stp = (ixl1394_xfer_buf_t *)wvp->ixl_cur_xfer_stp; /* determine number and size of pkt desc blocks to create */ pktsize = local_ixl_cur_xfer_stp->pkt_size; pktcnt = local_ixl_cur_xfer_stp->size / pktsize; /* allocate an xfer_ctl struct including pktcnt xfer_ctl_dma structs */ if ((xctlp = hci1394_alloc_xfer_ctl(wvp, pktcnt)) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_2(hci1394_bld_xmit_buf_desc_mem_alloc_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: for xfer_ctl", tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp); TNF_PROBE_0_DEBUG(hci1394_bld_xmit_buf_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* * save xfer_ctl struct addr in compiler_privatep of * current IXL xfer cmd */ local_ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp; /* generate values for the xmit pkt hdrs */ wvp->xfer_pktlen = pktsize; hci1394_set_xmit_pkt_hdr(wvp); /* * xmit pkt starts with an output more immediate, * a double sized hci1394_desc */ wv_omi_descp = (hci1394_output_more_imm_t *) &wvp->descriptor_block[wvp->descriptors]; HCI1394_INIT_IT_OMORE_IMM(wv_omi_descp); wv_omi_descp->data_addr = 0; wv_omi_descp->branch = 0; wv_omi_descp->status = 0; wv_omi_descp->q1 = wvp->xmit_pkthdr1; wv_omi_descp->q2 = wvp->xmit_pkthdr2; wv_omi_descp->q3 = 0; wv_omi_descp->q4 = 0; wvp->descriptors += 2; /* follow with a single output last descriptor w/status enabled */ wv_descp = &wvp->descriptor_block[wvp->descriptors]; HCI1394_INIT_IT_OLAST(wv_descp, DESC_HDR_STAT_ENBL, DESC_INTR_DSABL, pktsize); wv_descp->data_addr = local_ixl_cur_xfer_stp->ixl_buf.ixldmac_addr; wv_descp->branch = 0; wv_descp->status = 0; wvp->descriptors++; /* * generate as many contiguous descriptor blocks as there are * xmit packets */ for (ii = 0; ii < pktcnt; ii++) { /* if about to create last descriptor block */ if (ii == (pktcnt - 1)) { /* check and perform any required hci cache flush */ if (hci1394_flush_end_desc_check(wvp, ii) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG( hci1394_bld_xmit_buf_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } } /* allocate and copy descriptor block to dma memory */ if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG(hci1394_bld_xmit_buf_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } /* * set dma addrs into xfer_ctl structure (unbound addr * (kernel virtual) is last component (descriptor)) */ xctlp->dma[ii].dma_bound = dma_desc_bound; xctlp->dma[ii].dma_descp = dma_descp + 2 * sizeof (hci1394_desc_t); xctlp->dma[ii].dma_buf = &wvp->dma_currentp->mem; /* advance buffer ptr by pktsize in descriptor block */ wvp->descriptor_block[wvp->descriptors - 1].data_addr += pktsize; } TNF_PROBE_0_DEBUG(hci1394_bld_xmit_buf_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_bld_xmit_hdronly_nopkt_desc() * Used to create the openHCI dma descriptor blocks for transmitting * a packet consisting of an isochronous header with no data payload, * or for 
not sending a packet at all for a cycle. * * A Store_Value openhci descriptor is built at the start of each * IXL1394_OP_SEND_HDR_ONLY and IXL1394_OP_SEND_NO_PKT command's dma * descriptor block (to allow for skip cycle specification and set skipmode * processing for these commands). */ static void hci1394_bld_xmit_hdronly_nopkt_desc(hci1394_comp_ixl_vars_t *wvp) { hci1394_xfer_ctl_t *xctlp; hci1394_output_last_t *wv_ol_descp; /* shorthand to local descrp */ hci1394_output_last_imm_t *wv_oli_descp; /* shorthand to local descrp */ caddr_t dma_descp; uint32_t dma_desc_bound; uint32_t repcnt; uint32_t ii; TNF_PROBE_0_DEBUG(hci1394_bld_xmit_hdronly_nopkt_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* determine # of instances of output hdronly/nopkt to generate */ repcnt = ((ixl1394_xmit_special_t *)wvp->ixl_cur_xfer_stp)->count; /* * allocate an xfer_ctl structure which includes repcnt * xfer_ctl_dma structs */ if ((xctlp = hci1394_alloc_xfer_ctl(wvp, repcnt)) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_2(hci1394_bld_xmit_hdronly_nopkt_desc_mem_alloc_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL EMEM_ALLOC_FAIL: for xfer_ctl", tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp); TNF_PROBE_0_DEBUG(hci1394_bld_xmit_hdronly_nopkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return; } /* * save xfer_ctl struct addr in compiler_privatep of * current IXL xfer command */ wvp->ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp; /* * create a storevalue descriptor * (will be used for skip vs jump processing) */ hci1394_set_xmit_storevalue_desc(wvp); /* * processing now based on opcode: * IXL1394_OP_SEND_HDR_ONLY or IXL1394_OP_SEND_NO_PKT */ if ((wvp->ixl_cur_xfer_stp->ixl_opcode & ~IXL1394_OPF_UPDATE) == IXL1394_OP_SEND_HDR_ONLY) { /* for header only, generate values for the xmit pkt hdrs */ hci1394_set_xmit_pkt_hdr(wvp); /* * create an output last immediate (double sized) descriptor * xfer status enabled */ wv_oli_descp = (hci1394_output_last_imm_t *) &wvp->descriptor_block[wvp->descriptors]; HCI1394_INIT_IT_OLAST_IMM(wv_oli_descp, DESC_HDR_STAT_ENBL, DESC_INTR_DSABL); wv_oli_descp->data_addr = 0; wv_oli_descp->branch = 0; wv_oli_descp->status = 0; wv_oli_descp->q1 = wvp->xmit_pkthdr1; wv_oli_descp->q2 = wvp->xmit_pkthdr2; wv_oli_descp->q3 = 0; wv_oli_descp->q4 = 0; wvp->descriptors += 2; } else { /* * for skip cycle, create a single output last descriptor * with xfer status enabled */ wv_ol_descp = &wvp->descriptor_block[wvp->descriptors]; HCI1394_INIT_IT_OLAST(wv_ol_descp, DESC_HDR_STAT_ENBL, DESC_INTR_DSABL, 0); wv_ol_descp->data_addr = 0; wv_ol_descp->branch = 0; wv_ol_descp->status = 0; wvp->descriptors++; } /* * generate as many contiguous descriptor blocks as repeat count * indicates */ for (ii = 0; ii < repcnt; ii++) { /* if about to create last descriptor block */ if (ii == (repcnt - 1)) { /* check and perform any required hci cache flush */ if (hci1394_flush_end_desc_check(wvp, ii) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG( hci1394_bld_xmit_hdronly_nopkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } } /* allocate and copy descriptor block to dma memory */ if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG( hci1394_bld_xmit_hdronly_nopkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return; } /* * set dma addrs into xfer_ctl structure (unbound addr * (kernel virtual) is last component (descriptor) */ 
xctlp->dma[ii].dma_bound = dma_desc_bound; xctlp->dma[ii].dma_descp = dma_descp + sizeof (hci1394_desc_t); xctlp->dma[ii].dma_buf = &wvp->dma_currentp->mem; } TNF_PROBE_0_DEBUG(hci1394_bld_xmit_hdronly_nopkt_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_bld_dma_mem_desc_blk() * Used to put a given OpenHCI descriptor block into dma bound memory. */ static int hci1394_bld_dma_mem_desc_blk(hci1394_comp_ixl_vars_t *wvp, caddr_t *dma_descpp, uint32_t *dma_desc_bound) { uint32_t dma_bound; TNF_PROBE_0_DEBUG(hci1394_bld_dma_mem_desc_blk_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* set internal error if no descriptor blocks to build */ if (wvp->descriptors == 0) { wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR; TNF_PROBE_1(hci1394_bld_dma_mem_desc_blk_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_INTERNAL_ERROR: no descriptors to build"); TNF_PROBE_0_DEBUG(hci1394_bld_dma_mem_desc_blk_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_FAILURE); } /* allocate dma memory and move this descriptor block to it */ *dma_descpp = (caddr_t)hci1394_alloc_dma_mem(wvp, wvp->descriptors * sizeof (hci1394_desc_t), &dma_bound); if (*dma_descpp == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_1(hci1394_bld_dma_mem_desc_blk_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: for descriptors"); TNF_PROBE_0_DEBUG(hci1394_bld_dma_mem_desc_blk_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_FAILURE); } #ifdef _KERNEL ddi_rep_put32(wvp->dma_currentp->mem.bi_handle, (uint_t *)wvp->descriptor_block, (uint_t *)*dma_descpp, wvp->descriptors * (sizeof (hci1394_desc_t) >> 2), DDI_DEV_AUTOINCR); #else bcopy(wvp->descriptor_block, *dma_descpp, wvp->descriptors * sizeof (hci1394_desc_t)); #endif /* * convert allocated block's memory address to bus address space * include properly set Z bits (descriptor count). */ *dma_desc_bound = (dma_bound & ~DESC_Z_MASK) | wvp->descriptors; TNF_PROBE_0_DEBUG(hci1394_bld_dma_mem_desc_blk_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_SUCCESS); } /* * hci1394_set_xmit_pkt_hdr() * Compose the 2 quadlets for the xmit packet header. */ static void hci1394_set_xmit_pkt_hdr(hci1394_comp_ixl_vars_t *wvp) { uint16_t tag; uint16_t sync; TNF_PROBE_0_DEBUG(hci1394_set_xmit_pkt_hdr_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* * choose tag and sync bits for header either from default values or * from currently active set tag and sync IXL command * (clear command after use) */ if (wvp->ixl_settagsync_cmdp == NULL) { tag = wvp->default_tag; sync = wvp->default_sync; } else { tag = wvp->ixl_settagsync_cmdp->tag; sync = wvp->ixl_settagsync_cmdp->sync; wvp->ixl_settagsync_cmdp = NULL; } tag &= (DESC_PKT_TAG_MASK >> DESC_PKT_TAG_SHIFT); sync &= (DESC_PKT_SY_MASK >> DESC_PKT_SY_SHIFT); /* * build xmit pkt header - * hdr1 has speed, tag, channel number and sync bits * hdr2 has the packet length. */ wvp->xmit_pkthdr1 = (wvp->ctxtp->isospd << DESC_PKT_SPD_SHIFT) | (tag << DESC_PKT_TAG_SHIFT) | (wvp->ctxtp->isochan << DESC_PKT_CHAN_SHIFT) | (IEEE1394_TCODE_ISOCH << DESC_PKT_TCODE_SHIFT) | (sync << DESC_PKT_SY_SHIFT); wvp->xmit_pkthdr2 = wvp->xfer_pktlen << DESC_PKT_DATALEN_SHIFT; TNF_PROBE_0_DEBUG(hci1394_set_xmit_pkt_hdr_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_set_xmit_skip_mode() * Set current skip mode from default or from currently active command. 
* If non-default skip mode command's skip mode is skip to label, find * and set xfer start IXL command which follows skip to label into * compiler_privatep of set skipmode IXL command. */ static void hci1394_set_xmit_skip_mode(hci1394_comp_ixl_vars_t *wvp) { int err; TNF_PROBE_0_DEBUG(hci1394_set_xmit_skip_mode_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); if (wvp->ixl_setskipmode_cmdp == NULL) { wvp->skipmode = wvp->default_skipmode; wvp->skiplabelp = wvp->default_skiplabelp; wvp->skipxferp = wvp->default_skipxferp; } else { wvp->skipmode = wvp->ixl_setskipmode_cmdp->skipmode; wvp->skiplabelp = wvp->ixl_setskipmode_cmdp->label; wvp->skipxferp = NULL; if (wvp->skipmode == IXL1394_SKIP_TO_LABEL) { err = hci1394_ixl_find_next_exec_xfer(wvp->skiplabelp, NULL, &wvp->skipxferp); if (err == DDI_FAILURE) { TNF_PROBE_2(hci1394_set_xmit_skip_mode_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_ENO_DATA_PKTS: " "label<->jump loop detected for skiplabel " "w/no xfers", tnf_opaque, setskip_cmdp, wvp->ixl_setskipmode_cmdp); wvp->skipxferp = NULL; wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS; } } wvp->ixl_setskipmode_cmdp->compiler_privatep = (void *)wvp->skipxferp; } TNF_PROBE_0_DEBUG(hci1394_set_xmit_skip_mode_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_set_xmit_storevalue_desc() * Set up store_value DMA descriptor. * XMIT_HDRONLY or XMIT_NOPKT xfer states use a store value as first * descriptor in the descriptor block (to handle skip mode processing) */ static void hci1394_set_xmit_storevalue_desc(hci1394_comp_ixl_vars_t *wvp) { TNF_PROBE_0_DEBUG(hci1394_set_xmit_storevalue_desc_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); wvp->descriptors++; HCI1394_INIT_IT_STORE(&wvp->descriptor_block[wvp->descriptors - 1], wvp->storevalue_data); wvp->descriptor_block[wvp->descriptors - 1].data_addr = wvp->storevalue_bufp; wvp->descriptor_block[wvp->descriptors - 1].branch = 0; wvp->descriptor_block[wvp->descriptors - 1].status = 0; TNF_PROBE_0_DEBUG(hci1394_set_xmit_storevalue_desc_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); } /* * hci1394_set_next_xfer_buf() * This routine adds the data buffer to the current wvp list. * Returns DDI_SUCCESS or DDI_FAILURE. If DDI_FAILURE, wvp->dma_bld_error * contains the error code. 
*/ static int hci1394_set_next_xfer_buf(hci1394_comp_ixl_vars_t *wvp, uint32_t bufp, uint16_t size) { TNF_PROBE_0_DEBUG(hci1394_set_next_xfer_buf_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* error if buffer pointer is null (size may be 0) */ if (bufp == 0) { wvp->dma_bld_error = IXL1394_ENULL_BUFFER_ADDR; TNF_PROBE_0_DEBUG(hci1394_set_next_xfer_buf_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_FAILURE); } /* count new xfer buffer */ wvp->xfer_bufcnt++; /* error if exceeds maximum xfer buffer components allowed */ if (wvp->xfer_bufcnt > HCI1394_DESC_MAX_Z) { wvp->dma_bld_error = IXL1394_EFRAGMENT_OFLO; TNF_PROBE_2(hci1394_set_next_xfer_buf_error, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EFRAGMENT_OFLO", tnf_int, frag_count, wvp->xfer_bufcnt); TNF_PROBE_0_DEBUG(hci1394_set_next_xfer_buf_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_FAILURE); } /* save xmit buffer and size */ wvp->xfer_bufp[wvp->xfer_bufcnt - 1] = bufp; wvp->xfer_size[wvp->xfer_bufcnt - 1] = size; /* accumulate total packet length */ wvp->xfer_pktlen += size; TNF_PROBE_0_DEBUG(hci1394_set_next_xfer_buf_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_SUCCESS); } /* * hci1394_flush_end_desc_check() * Check if flush required before last descriptor block of a * non-unary set generated by an xfer buff or xmit special command * or a unary set provided no other flush has already been done. * * hci flush is required if xfer is finalized by an updateable * jump command. * * Returns DDI_SUCCESS or DDI_FAILURE. If DDI_FAILURE, wvp->dma_bld_error * will contain the error code. */ static int hci1394_flush_end_desc_check(hci1394_comp_ixl_vars_t *wvp, uint32_t count) { TNF_PROBE_0_DEBUG(hci1394_flush_end_desc_check_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); if ((count != 0) || ((wvp->xfer_hci_flush & (UPDATEABLE_XFER | UPDATEABLE_SET | INITIATING_LBL)) == 0)) { if (wvp->xfer_hci_flush & UPDATEABLE_JUMP) { if (hci1394_flush_hci_cache(wvp) != DDI_SUCCESS) { TNF_PROBE_0_DEBUG( hci1394_flush_end_desc_check_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* wvp->dma_bld_error is set by above call */ return (DDI_FAILURE); } } } TNF_PROBE_0_DEBUG(hci1394_flush_end_desc_check_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_SUCCESS); } /* * hci1394_flush_hci_cache() * Sun hci controller (RIO) implementation specific processing! * * Allocate dma memory for 1 hci descriptor block which will be left unused. * During execution this will cause a break in the contiguous address space * processing required by Sun's RIO implementation of the ohci controller and * will require the controller to refetch the next descriptor block from * host memory. * * General rules for cache flush preceeding a descriptor block in dma memory: * 1. Current IXL Xfer Command Updateable Rule: * Cache flush of IXL xfer command is required if it, or any of the * non-start IXL packet xfer commands associated with it, is flagged * updateable. * 2. Next IXL Xfer Command Indeterminate Rule: * Cache flush of IXL xfer command is required if an IXL jump command * which is flagged updateable has finalized the current IXL xfer * command. * 3. Updateable IXL Set Command Rule: * Cache flush of an IXL xfer command is required if any of the IXL * "Set" commands (IXL1394_OP_SET_*) associated with the IXL xfer * command (i.e. immediately preceeding it), is flagged updateable. * 4. Label Initiating Xfer Command Rule: * Cache flush of IXL xfer command is required if it is initiated by a * label IXL command. 
(This is to allow both a flush of the cache and * an interrupt to be generated easily and in close proximity to each * other. This can make possible simpler more successful reset of * descriptor statuses, especially under circumstances where the cycle * of hci commands is short and/or there are no callbacks distributed * through the span of xfers, etc... This is especially important for * input where statuses must be reset before execution cycles back * again. * * Application of above rules: * Packet mode IXL xfer commands: * If any of the above flush rules apply, flush cache should be done * immediately preceeding the generation of the dma descriptor block * for the packet xfer. * Non-packet mode IXL xfer commands (including IXL1394_OP_*BUF*, * SEND_HDR_ONLY, and SEND_NO_PKT): * If Rules #1, #3 or #4 applies, a flush cache should be done * immediately before the first generated dma descriptor block of the * non-packet xfer. * If Rule #2 applies, a flush cache should be done immediately before * the last generated dma descriptor block of the non-packet xfer. * * Note: The flush cache should be done at most once in each location that is * required to be flushed no matter how many rules apply (i.e. only once * before the first descriptor block and/or only once before the last * descriptor block generated). If more than one place requires a flush, * then both flush operations must be performed. This is determined by * taking all rules that apply into account. * * Returns DDI_SUCCESS or DDI_FAILURE. If DDI_FAILURE, wvp->dma_bld_error * will contain the error code. */ static int hci1394_flush_hci_cache(hci1394_comp_ixl_vars_t *wvp) { uint32_t dma_bound; TNF_PROBE_0_DEBUG(hci1394_flush_hci_cache_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); if (hci1394_alloc_dma_mem(wvp, sizeof (hci1394_desc_t), &dma_bound) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_1(hci1394_flush_hci_cache_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: for flush_hci_cache"); TNF_PROBE_0_DEBUG(hci1394_flush_hci_cache_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_FAILURE); } TNF_PROBE_0_DEBUG(hci1394_flush_hci_cache_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (DDI_SUCCESS); } /* * hci1394_alloc_storevalue_dma_mem() * Allocate dma memory for a 1 hci component descriptor block * which will be used as the dma memory location that ixl * compiler generated storevalue descriptor commands will * specify as location to store their data value. * * Returns 32-bit bound address of allocated mem, or NULL. */ static uint32_t hci1394_alloc_storevalue_dma_mem(hci1394_comp_ixl_vars_t *wvp) { uint32_t dma_bound; TNF_PROBE_0_DEBUG(hci1394_alloc_storevalue_dma_mem_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); if (hci1394_alloc_dma_mem(wvp, sizeof (hci1394_desc_t), &dma_bound) == NULL) { wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL; TNF_PROBE_2(hci1394_bld_alloc_storevalue_dma_mem_alloc_fail, HCI1394_TNF_HAL_ERROR_ISOCH, "", tnf_string, errmsg, "IXL1394_EMEM_ALLOC_FAIL: for storevalue dma", tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp); TNF_PROBE_0_DEBUG(hci1394_alloc_storevalue_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (0); } TNF_PROBE_0_DEBUG(hci1394_alloc_storevalue_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* return bound address of allocated memory */ return (dma_bound); } /* * hci1394_alloc_xfer_ctl() * Allocate an xfer_ctl structure. 
*/ static hci1394_xfer_ctl_t * hci1394_alloc_xfer_ctl(hci1394_comp_ixl_vars_t *wvp, uint32_t dmacnt) { hci1394_xfer_ctl_t *xcsp; TNF_PROBE_0_DEBUG(hci1394_alloc_xfer_ctl_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* * allocate an xfer_ctl struct which includes dmacnt of * xfer_ctl_dma structs */ #ifdef _KERNEL if ((xcsp = (hci1394_xfer_ctl_t *)kmem_zalloc( (sizeof (hci1394_xfer_ctl_t) + (dmacnt - 1) * sizeof (hci1394_xfer_ctl_dma_t)), KM_NOSLEEP)) == NULL) { TNF_PROBE_0_DEBUG(hci1394_alloc_xfer_ctl_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (NULL); } #else /* * This section makes it possible to easily run and test the compiler in * user mode. */ if ((xcsp = (hci1394_xfer_ctl_t *)calloc(1, sizeof (hci1394_xfer_ctl_t) + (dmacnt - 1) * sizeof (hci1394_xfer_ctl_dma_t))) == NULL) { TNF_PROBE_0_DEBUG(hci1394_alloc_xfer_ctl_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (NULL); } #endif /* * set dma structure count into allocated xfer_ctl struct for * later deletion. */ xcsp->cnt = dmacnt; /* link it to previously allocated xfer_ctl structs or set as first */ if (wvp->xcs_firstp == NULL) { wvp->xcs_firstp = wvp->xcs_currentp = xcsp; } else { wvp->xcs_currentp->ctl_nextp = xcsp; wvp->xcs_currentp = xcsp; } TNF_PROBE_0_DEBUG(hci1394_alloc_xfer_ctl_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* return allocated xfer_ctl structure */ return (xcsp); } /* * hci1394_alloc_dma_mem() * Allocates and binds memory for openHCI DMA descriptors as needed. */ static void * hci1394_alloc_dma_mem(hci1394_comp_ixl_vars_t *wvp, uint32_t size, uint32_t *dma_bound) { hci1394_idma_desc_mem_t *dma_new; hci1394_buf_parms_t parms; hci1394_buf_info_t *memp; void *dma_mem_ret; int ret; TNF_PROBE_0_DEBUG(hci1394_alloc_dma_mem_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* * if no dma has been allocated or current request exceeds * remaining memory */ if ((wvp->dma_currentp == NULL) || (size > (wvp->dma_currentp->mem.bi_cookie.dmac_size - wvp->dma_currentp->used))) { #ifdef _KERNEL /* kernel-mode memory allocation for driver */ /* allocate struct to track more dma descriptor memory */ if ((dma_new = (hci1394_idma_desc_mem_t *) kmem_zalloc(sizeof (hci1394_idma_desc_mem_t), KM_NOSLEEP)) == NULL) { TNF_PROBE_0_DEBUG(hci1394_alloc_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (NULL); } /* * if more cookies available from the current mem, try to find * one of suitable size. Cookies that are too small will be * skipped and unused. Given that cookie size is always at least * 1 page long and HCI1394_DESC_MAX_Z is much smaller than that, * it's a small price to pay for code simplicity. 
*/ if (wvp->dma_currentp != NULL) { /* new struct is derived from current */ memp = &wvp->dma_currentp->mem; dma_new->mem = *memp; dma_new->offset = wvp->dma_currentp->offset + memp->bi_cookie.dmac_size; for (; memp->bi_cookie_count > 1; memp->bi_cookie_count--) { ddi_dma_nextcookie(memp->bi_dma_handle, &dma_new->mem.bi_cookie); if (dma_new->mem.bi_cookie.dmac_size >= size) { dma_new->mem_handle = wvp->dma_currentp->mem_handle; wvp->dma_currentp->mem_handle = NULL; dma_new->mem.bi_cookie_count--; break; } dma_new->offset += dma_new->mem.bi_cookie.dmac_size; } } /* if no luck with current buffer, allocate a new one */ if (dma_new->mem_handle == NULL) { parms.bp_length = HCI1394_IXL_PAGESIZE; parms.bp_max_cookies = OHCI_MAX_COOKIE; parms.bp_alignment = 16; ret = hci1394_buf_alloc(&wvp->soft_statep->drvinfo, &parms, &dma_new->mem, &dma_new->mem_handle); if (ret != DDI_SUCCESS) { kmem_free(dma_new, sizeof (hci1394_idma_desc_mem_t)); TNF_PROBE_0_DEBUG(hci1394_alloc_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (NULL); } /* paranoia: this is not supposed to happen */ if (dma_new->mem.bi_cookie.dmac_size < size) { hci1394_buf_free(&dma_new->mem_handle); kmem_free(dma_new, sizeof (hci1394_idma_desc_mem_t)); TNF_PROBE_0_DEBUG(hci1394_alloc_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (NULL); } dma_new->offset = 0; } #else /* user-mode memory allocation for user mode compiler tests */ /* allocate another dma_desc_mem struct */ if ((dma_new = (hci1394_idma_desc_mem_t *) calloc(1, sizeof (hci1394_idma_desc_mem_t))) == NULL) { TNF_PROBE_0_DEBUG(hci1394_alloc_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (NULL); } dma_new->mem.bi_dma_handle = NULL; dma_new->mem.bi_handle = NULL; if ((dma_new->mem.bi_kaddr = (caddr_t)calloc(1, HCI1394_IXL_PAGESIZE)) == NULL) { TNF_PROBE_0_DEBUG(hci1394_alloc_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (NULL); } dma_new->mem.bi_cookie.dmac_address = (unsigned long)dma_new->mem.bi_kaddr; dma_new->mem.bi_real_length = HCI1394_IXL_PAGESIZE; dma_new->mem.bi_cookie_count = 1; #endif /* if this is not first dma_desc_mem, link last one to it */ if (wvp->dma_currentp != NULL) { wvp->dma_currentp->dma_nextp = dma_new; wvp->dma_currentp = dma_new; } else { /* else set it as first one */ wvp->dma_currentp = wvp->dma_firstp = dma_new; } } /* now allocate requested memory from current block */ dma_mem_ret = wvp->dma_currentp->mem.bi_kaddr + wvp->dma_currentp->offset + wvp->dma_currentp->used; *dma_bound = wvp->dma_currentp->mem.bi_cookie.dmac_address + wvp->dma_currentp->used; wvp->dma_currentp->used += size; TNF_PROBE_0_DEBUG(hci1394_alloc_dma_mem_exit, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (dma_mem_ret); } /* * hci1394_is_opcode_valid() * given an ixl opcode, this routine returns B_TRUE if it is a * recognized opcode and B_FALSE if it is not recognized. * Note that the FULL 16 bits of the opcode are checked which includes * various flags and not just the low order 8 bits of unique code. 
*/ static boolean_t hci1394_is_opcode_valid(uint16_t ixlopcode) { TNF_PROBE_0_DEBUG(hci1394_is_opcode_bad_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); /* if it's not one we know about, then it's bad */ switch (ixlopcode) { case IXL1394_OP_LABEL: case IXL1394_OP_JUMP: case IXL1394_OP_CALLBACK: case IXL1394_OP_RECV_PKT: case IXL1394_OP_RECV_PKT_ST: case IXL1394_OP_RECV_BUF: case IXL1394_OP_SEND_PKT: case IXL1394_OP_SEND_PKT_ST: case IXL1394_OP_SEND_PKT_WHDR_ST: case IXL1394_OP_SEND_BUF: case IXL1394_OP_SEND_HDR_ONLY: case IXL1394_OP_SEND_NO_PKT: case IXL1394_OP_STORE_TIMESTAMP: case IXL1394_OP_SET_TAGSYNC: case IXL1394_OP_SET_SKIPMODE: case IXL1394_OP_SET_SYNCWAIT: case IXL1394_OP_JUMP_U: case IXL1394_OP_CALLBACK_U: case IXL1394_OP_RECV_PKT_U: case IXL1394_OP_RECV_PKT_ST_U: case IXL1394_OP_RECV_BUF_U: case IXL1394_OP_SEND_PKT_U: case IXL1394_OP_SEND_PKT_ST_U: case IXL1394_OP_SEND_PKT_WHDR_ST_U: case IXL1394_OP_SEND_BUF_U: case IXL1394_OP_SET_TAGSYNC_U: case IXL1394_OP_SET_SKIPMODE_U: TNF_PROBE_1_DEBUG(hci1394_is_opcode_valid_enter, HCI1394_TNF_HAL_STACK_ISOCH, "", tnf_string, msg, "ixl opcode is valid"); TNF_PROBE_0_DEBUG(hci1394_is_opcode_bad_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (B_TRUE); default: TNF_PROBE_2(hci1394_is_opcode_valid_enter, HCI1394_TNF_HAL_STACK_ISOCH, "", tnf_string, msg, "ixl opcode is NOT valid", tnf_opaque, ixl_opcode, ixlopcode); TNF_PROBE_0_DEBUG(hci1394_is_opcode_valid_enter, HCI1394_TNF_HAL_STACK_ISOCH, ""); return (B_FALSE); } }
/* * Copyright 2021 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import _ from "lodash"; import m from "mithril"; import Stream from "mithril/stream"; import {TaskJSON, Template} from "models/admin_templates/templates"; import {EnvironmentVariableJSON} from "models/environment_variables/types"; import {PipelineStructure} from "models/internal_pipeline_structure/pipeline_structure"; import {ArtifactJSON} from "models/pipeline_configs/artifact"; import {JobJSON} from "models/pipeline_configs/job"; import {StageJSON} from "models/pipeline_configs/stage"; import {TabJSON} from "models/pipeline_configs/tab"; import {ModelWithNameIdentifierValidator} from "models/shared/name_validation"; import {PluginInfos} from "models/shared/plugin_infos_new/plugin_info"; import * as Buttons from "views/components/buttons"; import {FlashMessage, MessageType} from "views/components/flash_message"; import {CheckboxField, SelectField, SelectFieldOptions, TextField} from "views/components/forms/input_fields"; import {Tree} from "views/components/hierarchy/tree"; import {KeyValuePair} from "views/components/key_value_pair"; import {Link} from "views/components/link"; import {Modal, Size} from "views/components/modal"; import {Tabs} from "views/components/tab"; import {Table} from "views/components/table"; import styles from "views/pages/admin_templates/modals.scss"; import {TaskWidget} from "views/pages/admin_templates/task_widget"; const inflection = require("lodash-inflection"); export class CreateTemplateModal extends Modal { private readonly callback: (newTemplateName: string, basedOnPipeline?: string) => void; private readonly template: ModelWithNameIdentifierValidator; private readonly basedOnPipelineCheckbox: Stream<boolean>; private readonly selectedPipeline: Stream<string>; private readonly pipelines: string[]; constructor(pipelineStructure: PipelineStructure, callback: (newTemplateName: string, basedOnPipeline?: string) => void) { super(); this.callback = callback; this.template = new ModelWithNameIdentifierValidator(); this.basedOnPipelineCheckbox = Stream<boolean>(false); this.selectedPipeline = Stream<string>(); this.pipelines = pipelineStructure.getAllConfigPipelinesNotUsingTemplates().sort((a, b) => { return a.toLowerCase().localeCompare(b.toLowerCase()); }); } body() { return ( <div> <TextField property={this.template.name} errorText={this.template.errors().errorsForDisplay("name")} onchange={() => this.template.validate("name")} required={true} label={"Template name"}/> <CheckboxField property={this.basedOnPipelineCheckbox} label={"Extract from pipeline"} helpText={"If a pipeline is not selected, a template with a default stage and default job will be created.
If a pipeline is selected, the template will use the stages from the pipeline and the pipeline itself will be modified to use this template."}/> {this.maybeShowPipelines()} </div> ); } buttons() { const disabled = _.isEmpty(this.template.name()) || this.template.errors().hasErrors() || (this.basedOnPipelineCheckbox() && _.isEmpty(this.selectedPipeline())); return [<Buttons.Primary data-test-id="button-create" disabled={disabled} onclick={this.create.bind(this)}>Create</Buttons.Primary>]; } title(): string { return "Create a new template"; } private create() { this.callback(this.template.name(), this.basedOnPipelineCheckbox() ? this.selectedPipeline() : undefined); super.close(); } private maybeShowPipelines() { if (this.basedOnPipelineCheckbox()) { return ( <SelectField property={this.selectedPipeline} label={"Pipeline"} helpText={"This pipeline will be modified to use the newly created template."}> <SelectFieldOptions items={this.pipelines} selected={this.selectedPipeline()}/> </SelectField> ); } } } export class ShowTemplateModal extends Modal { private readonly template: string; private readonly templateConfig: Stream<Template>; private readonly pluginInfos: PluginInfos; private selectedStage?: StageJSON; private selectedJob?: JobJSON; constructor(template: string, templateConfig: Stream<Template>, pluginInfos: PluginInfos) { super(Size.large); this.fixedHeight = true; this.template = template; this.templateConfig = templateConfig; this.pluginInfos = pluginInfos; this.templateConfig(); } body() { if (this.isLoading()) { return undefined; } return ( <div class={styles.parent}> <div data-test-id="stage-job-tree" class={styles.stageJobTree}> {this.templateConfig().stages.map((eachStage) => { const stageLink = ( <Link href="#" onclick={() => { this.selectStage(eachStage); return false; }}>{eachStage.name}</Link> ); return ( <Tree datum={stageLink}> {eachStage.jobs.map((eachJob) => { const jobLink = ( <Link href="#" onclick={() => { this.selectJob(eachStage, eachJob); return false; }}>{eachJob.name}</Link> ); return ( <Tree datum={jobLink}/> ); })} </Tree>); })} </div> {this.showSelection()} </div> ); } title(): string { return `Showing template ${this.template}`; } private selectStage(eachStage: StageJSON) { this.selectedStage = eachStage; this.selectedJob = undefined; } private selectJob(eachStage: StageJSON, eachJob: JobJSON) { this.selectedStage = eachStage; this.selectedJob = eachJob; } private showSelection() { if (!this.selectedJob && !this.selectedStage) { this.selectStage(this.templateConfig().stages[0]); } if (this.selectedJob) { return this.showJob(this.selectedStage!, this.selectedJob!); } return this.showStage(this.selectedStage!); } private showStage(stage: StageJSON) { const stageProperties = new Map([ ["Stage Type", stage.approval.type === "success" ? 
"On success" : "Manual"], ["Fetch Materials", this.yesOrNo(stage.fetch_materials)], ["Never Cleanup Artifacts", this.yesOrNo(stage.never_cleanup_artifacts)], ["Clean Working Directory", this.yesOrNo(stage.clean_working_directory)], ]); return ( <div data-test-id={`selected-stage-${stage.name}`} class={styles.stageOrJob}> Showing stage <em>{stage.name}</em> <hr/> <div class={styles.propertiesWrapper}> <KeyValuePair data={stageProperties}/> </div> <Tabs tabs={["Environment Variables", "Permissions"]} contents={ [this.environmentVariables(stage.environment_variables), this.stagePermissions(stage)]}/> </div> ); } private showJob(stage: StageJSON, job: JobJSON) { const jobProperties = new Map<string, any>([ ["Resources", _.isEmpty(job.resources) ? null : job.resources.join(", ")], ["Elastic Profile ID", job.elastic_profile_id], ["Job Timeout", (this.jobTimeout(job))], ["Run type", this.jobRunType(job)], ]); return ( <div data-test-id={`selected-job-${stage.name}-${job.name}`} class={styles.stageOrJob}> Showing job <em>{stage.name}</em> &gt; <em>{job.name}</em> <hr/> <div className={styles.propertiesWrapper}> <KeyValuePair data={jobProperties}/> </div> <Tabs tabs={["Tasks", "Artifacts", "Environment Variables", "Custom Tabs"]} contents={[this.tasks(job.tasks), this.artifacts(job.artifacts), this.environmentVariables(job.environment_variables), this.tabs( job.tabs)]}/> </div> ); } private jobTimeout(job: JobJSON) { let timeout: any; if (_.isNil(job.timeout)) { timeout = "Use server default"; } else if (job.timeout === 0) { timeout = "Never timeout"; } else { timeout = `Cancel after ${job.timeout} ${inflection.pluralize("minute", job.timeout)} of inactivity`; } return timeout; } private jobRunType(job: JobJSON) { if (job.run_instance_count === "all") { return "Run on all agents"; } else if (job.run_instance_count === 0) { return `Run on ${job.run_instance_count} agents`; } else { return `Run on 1 agent`; } } private yesOrNo(b: boolean) { return b ? "Yes" : "No"; } private environmentVariables(variables: EnvironmentVariableJSON[]) { if (_.isEmpty(variables)) { return <FlashMessage message="No environment variables have been configured." type={MessageType.info}/>; } const data = new Map(variables.map((eachVar) => { return [eachVar.name, eachVar.secure ? "******" : eachVar.value]; })); return <KeyValuePair data={data}/>; } private stagePermissions(stage: StageJSON) { const authorization = stage.approval.authorization; const data = new Map<string, m.Children>(); if (authorization) { if (authorization.users.length >= 1) { data.set("Users", authorization.users.join(", ")); } if (authorization.roles.length >= 1) { data.set("Roles", authorization.roles.join(", ")); } } if (data.size === 0) { return ( <FlashMessage message="There is no authorization configured for this stage nor its pipeline group. Only GoCD administrators can operate this stage." 
type={MessageType.info}/> ); } else { return <KeyValuePair data={data}/>; } } private artifacts(artifacts: ArtifactJSON[]) { if (_.isEmpty(artifacts)) { return (<FlashMessage message="No artifacts have been configured" type={MessageType.info}/>); } const artifactsGroupedByType = _.groupBy(artifacts, (eachArtifact) => eachArtifact.type); return [ this.buildArtifacts(artifactsGroupedByType.build), this.testArtifacts(artifactsGroupedByType.test), this.externalArtifacts(artifactsGroupedByType.external), ]; } private tabs(tabs: TabJSON[]) { if (_.isEmpty(tabs)) { return (<FlashMessage message="No custom tabs have been configured" type={MessageType.info}/>); } const data = tabs.map((eachTab) => { return [eachTab.name, eachTab.path]; }); return <Table headers={["Tab Name", "Path"]} data={data}/>; } private tasks(tasks: TaskJSON[]) { if (_.isEmpty(tasks)) { return (<FlashMessage message="No tasks have been configured" type={MessageType.info}/>); } return ( <div class={styles.taskList}> {tasks.map((eachTask, index) => { return ( <div data-test-id={`task-${index}`} class={styles.taskRow}> <div class={styles.taskDescription}> <TaskWidget pluginInfos={this.pluginInfos} task={eachTask}/> </div> <div class={styles.taskRunIf}> Run if {eachTask.attributes.run_if.join(", ")} </div> </div> ); })} </div> ); } private buildArtifacts(artifacts: ArtifactJSON[]) { if (_.isEmpty(artifacts)) { return <FlashMessage message="No build artifacts have been configured" type={MessageType.info}/>; } const data = artifacts.map((eachArtifact) => { return [eachArtifact.source, eachArtifact.destination]; }); return <Table caption="Build Artifacts" headers={["Source", "Destination"]} data={data}/>; } private testArtifacts(artifacts: ArtifactJSON[]) { if (_.isEmpty(artifacts)) { return <FlashMessage message="No test artifacts have been configured" type={MessageType.info}/>; } const data = artifacts.map((eachArtifact) => { return [eachArtifact.source, eachArtifact.destination]; }); return <Table caption="Test Artifacts" headers={["Source", "Destination"]} data={data}/>; } private externalArtifacts(artifacts: ArtifactJSON[]) { if (_.isEmpty(artifacts)) { return <FlashMessage message="No external artifacts have been configured" type={MessageType.info}/>; } return [ <div>External Artifacts</div>, artifacts.map((eachArtifact) => { return this.externalArtifact(eachArtifact); }) ]; } private externalArtifact(artifact: ArtifactJSON) { const artifactInfo = new Map([["Artifact ID", artifact.artifact_id], ["Store ID", artifact.store_id]]); const artifactConfig = new Map(artifact.configuration!.map((eachConfig) => { return [eachConfig.key, eachConfig.value || "******"]; })); return ( <div> <KeyValuePair data={artifactInfo}/> Configuration: <div style="padding-left: 15px;"> <KeyValuePair data={artifactConfig}/> </div> </div> ); } }
Staff from the OAE, Student Housing, and Stanford Dining will hold two drop-in information sessions in April, prior to the 2019 Disability Draw deadline. If you plan to participate in the 2019 Disability Draw, please review these important dates and deadlines. Stanford University has a strong commitment to maintaining a diverse and stimulating academic community, representing a broad spectrum of talents and experiences. Students with disabilities, actively participating in the various aspects of life at Stanford, are an essential part of that diversity.
Peak Oxygen Uptake Recovery Delay After Maximal Exercise in Patients With Heart Failure Peak oxygen uptake recovery delay (Vo2peakRD) is measured as the time until post-exercise oxygen uptake (Vo2) decreases below Vo2peak following maximal cardiopulmonary exercise testing. Prolonged Vo2peakRD following exercise is associated with markers of greater disease severity in patients with heart failure. Purpose: Peak oxygen uptake recovery delay (Vo2peakRD), measured as the time until post-exercise oxygen uptake (Vo2) decreases below Vo2peak following maximal cardiopulmonary exercise testing (CPX), has been recognized as an abnormal response, associated with reduced cardiac output reserve during exercise in patients with heart failure (HF). In the current study we examined the association of Vo2peakRD during routine CPX testing of patients with symptomatic HF across a wide range of left ventricular ejection fraction (LVEF) values with clinical biomarkers. Methods: In this retrospective study, 80 clinically stable symptomatic HF patients across a wide range of LVEF at our institution who put forth a minimally acceptable effort during CPX testing (respiratory exchange ratio ≥ 1.00) were evaluated. The Vo2peakRD was measured in 10-sec intervals following maximal CPX testing. Markers of elevated cardiac filling pressures (N-terminal pro-brain natriuretic peptide and echocardio-Doppler E/e') and other key CPX parameters were explored for their association with Vo2peakRD. Results: The median Vo2peakRD and Vo2peak were 10 (interquartile range 10, 40) sec and 13.9 (11.6, 16.4) mL kg−1 min−1, respectively. Vo2peakRD demonstrated a positive linear trend with serum NT-proBNP levels and E/e' (TJT = 1239.500, z = 2.634, P < .01; TJT = 1081.000, z = 2.046, P = .04, respectively). Conclusion: Prolonged Vo2peakRD following exercise is associated with markers of greater disease severity in patients with HF.
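Measured this way, the delay is simple to compute from interval-averaged recovery data. The following minimal Python sketch illustrates the measurement; the function name, input format, and the zero-delay convention for the first interval are assumptions for illustration, not details from the study.

def vo2peak_recovery_delay(recovery_vo2, vo2_peak, interval_sec=10):
    # recovery_vo2: Vo2 values averaged over successive 10-sec intervals
    # after peak exercise; vo2_peak: Vo2peak from the test (same units).
    # Returns seconds elapsed until Vo2 first falls below Vo2peak.
    for i, vo2 in enumerate(recovery_vo2):
        if vo2 < vo2_peak:
            return i * interval_sec
    # Vo2 never fell below Vo2peak within the sampled recovery window.
    return len(recovery_vo2) * interval_sec

# Example: Vo2 stays at/above a peak of 14.0 for two intervals -> 20 sec.
print(vo2peak_recovery_delay([14.3, 14.1, 13.2, 12.0], vo2_peak=14.0))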
def ball_start(event):
    """Mouse handler: drops the ball and lets it bounce until it rolls off screen."""
    global roll
    # Start only when the ball sits at its start position and we still have
    # rolls left (ball, window, pause, and the upper-case constants are
    # assumed globals defined by the enclosing program).
    if ball.x == START_X and ball.y == START_Y and roll <= 3:
        vy = 4                      # initial downward speed
        while True:
            vy += GRAVITY           # gravity accelerates the fall each frame
            ball.move(VX, vy)       # constant horizontal speed, varying vertical
            pause(DELAY)
            if ball.y + ball.height >= window.height and vy > 0:
                vy = -vy * REDUCE   # hit the floor while falling: bounce and damp
            elif ball.x > window.width:
                roll += 1           # the ball has left the window: count the roll
                break
        window.add(ball, START_X, START_Y)  # reset the ball for the next roll
# Test file for user functions.
from okcupyd.db import user


def test_have_messaged_before(T):
    message_thread_model = T.factory.message_thread()
    assert user.have_messaged_by_username(
        message_thread_model.initiator.handle,
        message_thread_model.respondent.handle
    )
    assert user.have_messaged_by_username(
        message_thread_model.respondent.handle,
        message_thread_model.initiator.handle
    )

    assert not user.have_messaged_by_username('a', 'b')
    assert not user.have_messaged_by_username(
        message_thread_model.respondent.handle, 'a'
    )

    T.factory.user('b')
    assert not user.have_messaged_by_username(
        'b', message_thread_model.initiator.handle
    )
A Multiple-Loss Dual-Output Convolutional Neural Network for Fashion Class Classification An improved multi-loss, dual-output convolutional neural network was deployed to extract features from two disjointed datasets (fashion and color) with separate convolutional blocks in a single network. The first convolutional block extracts features from the first image dataset (fashion) and determines the classes to which the images belong. The second block is responsible for learning the information encoded in the second dataset (color), classifying it and appending its features to those extracted from the first convolutional block. Each block has its own loss function, which makes the network a multi-loss convolutional neural network. Two fully connected output heads are generated at the network terminal, enabling the network to perform predictions on a combination of disjointed labels. To validate the classification ability of the network, we conducted several experiments with different network parameters and varying data sizes and obtained classification accuracies of 98% and 95% on the fashion and color sets, respectively.
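The two-block, two-head, two-loss layout described above can be sketched in a few lines of PyTorch. The layer sizes, the 28x28 inputs, and the unweighted loss sum below are assumptions for illustration, not the paper's exact architecture.

# Illustrative PyTorch sketch of a dual-output, multi-loss CNN.
import torch
import torch.nn as nn

class DualOutputCNN(nn.Module):
    def __init__(self, n_fashion=10, n_color=8):
        super().__init__()
        # First convolutional block: learns fashion-category features.
        self.fashion_block = nn.Sequential(
            nn.Conv2d(1, 16, 3, padding=1), nn.ReLU(), nn.MaxPool2d(2),
            nn.Conv2d(16, 32, 3, padding=1), nn.ReLU(), nn.MaxPool2d(2),
            nn.Flatten())
        # Second convolutional block: learns color features independently.
        self.color_block = nn.Sequential(
            nn.Conv2d(3, 16, 3, padding=1), nn.ReLU(), nn.MaxPool2d(2),
            nn.Conv2d(16, 32, 3, padding=1), nn.ReLU(), nn.MaxPool2d(2),
            nn.Flatten())
        # Two fully connected output heads, one per label set.
        self.fashion_head = nn.Linear(32 * 7 * 7, n_fashion)
        self.color_head = nn.Linear(32 * 7 * 7, n_color)

    def forward(self, fashion_img, color_img):
        return (self.fashion_head(self.fashion_block(fashion_img)),
                self.color_head(self.color_block(color_img)))

model = DualOutputCNN()
criterion = nn.CrossEntropyLoss()
fashion_logits, color_logits = model(torch.randn(4, 1, 28, 28),
                                     torch.randn(4, 3, 28, 28))
# One loss per head; their sum trains both blocks jointly.
loss = criterion(fashion_logits, torch.randint(0, 10, (4,))) + \
       criterion(color_logits, torch.randint(0, 8, (4,)))
loss.backward()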
The Ku Klux Klan is coming to the Hamptons. The hate group — which says it has some 2,500 members on Long Island — is planning to disrupt a Black Lives Matter rally scheduled for noon Sunday at the traffic circle near Village Hall in Westhampton Beach, according to Patch.com. Gary Monker, the Exalted Cyclops Chief Officer of the KKK’s New York chapter, told Patch on Tuesday that Black Lives Matter and the Black Panthers aren’t who they say they are. “[They are] a contradiction,” Monker told Patch. “They always say they have peaceful protests but nothing is ever peaceful. They rape, pilfer, loot. They’re rioting and using this as an excuse to do wrong. It’s not right.” Black Lives Matter officials argued that their rallies are always peaceful. “[There has been] absolutely no violence. Where is he getting his information?” Black Lives Matter organizer Vanessa Vascez-Corleone told Patch. Vascez-Corleone said the KKK was welcome to show up — if they have the guts. “I would like to invite them to the rally. I honestly don’t believe they’re going to show up. I hope they come out of hiding,” she told Patch.
The profiling and identification of chemical components, prototypes and metabolites of Run-zao-zhi-yang capsule in rat plasma, urine and bile by an UPLC-Q-TOF/MSE -based high-throughput strategy. Run-zao-zhi-yang (RZZY) capsule, a traditional Chinese medicine formula, is popularly used for the treatment of dermatitis and eczema. However, few studies have been carried out on RZZY and its metabolites. In this study, we developed a three-step strategy to rapidly characterize the chemical constituents and metabolites of RZZY using ultra-high-performance liquid chromatography coupled with quadrupole time-of-flight mass spectrometry. A total of 41 chemical components were characterized from RZZY. Among these, there are 11 flavonoids, six alkaloids, six stilbene glycosides, five anthraquinones and 13 other compounds. In addition, 18 prototypes and 35 metabolites were detected in rat plasma, urine and bile. This study offers an applicable approach for high-throughput profiling and identification of chemical components and metabolites derived from traditional Chinese medicine formula in vivo, and also provides essential data for exploring bioactive ingredients and action mechanisms of RZZY.
Positron Emission Tomographic Imaging of Iodine-124 Anti-Prostate Stem Cell Antigen-Engineered Antibody Fragments in LAPC-9 Tumor-Bearing Severe Combined Immunodeficiency Mice The humanized antibody hu1G8 has been shown to localize to prostate stem cell antigen (PSCA) and image PSCA-positive xenografts. We previously constructed hu1G8 anti-PSCA antibody fragments and tested them for tumor targeting and the ability to image prostate cancer at early and late time points postinjection by positron emission tomography (PET). Here, we compare the PET imaging and radioactivity accumulation properties in prostate cancer tumors and nontarget tissues to determine the superior 124I-labeled hu1G8 antibody format. 124I-labeled diabody, minibody, scFv-Fc, scFv-Fc double mutant (DM), and parental IgG were administered to severe combined immunodeficiency (SCID) mice bearing LAPC-9 xenografts, followed by whole-body PET imaging of the mice at preselected time points. Regions of interest were manually drawn around tumor and nontarget tissues and evaluated for radioactivity accumulation. The 124I-hu1G8 IgG has its best time point for high-contrast tumor imaging at 168 hours postinjection. The 124I-hu1G8 minibody at 44 hours postinjection yields superior high-contrast tumor imaging compared with the other antibody formats. The 124I-hu1G8 minibody at 44 hours postinjection also has percent tumor radioactivity comparable to that of the 124I-hu1G8 IgG at 168 hours postinjection. The 124I-hu1G8 minibody is the best engineered hu1G8 antibody format for imaging prostate cancer.
<gh_stars>1-10 /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.ocs.indaba.tag; import com.ocs.indaba.common.Constants; import com.ocs.indaba.common.Messages; import com.ocs.indaba.common.Rights; import com.ocs.indaba.dao.UserDAO; import com.ocs.indaba.po.Cases; import com.ocs.indaba.service.AccessPermissionService; import com.ocs.indaba.service.TaskService; import com.ocs.indaba.service.ViewPermissionService; import com.ocs.indaba.util.DateUtils; import com.ocs.indaba.util.SpringContextUtil; import com.ocs.indaba.vo.*; import java.io.IOException; import java.text.MessageFormat; import java.util.Date; import java.util.List; import javax.servlet.jsp.JspException; import javax.servlet.jsp.JspWriter; import org.apache.log4j.Logger; import org.apache.taglibs.standard.lang.support.ExpressionEvaluatorManager; /** * * @author <NAME> */ public abstract class BaseHorseContentTagHandler extends BaseTagHandler { private static final Logger logger = Logger.getLogger(BaseHorseContentTagHandler.class); private static final int MAX_BLOCKS_PER_LINE = 15; private static final String TASK_DISPLAY_PATTERN = "{0} {1}"; private AccessPermissionService accessPermissionService = (AccessPermissionService) SpringContextUtil.getBean("accessPermissionService"); private ViewPermissionService viewPermissionService = (ViewPermissionService) SpringContextUtil.getBean("viewPermissionService"); private TaskService taskService = (TaskService) SpringContextUtil.getBean("taskService"); private UserDAO userDao = (UserDAO) SpringContextUtil.getBean("userDao"); /* * attribute for access */ protected int prjid = 0; protected int uid = 0; private int blocks = 0; protected void outputHorse(JspWriter out, ActiveHorseView horseView) throws IOException { if (horseView == null) { return; } blocks = 0; StringBuilder sBuf = new StringBuilder(); out.print("<tr>"); // Content name out.print("<td>"); out.print("&nbsp;&nbsp;&nbsp;&nbsp;"); String contentName = getContentName(horseView.getTargetName(), horseView.getProductName()); sBuf.append(contentName); String contentDeailUrl = getContentUrl(horseView.getContentType(), horseView.getHorseId()); boolean showUrl = true; if (horseView.getWorkflowObjectStatus() == Constants.WORKFLOW_OBJECT_STATUS_WAITING) { showUrl = false; } else { // LIGHTHOUSE-94: check overall right READ_CONTENT_DETAILS if (horseView.isUserJoinedIn()) { showUrl = accessPermissionService.isPermitted(prjid, uid, Rights.READ_CONTENT_DETAILS); } else { showUrl = accessPermissionService.isPermitted(prjid, uid, Rights.READ_CONTENT_DETAILS_OF_OTHERS); } } // accessPermissionService.isPermitted(prjid, uid, "display content"); /* * if (showUrl) { sBuf.append("<a * href='").append(contentDeailUrl).append("'>"). * append(contentName).append("</a>"); } else { * sBuf.append(contentName); } * */ //if (horseView.getWorkflowObjectStatus() == Constants.WORKFLOW_OBJECT_STATUS_SUSPENDED) { if (horseView.getWorkflowObjectStatus() < 0) { // Suspended workflow object sBuf.append("&nbsp;&nbsp;<img width='15' src='./images/stop_sign.png' alt='"). 
append(getI18nMessage(Messages.KEY_COMMON_REMARK_SUSPENDED)).append("'/>"); } out.print(sBuf.toString()); out.print("</td>"); // Goals out.print(" <td valign='top'>"); List<SequenceObjectView> sequences = horseView.getSequences(); if (sequences != null) { int seqId = 0; for (SequenceObjectView seqObj : sequences) { ++seqId; sBuf.setLength(0);// clear buffer appendSequence(sBuf, seqObj, seqId, horseView.getHorseId()); //sBuf.append("<br/>"); //sBuf.append("<span class='blk_blank_green'/>"); out.print(sBuf.toString()); } out.flush(); sequences.clear(); sequences = null; } out.print("</td>"); if (showUrl) { out.print("<td align='center'><a href='" + contentDeailUrl + "' title='" + getI18nMessage(Messages.KEY_COMMON_ALT_VIEWCONTENT) + "'>" + "<img src='images/view.png' alt='" + getI18nMessage(Messages.KEY_COMMON_ALT_VIEWCONTENT) + "'/></a></td>"); } else { out.print("<td></td>"); } // History Chart if (horseView.getWorkflowObjectStatus() == Constants.WORKFLOW_OBJECT_STATUS_WAITING) { out.print("<td align='center'><img src='images/chart-bw.png' alt='" + getI18nMessage(Messages.KEY_COMMON_ALT_HISTORYCHART) + "'/></td>"); } else { out.print("<td align='center'><a href='#loadChart' onclick='loadChart(\"" + horseView.getHorseId() + "\")' title='" + getI18nMessage(Messages.KEY_COMMON_ALT_HISTORYCHART) + "'>" + "<img src='images/chart.png' alt='" + getI18nMessage(Messages.KEY_COMMON_ALT_HISTORYCHART) + "'/></a></td>"); } // Goal out.print("<td nowrap>"); String durTime = DateUtils.date2Str(horseView.getDueTime(), DateUtils.DEFAULT_DATE_FORMAT_2); if (durTime == null || "".equals(durTime)) { durTime = "--"; } out.print(durTime); out.print("</td>"); // Estimation out.print("<td nowrap>"); String estTime = DateUtils.date2Str(horseView.getEstimationTime(), DateUtils.DEFAULT_DATE_FORMAT_2); if (estTime == null || "".equals(estTime)) { estTime = "--"; } out.print(estTime); out.print("</td></tr>"); //________________ Next Step AssignedTask task = horseView.getActiveTask(); if (task == null) { out.print("<td colspan='5'><span class='small_txt' style='color: gray'>&nbsp;&nbsp;&nbsp;&nbsp;" + getI18nMessage(Messages.KEY_COMMON_MSG_NEXT_STEP) + ": -- </span></td></tr>"); out.flush(); return; } UserDisplay userDisplay = viewPermissionService.getUserDisplayOfProject(prjid, Constants.DEFAULT_VIEW_MATRIX_ID, uid, task.getAssignedUserId()); String displayName = (userDisplay != null) ? userDisplay.getDisplayUsername() : userDao.selectUserById(task.getAssignedUserId()).getUsername(); if ((task != null) && (task.getAssignedUserId() == uid)) { displayName = " " + getI18nMessage(Messages.KEY_COMMON_LABEL_YOU) + " "; } out.print("<td colspan='5'>"); out.print("<span class='small_txt'>&nbsp;&nbsp;&nbsp;&nbsp;" + getI18nMessage(Messages.KEY_COMMON_MSG_NEXT_STEP)); sBuf.setLength(0); if (displayName == null) { sBuf.append(" <span class='red_small_txt'>"). 
append(super.getI18nMessage(Messages.KEY_COMMON_MSG_NO_ASSIGN, task.getProductName())).append("'</span>"); } else { String userLink = null; if (userDisplay != null && userDisplay.getPermission() != Constants.VIEW_PERMISSION_NONE) { userLink = (" <a href='profile.do?targetUid=") + task.getAssignedUserId() + "'>" + displayName + ("</a>"); } else { userLink = (" <span style='color: gray'>") + displayName + ("</span>"); } sBuf.append(MessageFormat.format(getI18nMessage(Messages.KEY_COMMON_LABEL_WILLDO), userLink, "'" + task.getTaskName() + "'")); } out.print(sBuf.toString()); out.print("</span></tr>"); out.flush(); } protected void outputHorseDetails(JspWriter out, ActiveHorseView horseView) throws IOException { outputHorseDetails(out, horseView, true); } protected void outputHorseDetails(JspWriter out, ActiveHorseView horseView, boolean checkUid) throws IOException { if (horseView == null) { return; } blocks = 0; StringBuilder sBuf = new StringBuilder(); out.print("<tr>"); // Content name out.print("<td>"); String contentName = getContentName(horseView.getTargetName(), horseView.getProductName()); sBuf.append("<span>").append(contentName).append("</span>"); String contentDeailUrl = getContentUrl(horseView.getContentType(), horseView.getHorseId()); boolean showUrl = true; if (horseView.getWorkflowObjectStatus() == Constants.WORKFLOW_OBJECT_STATUS_WAITING) { showUrl = false; } else if (checkUid) { if (horseView.isUserJoinedIn()) { showUrl = accessPermissionService.isPermitted(prjid, uid, Rights.READ_CONTENT_DETAILS); } else { showUrl = accessPermissionService.isPermitted(prjid, uid, Rights.READ_CONTENT_DETAILS_OF_OTHERS); } } // accessPermissionService.isPermitted(prjid, uid, "display content"); /* * if (showUrl) { sBuf.append("<a * href='").append(contentDeailUrl).append("'>"). * append(contentName).append("</a>"); } else { * sBuf.append("<span>").append(contentName).append("</span>"); } * */ if (horseView.getWorkflowObjectStatus() < 0) { // " + getI18nMessage(KEY_BASEHORSECONTENT_TITLE_SUSPEND) + " workflow object sBuf.append("&nbsp;&nbsp;<img width='15' src='./images/stop_sign.png' alt='"). append(getI18nMessage(Messages.KEY_COMMON_REMARK_SUSPENDED)).append("'/>"); } out.print(sBuf.toString()); out.print("</td>"); // Goal's Status out.print(" <td>"); List<SequenceObjectView> sequences = horseView.getSequences(); if (sequences != null) { int seqId = 0; for (SequenceObjectView seqObj : sequences) { ++seqId; sBuf.setLength(0);// clear buffer appendSequence(sBuf, seqObj, seqId, horseView.getHorseId()); out.print(sBuf.toString()); } out.flush(); sequences.clear(); sequences = null; } out.print("</td>"); // Done out.print("<td>"); int percentage = (int) (horseView.getCompleted() * 100); if (percentage < 0) { percentage = 0; } out.print(percentage + "%"); out.print("</td>"); if (showUrl) { out.print("<td align='center'><a href='" + contentDeailUrl + "' title='view the content'><img src='images/view.png' alt='" + getI18nMessage(Messages.KEY_COMMON_ALT_VIEWCONTENT) + "'/></a></td>"); } else { out.print("<td></td>"); } // " + getI18nMessage(Messages.KEY_COMMON_ALT_HISTORYCHART) + "(if the horse is not started, keep it unclickable.) 
if (horseView.getWorkflowObjectStatus() == Constants.WORKFLOW_OBJECT_STATUS_WAITING) { out.print("<td align='center'><img src='images/chart-bw.png' alt='" + getI18nMessage(Messages.KEY_COMMON_ALT_HISTORYCHART) + "'/></td>"); } else { out.print("<td align='center'><a href='#loadChart' onclick='loadChart(\"" + horseView.getHorseId() + "\")' title='" + getI18nMessage(Messages.KEY_COMMON_ALT_HISTORYCHART) + "'><img src='images/chart.png' alt='" + getI18nMessage(Messages.KEY_COMMON_ALT_HISTORYCHART) + "'/></a></td>"); } // Next Due out.print("<td>"); AssignedTask nextTask = horseView.getActiveTask(); String dueTime = "--"; if (nextTask != null) { Date startTime = nextTask.getStartTime(); if (nextTask.getDurTime() != null) { dueTime = DateUtils.date2Str(nextTask.getDurTime(), DateUtils.DEFAULT_DATE_FORMAT_2); } else { if (startTime == null) { startTime = new Date(); } dueTime = DateUtils.date2Str(DateUtils.nextIntervalDays(startTime, nextTask.getDuration()), DateUtils.DEFAULT_DATE_FORMAT_2); } } out.print(dueTime); out.print("</td>"); // Open Cases out.print("<td>"); sBuf.setLength(0);// clear buffer List<Cases> openCases = horseView.getOpenCases(); if (openCases == null || openCases.size() <= 0) { sBuf.append("--"); } else { Cases caze = null; for (int i = 0, n = openCases.size(); i < n; ++i) { caze = openCases.get(i); sBuf.append("<a href='casedetail.do?caseid=").append(caze.getId()). append("' title='").append(caze.getTitle()).append("'>#"). append(caze.getId()).append("</a>"); if (i != n - 1) { sBuf.append(", "); } } } out.print(sBuf.toString()); out.print("</td>"); // People Assigned out.print("<td class='td-cls-people-assigned'>"); sBuf.setLength(0);// clear buffer List<UserDisplay> userDisplays = horseView.getPeopleAssigned(); if (userDisplays == null || userDisplays.size() <= 0) { sBuf.append("--"); } else { UserDisplay ud = null; for (int i = 0, n = userDisplays.size(); i < n; ++i) { ud = userDisplays.get(i); if (ud.getPermission() == Constants.ACCESS_PERMISSION_NO) { sBuf.append(ud.getDisplayUsername()); } else { sBuf.append("<a href='profile.do?targetUid="). append(ud.getUserId()).append("' title='"). append(ud.getDisplayUsername()).append("'>"). append(ud.getDisplayUsername()).append("</a>"); } if (i != n - 1) { sBuf.append(", "); } } userDisplays.clear(); userDisplays = null; } out.print(sBuf.toString()); out.print("</td></tr>"); out.flush(); } /** * Get horseView content name * * @param targetName * @param productName * @return */ protected String getContentName(String targetName, String productName) { return MessageFormat.format(TASK_DISPLAY_PATTERN, targetName, productName); } /** * Get the horseView content url * * @param contentType * @param horseId * @return */ protected String getContentUrl(int contentType, int horseId) { String action = (contentType == Constants.CONTENT_TYPE_JOURNAL) ? 
Constants.CONTENT_JOURNAL_ACTION : Constants.CONTENT_SURVEY_ACTION; StringBuilder sBuf = new StringBuilder(); sBuf.append(action).append("?action=display&horseid=").append(horseId); return sBuf.toString(); } /** * Get the horseView content url * * @param contentType * @param horseId * @return */ protected void appendSequence(StringBuilder sBuf, SequenceObjectView seqObj, int seqId, int horseId) { List<GoalObjectView> goalObjectList = null; if (seqObj == null || (goalObjectList = seqObj.getGoalObjects()) == null) { return; } for (GoalObjectView goalObj : goalObjectList) { ++blocks; appendGoal(sBuf, goalObj, seqId, horseId); //sBuf.append("<span class='blk_blank'/>"); } } protected void appendGoal(StringBuilder sBuf, GoalObjectView goalObj, int seqId, int horseId) { Date enterTime = goalObj.getEnterTime(); long t1 = enterTime == null ? 0 : (enterTime.getTime() + goalObj.getDuration() * Constants.MILLSECONDS_PER_DAY); Date maxDuetime = taskService.getMaxDuetimeByGoalObjectId(goalObj.getGoalObjId()); long t2 = (maxDuetime == null) ? 0L : maxDuetime.getTime(); if (goalObj.getStatus() != Constants.GOAL_OBJECT_STATUS_DONE && enterTime != null && (t1 > t2 ? t1 : t2) < System.currentTimeMillis()) { goalObj.setStatus(Constants.GOAL_OBJECT_STATUS_OVERDUE); } String cssStyle = ""; switch (goalObj.getStatus()) { case Constants.GOAL_OBJECT_STATUS_WAITING: { cssStyle = "status-blk-waiting"; break; } case Constants.GOAL_OBJECT_STATUS_STARTING: { cssStyle = "status-blk-starting"; break; } case Constants.GOAL_OBJECT_STATUS_STARTED: { cssStyle = "status-blk-started"; break; } case Constants.GOAL_OBJECT_STATUS_DONE: { cssStyle = "status-blk-completed"; break; } case Constants.GOAL_OBJECT_STATUS_OVERDUE: default: { cssStyle = "status-blk-overdue"; break; } } sBuf.append("<a href='#' onclick='displayTasks("). append(horseId). append(","). append(goalObj.getGoalId()). append("); return false;' class='status-blk "). append(cssStyle).append("' title='").append(goalObj.getGoalName()).append("'>").append("</a>"); //sBuf.append("<a href = '#' onclick='return false;' class='status-blk ").append(cssStyle).append("' title='").append(goalObj.getGoalName()).append("'>").append(seqId).append("</a>"); if (blocks > MAX_BLOCKS_PER_LINE) { blocks = 0; sBuf.append("<br/>"); } } /** * @param prjid the prjid to set */ public void setPrjid(Object prjid) { try { this.prjid = (Integer) ExpressionEvaluatorManager.evaluate("prjid", prjid.toString(), Integer.class, this, pageContext); } catch (JspException ex) { logger.error("set prjid value error" + ex); } } /** * @param uid the uid to set */ public void setUid(Object uid) { try { this.uid = (Integer) ExpressionEvaluatorManager.evaluate("uid", uid.toString(), Integer.class, this, pageContext); } catch (JspException ex) { logger.error("set uid value error" + ex); } } }
# tests/test_cli.py
from half_life.cli.main import main


def test_main():
    main()
import useWith from 'ramda/src/useWith'; import React, { FC, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useDispatch, useSelector } from 'react-redux'; import { Button } from '@comp/Button'; import { Form } from '@comp/Form'; import { Input } from '@comp/Input'; import { Modal } from '@comp/Modal'; import { SyncInput } from '@comp/SyncInput'; import validator from '@helpers/validator'; import { validatorChangePasswordForm } from '@helpers/validator/form/changePassword'; import { AuthActions, UserActions } from '@store/actions'; import { getEmail } from '@store/selectors'; import { useForm } from '@use/form'; interface IFormValidatedState { oldPassword: string; newPassword: string; } const initialState = { oldPassword: '', newPassword: '', }; export const Account: FC = () => { const { t } = useTranslation(); const dispatch = useDispatch(); const email = useSelector(getEmail); const [isOpenModal, setIsOpenModal] = useState<boolean>(false); const handleClick = () => { setIsOpenModal(true); }; const handlePositive = () => { }; const handleNegative = () => { setIsOpenModal(false); }; const handleClose = () => { setIsOpenModal(false); }; const { handleChange, handleSubmit, handleBlur, values, errors, touches, } = useForm<IFormValidatedState>( initialState, useWith(dispatch, [AuthActions.effect.changePassword]), validatorChangePasswordForm, ); return ( <> <h1 className="settings__title">{t('Account')}</h1> <div> <SyncInput type="email" name="email" label={t('Email')} isLight initialValue={email} action={UserActions.effect.updateEmail} validator={validator.email({ max: 64 })} style={{ marginBottom: 20 }} /> <div className="input"> <div className="input__wrapper"> <div className="input__inner"> <span className="input__label">{t('Password')}</span> <div className="input__holder"> <Button type="button" isMaxWidth onClick={handleClick} > {t('Change password')} </Button> </div> </div> </div> </div> </div> <Modal isOpen={isOpenModal} negative={t('Cancel')} positive={t('Change Password')} onPositive={handlePositive} onNegative={handleNegative} onClose={handleClose} type="submit" renderWrapper={(children) => ( <Form handleSubmit={handleSubmit}> {children} </Form> )} > <h1 className="dialog__title"> {t('Change password')} </h1> <Input type="password" placeholder={t('Old password')} touched={touches.oldPassword} error={errors.oldPassword} name="oldPassword" value={values.oldPassword} onChange={handleChange} onBlur={handleBlur} isLight /> <Input type="password" placeholder={t('New password')} touched={touches.newPassword} error={errors.newPassword} name="newPassword" value={values.newPassword} onChange={handleChange} onBlur={handleBlur} isLight /> </Modal> </> ); };
/*
 * Copyright (c) 2012-2016 The Khronos Group Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and/or associated documentation files (the
 * "Materials"), to deal in the Materials without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Materials, and to
 * permit persons to whom the Materials are furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Materials.
 *
 * MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS
 * KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS
 * SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT
 * https://www.khronos.org/registry/
 *
 * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
 */

/*!
 * \file
 * \brief The OpenMP Target Interface
 * \author <NAME> <<EMAIL>>
 */

#include <VX/vx.h>
#include <VX/vx_helper.h>
#include <vx_internal.h>
#include <vx_interface.h>

static const vx_char name[VX_MAX_TARGET_NAME] = "khronos.openmp";

/*! \brief List of kernels supported by the OpenMP target. */
static vx_kernel_description_t *target_kernels[] = {
    &lut_kernel,
};

/*! \brief Declares the number of base supported kernels.
* \ingroup group_implementation */ static vx_uint32 num_target_kernels = dimof(target_kernels); /******************************************************************************/ /* EXPORTED FUNCTIONS */ /******************************************************************************/ vx_status vxTargetInit(vx_target target) { if (target) { strncpy(target->name, name, VX_MAX_TARGET_NAME); target->priority = VX_TARGET_PRIORITY_OPENMP; } return ownInitializeTarget(target, target_kernels, num_target_kernels); } vx_status vxTargetDeinit(vx_target target) { return ownDeinitializeTarget(target); } vx_status vxTargetSupports(vx_target target, vx_char targetName[VX_MAX_TARGET_NAME], vx_char kernelName[VX_MAX_KERNEL_NAME], #if defined(EXPERIMENTAL_USE_VARIANTS) vx_char variantName[VX_MAX_VARIANT_NAME], #endif vx_uint32 *pIndex) { vx_status status = VX_ERROR_NOT_SUPPORTED; if (strncmp(targetName, name, VX_MAX_TARGET_NAME) == 0 || strncmp(targetName, "default", VX_MAX_TARGET_NAME) == 0) { vx_uint32 k = 0u; for (k = 0u; k < VX_INT_MAX_KERNELS; k++) { vx_char targetKernelName[VX_MAX_KERNEL_NAME]; vx_char *kernel; vx_char def[8] = "default"; #if defined(EXPERIMENTAL_USE_VARIANTS) vx_char *variant; #endif strncpy(targetKernelName, target->kernels[k].name, VX_MAX_KERNEL_NAME); kernel = strtok(targetKernelName, ":"); if (kernel == NULL) kernel = def; #if defined(EXPERIMENTAL_USE_VARIANTS) variant = strtok(NULL, ":"); if (variant == NULL) variant = def; #endif if (strncmp(kernelName, kernel, VX_MAX_KERNEL_NAME) == 0 #if defined(EXPERIMENTAL_USE_VARIANTS) && strncmp(variantName, variant, VX_MAX_VARIANT_NAME) == 0 #endif ) { status = VX_SUCCESS; if (pIndex) *pIndex = k; break; } } } return status; } vx_action vxTargetProcess(vx_target target, vx_node_t *nodes[], vx_size startIndex, vx_size numNodes) { vx_action action = VX_ACTION_CONTINUE; vx_status status = VX_SUCCESS; vx_size n = 0; for (n = startIndex; (n < (startIndex + numNodes)) && (action == VX_ACTION_CONTINUE); n++) { VX_PRINT(VX_ZONE_GRAPH,"Executing Kernel %s:%d in Nodes[%u] on target %s\n", nodes[n]->kernel->name, nodes[n]->kernel->enumeration, n, nodes[n]->base.context->targets[nodes[n]->affinity].name); ownStartCapture(&nodes[n]->perf); status = nodes[n]->kernel->function((vx_node)nodes[n], (vx_reference *)nodes[n]->parameters, nodes[n]->kernel->signature.num_parameters); nodes[n]->executed = vx_true_e; nodes[n]->status = status; ownStopCapture(&nodes[n]->perf); VX_PRINT(VX_ZONE_GRAPH,"kernel %s returned %d\n", nodes[n]->kernel->name, status); if (status == VX_SUCCESS) { /* call the callback if it is attached */ if (nodes[n]->callback) { action = nodes[n]->callback((vx_node)nodes[n]); VX_PRINT(VX_ZONE_GRAPH,"callback returned action %d\n", action); } } else { action = VX_ACTION_ABANDON; VX_PRINT(VX_ZONE_ERROR, "Abandoning Graph due to error (%d)!\n", status); } } return action; } vx_status vxTargetVerify(vx_target target, vx_node_t *node) { vx_status status = VX_SUCCESS; return status; } vx_kernel vxTargetAddKernel(vx_target target, vx_char name[VX_MAX_KERNEL_NAME], vx_enum enumeration, vx_kernel_f func_ptr, vx_uint32 numParams, vx_kernel_validate_f validate, vx_kernel_input_validate_f input, vx_kernel_output_validate_f output, vx_kernel_initialize_f initialize, vx_kernel_deinitialize_f deinitialize) { vx_uint32 k = 0u; vx_kernel_t *kernel = NULL; for (k = 0; k < VX_INT_MAX_KERNELS; k++) { kernel = &(target->kernels[k]); if (kernel->enabled == vx_false_e) { ownInitializeKernel(target->base.context, kernel, enumeration, func_ptr, name, NULL, 
numParams, validate, input, output, initialize, deinitialize); VX_PRINT(VX_ZONE_KERNEL, "Reserving %s Kernel[%u] for %s\n", target->name, k, kernel->name); target->num_kernels++; break; } kernel = NULL; } return (vx_kernel)kernel; }
/* * This file is part of GumTree. * * GumTree is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * GumTree is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with GumTree. If not, see <http://www.gnu.org/licenses/>. * * Copyright 2022 Jean-Rémy Falleri <[email protected]> */ package com.github.gumtreediff.client.diff.swingdiff; import com.github.gumtreediff.actions.Diff; import com.github.gumtreediff.client.diff.AbstractDiffClient; import com.github.gumtreediff.gen.TreeGenerators; import com.github.gumtreediff.io.DirectoryComparator; import com.github.gumtreediff.utils.Pair; import javax.swing.*; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.io.File; import java.io.IOException; import java.nio.file.Path; public class DirectoryPanel extends JPanel implements ListSelectionListener { private final DirectoryComparator comparator; private final JList<Pair<File, File>> listModified; private final AbstractDiffClient client; public DirectoryPanel(String src, String dst, AbstractDiffClient client) { super(new GridLayout(3, 1)); this.client = client; this.comparator = new DirectoryComparator(src, dst); comparator.compare(); Pair<File, File>[] modifiedFilesArray = new Pair[comparator.getModifiedFiles().size()]; comparator.getModifiedFiles().toArray(modifiedFilesArray); listModified = new JList<>(modifiedFilesArray); listModified.setCellRenderer(new PairFileCellRenderer()); listModified.addListSelectionListener(this); listModified.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); JScrollPane panModified = new JScrollPane(listModified); this.add(panModified); File[] addedFilesArray = new File[comparator.getAddedFiles().size()]; comparator.getAddedFiles().toArray(addedFilesArray); JList<File> listAdded = new JList<>(addedFilesArray); listAdded.setSelectionModel(new DisabledItemSelectionModel()); listAdded.setBackground(new Color(0, 255, 0, 128)); listAdded.setCellRenderer(new FileCellRenderer(comparator.getDst())); JScrollPane panAdded = new JScrollPane(listAdded); this.add(panAdded); File[] deletedFilesArray = new File[comparator.getDeletedFiles().size()]; comparator.getDeletedFiles().toArray(deletedFilesArray); JList<File> listDeleted = new JList<>(deletedFilesArray); listDeleted.setBackground(new Color(255, 0, 0, 128)); listDeleted.setCellRenderer(new FileCellRenderer(comparator.getSrc())); listDeleted.setSelectionModel(new DisabledItemSelectionModel()); JScrollPane panDeleted = new JScrollPane(listDeleted); this.add(panDeleted); this.setPreferredSize(new Dimension(1024, 768)); } @Override public void valueChanged(ListSelectionEvent e) { if (e.getValueIsAdjusting()) return; Pair<File, File> files = listModified.getSelectedValue(); if (!TreeGenerators.getInstance().hasGeneratorForFile(files.first.getAbsolutePath())) return; Diff diff = null; try { diff = client.getDiff(files.first.getAbsolutePath(), files.second.getAbsolutePath()); } catch (IOException ex) { ex.printStackTrace(); } final Diff theDiff = diff; if (diff == null) return; 
javax.swing.SwingUtilities.invokeLater(() -> { JFrame frame = new JFrame("Diff"); frame.add(new MappingsPanel(files.first.getAbsolutePath(), files.second.getAbsolutePath(), theDiff)); frame.pack(); frame.setVisible(true); }); } private class PairFileCellRenderer extends DefaultListCellRenderer { public Component getListCellRendererComponent( JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) { Component res = super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); Pair<File, File> files = (Pair<File, File>) value; String fileName = comparator.getSrc().toAbsolutePath() .relativize(files.first.toPath().toAbsolutePath()).toString(); setText((fileName)); return res; } } private class FileCellRenderer extends DefaultListCellRenderer { private Path root; public FileCellRenderer(Path root) { this.root = root; } public Component getListCellRendererComponent( JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) { Component res = super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); File file = (File) value; String fileName = root.toAbsolutePath() .relativize(file.toPath().toAbsolutePath()).toString(); setText((fileName)); return res; } } private class DisabledItemSelectionModel extends DefaultListSelectionModel { @Override public void setSelectionInterval(int index0, int index1) { super.setSelectionInterval(-1, -1); } @Override public void addSelectionInterval(int index0, int index1) { super.setSelectionInterval(-1, -1); } } }
Lydia Gomez-Diaz finished her freshman year at Eastern Michigan University despite having a hard time adjusting to the changes in her life after moving to the U.S. from Spain 10 months ago. Gomez-Diaz found a home on the EMU women’s track and field team, and the hard work she put in during the season paid off. At the 2012 Outdoor Mid-American Conference Championships, Gomez-Diaz had a hammer throw of 190 feet 10 inches, which was not only a personal best but also earned her a spot in the EMU record books. The record-setting throw also placed her on the 2012 Women’s Track and Field All-MAC Second Team. Gomez-Diaz ended her first season at the 2012 NCAA East Preliminary Round in Jacksonville, Fla., taking 41st place with a hammer throw of 169 feet 5 inches. Although she has the technique for the hammer throw, Gomez-Diaz admits she had a hard time learning to compete in the weight throw, an event she didn’t contest while in Spain. Gomez-Diaz confessed that she enjoys the hammer throw more. In the off-season, Gomez-Diaz plans on improving her technique, personal records, strength and weight throwing. One of the reasons Gomez-Diaz came to the U.S. was the incentive of having her education paid for by competing at the collegiate level. Gomez-Diaz said she lived a “normal life” in her hometown of Tarragona, Spain, which is about an hour and a half southwest of Barcelona. “I would go to school, practice and go to the beach every day in the summertime,” she said. L’Arrabassada Beach and Playa Miragla are a couple of the beaches Gomez-Diaz might visit during her trip back home in a couple of weeks. She says life in the U.S. is similar to her life in Spain, but with subtle differences. For example, during the winter here, she went to class and practice, but then stayed indoors. In Spain, the weather is much warmer throughout the year and people go out much more during the day. Gomez-Diaz says that EMU’s new Spanish track and field athlete, Marina Manjohn Rivadulla from Escola Universitària del Maresme Mataró, used to be a teammate when they were part of Agrupación Atlética de Cataluña. She has known Rivadulla for about five years. Gomez-Diaz looks to help Rivadulla ease into her freshman season next year. Rivadulla is a sprinter and distance runner; she has set personal bests in the 400-, 800- and 1,500-meter runs. Look for the Spanish girls to be refreshed, sun-tanned and determined to break personal records at the start of next year’s track and field indoor and outdoor seasons.
Beclin-1 improves mitochondria-associated membranes in the heart during endotoxemia

Abstract: Mitochondria-associated membranes (MAMs) are essential to mitochondria. This study was to determine whether endotoxemia rearranges MAMs in the heart, and whether Beclin-1 regulates this process. Wild-type mice and mice with a cardiac-specific overexpression of Beclin-1 (Becn1-Tg), or a heterozygous knockout of Beclin-1 (Becn1 +/−), were given lipopolysaccharide (LPS) challenge. In the heart, the ultrastructure of MAMs was examined by electron microscopy and the histology evaluated by immunostaining. Additionally, MAMs were isolated by ultracentrifugation, and their content and function were quantified. The effects of Beclin-1-activating peptide (TB-peptide) on MAMs were also examined. Data showed that endotoxemia decreased both the total mass and the function of MAMs, and these deficiencies became worse in Becn1 +/− mice but were alleviated in Becn1-Tg and TB-peptide-treated mice. Responses of myocardial MAMs to LPS and to TB-peptide were additionally examined in AC16 human cardiomyocytes. In vitro findings recaptured the effects of LPS and TB-peptide in cardiomyocytes; the challenge of LPS reduced the level and activity of MAMs, and TB-peptide attenuated this defect. Together, the results suggest a new function of Beclin-1 in improving cardiac MAMs during endotoxemia, providing a mechanism for the previously identified role of Beclin-1 in protection of mitochondria and cardiac function.

KEYWORDS: beclin-1, cardiac dysfunction, LPS, MAMs, sepsis

exploration of new therapeutic interventions for sepsis are in urgent need. Cardiac dysfunction is a main predictor of poor outcomes in sepsis. In the heart, mitochondria comprise about 30% of myocardial volume. 7 Research in recent years has demonstrated that mitochondria are more than the powerhouses that generate ATP to meet energy demands. They are also involved in a plethora of important cellular processes such as inflammation, autophagy, cell death, apoptosis, and metabolism. Previously, our studies and others showed that sepsis triggers damage in mitochondria, resulting in a deficiency in ATP supply and an overproduction of mitochondria-derived danger-associated molecular patterns (DAMPs), such as mtROS and fragmented mtDNA. These harmful molecules released from damaged mitochondria become a driving force to exacerbate myocardial inflammation and cardiac dysfunction during sepsis.
9,11 Under normal physiology condition, the quality and quantity of mitochondria are tightly regulated by multiple aspects of cellular process, including mitochondrial biogenesis, the dynamic switch between fusion and fission, and recycling through mitophagy. Additional control over the mitochondrial population, distribution, and function is directly achieved by a subcellular domain termed mitochondria-associated membranes (MAMs). MAMs are the regions of close physical connection between mitochondrial outer membrane and other intracellular membranes that are mainly from the endoplasmic reticulum (ER). Mitochondrial physiology is regulated by MAMs particularly through mitochondria-ER communication in the transport of Ca 2+15 and lipids. 18 MAMs also function as a signaling hub harboring key molecules during protein sorting, ER stress, apoptosis, inflammation, and autophagy. 19,20 Aberrations in MAMs are linked to health problems with mitochondrial dysfunction as a major pathological component; the deregulation of MAMs was identified in preclinical and clinical samples of neurodegenerative diseases, diabetes, obesity, and infectious diseases. In sepsis, a devastating deterioration in cardiac mitochondria, shown by a damaged integrity in structure, deficiency in function, and an overproduction of DAMPs, incites downstream inflammation. 27 However, whether sepsis causes any defects in MAMs and whether MAMs play any pathological role in triggering sepsis-induced mitochondria damage remain unknown. Our laboratory recently investigated the role of Beclin-1 in the heart during endotoxemia induced by lipopolysaccharide (LPS), a major molecule of pathogen-associated molecular patterns released from gram-negative bacteria. 27 Beclin-1 is a key autophagy initiation factor that is universally expressed. 28,29 We detected that the specific activation of Beclin-1 improves cardiac function and limits myocardial inflammation during endotoxemia. This Beclin-1-dependent protection was associated with an improved quality control of mitochondria and a reduction in mitochondria-derived DAMPs. Results also suggest that, mechanistically, Beclin-1 removes damaged mitochondria by a selective activation of adaptive mitophagy in the heart under septic insults. As expected, the specific activation of Beclin-1, either genetically or pharmacologically, significantly improved cardiac performance under the challenge by LPS, 27 leading us to postulate that the targeted activation of autophagy factors is an effective approach to boost adaptive autophagic responses, and thus, improves outcomes in sepsis. In the study summarized in this report, we extended our investigation to examine the potential role of MAMs in the mitochondrial pathology in sepsis and to determine whether Beclin-1 possesses a regulatory power over this process. In a mouse model of endotoxemia, we obtained new findings showing that septic challenge by LPS incites losses in myocardial MAMs, and this damage was alleviated by the targeted activation of Beclin-1 either genetically or pharmacologically. | Experimental animals Wild-type (WT) C57BL/6 mice were obtained from Charles River laboratories and an in-campus mouse breeding core facility at the University of Texas Southwestern Medical Center (UTSW). All animals were conditioned inhouse for 5-6 days after arrival with commercial diet and tap water available at will. 
Mouse strains with a cardiacspecific overexpression of Beclin-1 under the -myosin heavy chain promoter (Becn1-Tg) 30 or haploinsufficient for beclin-1 (Becn1 +/− ) 31 were previously developed. Animal work descripted in this study was reviewed by and conducted under the oversight of UTSW institutional animal care and use committee and conformed to the National Research Council's "Guide for the Care and Use of Laboratory Animals" when establishing animal research standards. LPS-induced endotoxemia model: Male mice, 8-12 weeks old, were weighed to determine the amount of LPS (#L3012; MilliporeSigma) required to achieve indicated doses and administered intraperitoneally (i.p.) in a volume of 100 L per mouse. Sterile endotoxin-free PBS was used as a vehicle control in sham groups. In some experiments, Beclin-1activating peptide (TB-peptide), synthesized according to a published sequence 32 containing 2 mmol/L L-glutamine, 12.5% fetal bovine serum (FBS), and 1X penicillin-streptomycin (#TMS-002-C, ES-009-B, and TMS-AB2-C; MilliporeSigma) at 37°C in a humidified incubator with 5% CO 2. The medium was exchanged for fresh medium every 2-3 days. After reaching to 90% confluency, cells were dissociated with trypsin-EDTA (#SM-2003-C; MilliporeSigma) for further passage or experiments. In some experiments, cells were exposed to LPS and/ or TB-peptide in the conditions described in figures. | Preparation of cellular fractions Heart tissues were harvested, washed in PBS, snap-clamp frozen, and kept at −80°C until used. Procedures for the isolation of MAMs and mitochondria were performed at 4°C according to established protocols 33,34 with minor modifications. Briefly, tissue pieces of one mouse heart were homogenized in 1 mL IB heart buffer (220 mmol/L mannitol, 70 mmol/L sucrose, 10 mmol/L HEPES, and 1 mmol/L EGTA, pH7.4) using a Potter-Elvehjem PTFE pestle and glass homogenizer (#P7734; MilliporeSigma), which was driven by a stirrer motor with electronic speed controller (#EW-04369-10; Cole-Palmer), by 40 strokes at a speed of 1500 rpm followed by another 40 strokes at 800 rpm. Crude mitochondrial fractions were then obtained by differential centrifugation in the following two steps. First, the homogenized heart lysates were subjected to twice-repeated centrifugation at 740 g for 5 minutes to remove unbroken cells and nuclei. Second, the supernatant mixtures were centrifuged at 9000 g for 10 minutes to collect pellets. These pellets were then resuspended in freshly prepared mitochondria-resuspension buffer (MRB; 250 mmol/L mannitol, 5 mmol/L HEPES, and 0.5 mmol/L EGTA, pH7.4) and subjected to twice-repeated centrifugation at 10 000 g for 10 minutes to collect crude mitochondria. The crude mitochondria pellets were then resuspended in MRB at the ratio of 0.5 mL MRB per heart and subjected to ultracentrifugation (Sorvall MX 120 Plus Micro-Ultracentrifuge with rotor S50-ST, #50135645; Thermo Scientific) to isolate MAMs and pure mitochondria (PM) by the following three steps. First, in each 7 mL ultracentrifuge tube, 6 mL of freshly made percoll medium (225 mmol/L mannitol, 25 mmol/L HEPES (pH7.4), 1 mmol/L EGTA, and 30% percoll (v/v)) was layered with 0.5 mL of crude mitochondria resuspension and 0.5 mL of MRB, from the bottom to the top, and centrifuged at 95 000 g for 30 minutes. Fractions of mitochondria, dense bands located approximately at the bottom, and MAMs, diffused white bands located above the mitochondria, were collected. 
Second, the collected bands of mitochondria and MAMs were diluted 10 times with MRB and further centrifuged at 6300 g for 10 minutes. Third, mixtures of MAM bands and mitochondria bands were centrifuged at 100 000 g for 1 hour. For fractions of MAMs, the pellets were collected and stored at −80°C until used. For fractions of mitochondria, the pellets were collected and resuspended with MRB again, followed by another two washes by centrifugation at 6300 g for 10 minutes, and the PM pellets were then collected and stored at −80°C until used. All chemicals were purchased from MilliporeSigma.

| Electron microscopy

2.5 | Immunostaining
Fresh heart tissues were perfused in PBS, followed by fixation in 4% paraformaldehyde, and then left in fixation buffer for 24 hours at 4°C. For dehydration, fixed tissues were first transferred to 10% sucrose/PBS for 24 hours, then to 18% sucrose/PBS for another 24 hours; both steps were performed at 4°C. Tissue samples were embedded in OCT, sectioned at 8 μm, air-dried, and stored at −80°C until used. For staining, frozen slides were thawed, rehydrated, and subjected to immunochemistry. Slides were blocked with 3% donkey serum/PBS and stained with a goat polyclonal calreticulin antibody (1:50) and a rabbit polyclonal VDAC1 antibody (1:50; #ab4109 and ab15895; Abcam, Cambridge, MA) for 1 hour at room temperature, followed by another 1-hour incubation with Alexa Fluor 488-labeled donkey anti-rabbit IgG (1:500) and Cy3-labeled donkey anti-goat IgG (1:500; #711-546-152 and 705-166-147; Jackson ImmunoResearch). In some experiments, tissue slides were co-stained with VDAC1 and Nile red to visualize mitochondria and lipid contents. Slides were incubated with anti-VDAC1 as above, followed by another 1-hour incubation with biotinylated goat anti-rabbit IgG (1:400; ab64256; Abcam). The antibody signals were visualized by adding horseradish peroxidase (HRP) substrate (ImmPACT DAB substrate; SK4105; Vector Lab), and the reaction was stopped with water after a 5-minute incubation. A drop of Nile red (1:200 in 75% glycerol, diluted from a stock solution of 500 μg/mL in acetone; #109123; MilliporeSigma) was then added to each slide, incubated for 5 minutes, and sealed with a coverslip. All slides were examined under a Nikon Eclipse Ti-E inverted microscope at 40× magnification.

| Quantification of phospholipids
Levels of phospholipids in mitochondria were measured with a phospholipid assay kit (#MAK122; MilliporeSigma). Most phospholipids contain one diglyceride, a phosphate group, and one choline. This assay was designed to quantify choline-containing phospholipids in samples. According to the manufacturer's protocol, fractions of mitochondria were diluted to 1-2.5 μg protein per assay using the assay buffer provided. Each reaction mix was set up by adding a prepared sample or standard to phospholipase D, which degrades phospholipids to release choline. The amount of choline was determined with choline oxidase and an H2O2-specific dye. A colorimetric reading at wavelength 570 nm was proportional to the phospholipid concentration in the sample. Final results were calculated according to the standard curve and normalized by the protein amount per sample, and measurements were performed in triplicate.

| Cytotoxicity assay
Cytotoxicity of AC16 cells in response to a treatment was estimated by a lactate dehydrogenase (LDH) assay kit (#ab65393; Abcam). Cells were washed and collected with fresh culture medium, and seeded at a density of 1.75 × 10⁴ cells/100 μL/well on 96-well plates.
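The standard-curve step in the phospholipid assay is ordinary linear calibration: fit concentration against absorbance for the standards, convert each sample reading, then normalize by protein. A hedged sketch follows; the standard concentrations and readings are made-up placeholders, not the kit's actual values.

# Illustrative sketch (assumed, not the kit's software): converting A570
# readings to phospholipid concentration via a linear standard curve, then
# normalizing by the protein content of each mitochondrial fraction.
import numpy as np

std_conc = np.array([0.0, 25.0, 50.0, 100.0])  # choline standards (uM), assumed
std_a570 = np.array([0.02, 0.18, 0.35, 0.70])  # their absorbance readings, assumed

slope, intercept = np.polyfit(std_a570, std_conc, 1)  # fit conc = m*A570 + b

def phospholipid_per_mg(sample_a570, protein_mg):
    """Mean of triplicate A570 readings -> concentration per mg protein."""
    conc = slope * np.mean(sample_a570) + intercept
    return conc / protein_mg

print(phospholipid_per_mg([0.31, 0.33, 0.32], protein_mg=0.5))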
After settling on the plates overnight in a tissue culture incubator, cells were treated according to the experimental design, followed by the measurement of LDH, an enzyme marker of plasma membrane damage, according to the manufacturer's protocol. Briefly, cells were precipitated by centrifugation at 600 g for 10 minutes, the cell medium was transferred to an optically clear 96-well plate and mixed with reaction buffer for 10 minutes, and the absorbance at wavelength 450 nm was measured. All measurements were performed in triplicate.

| Statistical analysis
All data were expressed as mean ± SEM of at least three independent experiments using 4-6 animals/group. Two-tailed unpaired t tests were used for comparisons between groups. Differences were considered statistically significant at p ≤ .05.

| MAMs and the role of Beclin-1
Recently, we discovered that enhancing Beclin-1-dependent autophagy attenuates mitochondrial damage in the heart during endotoxemia. 27 Since MAMs play essential roles in maintaining mitochondrial health, we went on to determine whether LPS challenge affects the physiology of cardiac MAMs and whether Beclin-1 regulates this process. We compared WT mice with mouse lines carrying either a cardiac-specific overexpression of Beclin-1 (Becn1-Tg) or haploinsufficiency for beclin-1 (Becn1 +/−). Mice were challenged with LPS at the indicated doses, and the heart tissue was collected 18 hours post LPS challenge for the following examinations. Electron microscopy was used to compare the morphology of mitochondria and their surrounding MAMs in the left ventricular wall of hearts from WT, Becn1-Tg, and Becn1 +/− mice at baseline and following the challenge by LPS at 2.5, 5, and 10 mg/kg. As shown in Figure 1A, we observed an LPS dose-dependent decrease in the degree of MAM formation, indicated by red arrows, in WT mice. This decrease was associated with a loss of clearly defined cristae structures of the inner mitochondrial membrane, indicating damage in mitochondria. These disruptions were more severe in the Becn1 +/− mice; the reduction in MAMs was evident even in the sham group compared with its WT counterpart. On the contrary, Becn1-Tg mice given LPS showed near-to-normal structures of MAMs and mitochondria in the heart. The heart tissue was also examined by immunostaining using VDAC1, a marker of mitochondria, and calreticulin, a marker of the ER, for the purpose of detecting the degree of mitochondria-ER tethering that indicates MAMs. As presented in Figure 1B, comparisons between sham and LPS-treated mice revealed that LPS caused a gradual reduction in the formation of MAMs, shown by the decreased amount of overlay between VDAC1 and calreticulin in the color yellow, in all three strains. Further, an increase in the levels of MAMs in Becn1-Tg mice but a decrease in Becn1 +/− mice were also evident. Taken together, these results suggest: endotoxemic shock causes a dose-dependent decrease in MAM formation, and Beclin-1 has a function in the regulation of MAMs in the heart during endotoxemia.

FIGURE 1 | LPS-induced changes in the structure of cardiac MAMs in WT, Becn1-Tg, and Becn1 +/− mice. Mice were given indicated doses of LPS via i.p., and heart tissues were harvested 18 hours later. A, Ultrastructure of mitochondria and MAMs was observed by transmission electron microscopy. Red arrows indicate the structure of MAMs. B, Heart tissue sections were co-immunostained with the ER marker calreticulin (red) and mitochondria marker VDAC1 (green). Overlay color in yellow indicates the levels of MAMs. All images are representative of n ≥ 3 animals per group.

| LPS-dependent decrease in cardiac lipid accumulation and the role of Beclin-1
While in charge of the transport of calcium and lipids between mitochondria and the ER, MAMs serve as a special hub for enzymes of lipid trafficking and synthesis, such as acyl-CoA:cholesterol acyltransferase/sterol O-acyltransferase 1 (ACAT1/SOAT1), diacylglycerol O-acyltransferase 2 (DGAT2), PS synthases 1 and 2 (PSS1 and PSS2), phosphatidylethanolamine N-methyltransferase 2 (PEMT2), and fatty acid CoA ligase 4 (FACL4/ACS4). In addition, the unique heterogeneity of the MAM structure requires lipid constituents such as cholesterol and sphingolipids to support the formation of MAMs and to fortify their membrane thickness. As such, MAMs were reported to be fortified with cholesterol and sphingolipids, which increase their thickness. 39,40 Therefore, the level of cardiac lipid accumulation is closely related to the proper formation and physiology of MAMs in the heart tissue. Our previous research showed that LPS decreases mitochondrial mass and function in the heart. 27 In this report, we compared lipid levels relative to mitochondrial mass in the heart tissue of WT, Becn1-Tg, and Becn1 +/− mice. Tissue slides were co-stained with the lipid-specific fluorescent dye Nile red (color red) and the mitochondrial marker VDAC1 (brown). As shown in Figure 2, LPS challenge induced a dose-dependent decrease in the ratio of lipid vs mitochondria, and this decrease was evidently attenuated in the Becn1-Tg strain. Significantly, downregulation of Beclin-1 in Becn1 +/− mice severely suppressed lipid accumulation in the heart, even in the group of sham controls. This observation is consistent with a previously published study showing an LPS-stimulated reduction in cardiac lipid accumulation in mice. 41 It further suggests that, under both physiological conditions and the pathological condition of endotoxemia, Beclin-1 signaling is required to retain lipid levels in the heart, which is likely at least one of the mechanisms underlying Beclin-1-dependent support of MAMs. Our previous investigation demonstrated that LPS challenge at 5 mg/kg caused disrupted cardiac contractility in WT mice but not in Becn1-Tg mice. However, in Becn1 +/− mice, cardiac dysfunction started showing when given LPS at 2.5 mg/kg and progressively worsened at 5 mg/kg. 27 For the rest of the report, we chose to examine the features of cardiac MAMs when mice were challenged by LPS at 5 mg/kg, at which dose the most significant difference in cardiac performance was shown by the three strains of mice. 27

| Beclin-1 preserves the mass and function of cardiac MAMs during endotoxemia
To directly examine whether the insult of LPS challenge alters MAMs in the heart, and whether Beclin-1 has any regulation over this response, we quantified and compared the amount and function of MAMs in the heart tissue of WT, Becn1-Tg, and Becn1 +/− mice at baseline and following challenge by LPS. First, we validated the procedure of isolating MAM and PM fractions by ultracentrifugation based on published protocols, 32,33 as described in the Methods and Materials section. The successful separation of MAMs from mitochondria was demonstrated by the detection of specific markers in each isolation (Figure 3A).
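The Figure 1B comparison rests on how much VDAC1 signal overlaps calreticulin signal. The paper reports this visually, but a generic way to put a number on such channel overlap is a Manders-style colocalization coefficient; the sketch below is an assumed, illustrative measure, not the authors' method.

# Illustrative sketch: fraction of mitochondrial (VDAC1) intensity that
# falls on ER (calreticulin)-positive pixels, i.e., the Manders M1 value.
import numpy as np

def manders_m1(mito_channel, er_channel, threshold=0.1):
    """Fraction of mitochondrial signal that overlaps ER-positive pixels."""
    mito = np.asarray(mito_channel, dtype=float)
    er_mask = np.asarray(er_channel, dtype=float) > threshold
    total = mito.sum()
    return float(mito[er_mask].sum() / total) if total > 0 else 0.0

# Toy 2x2 images: half of the mitochondrial intensity sits on ER pixels.
print(manders_m1([[0.5, 0.5], [0.0, 0.0]], [[0.9, 0.0], [0.9, 0.0]]))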
Cytochrome C, an enzyme located in the mitochondrial intermembrane space, was detected exclusively in PM and not in MAMs. The mitochondrial outer membrane protein VDAC1 and the mitochondrial chaperone GRP75, which both form complexes with partner proteins on the ER for mitochondria-ER tethering, were detected primarily in PM. PEN2, a subunit of the gamma-secretase complex located on ER membranes, was found in MAMs but not in PM. Fatty acid CoA ligase 4 (FACL4), an enzyme enriched in MAMs to facilitate lipid metabolism, was detected mainly in MAMs and not in PM. As summarized in Figure 3B, the amount of MAMs isolated from the heart was quantified and normalized to tissue weight. A toxic dose of LPS, 5 mg/kg, triggered a significant drop of over 40% in the quantity of MAMs in WT mice, but this response was largely alleviated by the overexpression of Beclin-1 in Becn1-Tg mice. In contrast, the downregulation of Beclin-1 in Becn1 +/− mice resulted in a 40% decrease in MAMs at baseline and a further 10% decrease in response to LPS. A fundamental function of MAMs is to coordinate the synthesis and transport of phospholipids to other organelles such as mitochondria. 42 Because mitochondria are unable to synthesize phospholipids de novo and rely on the ER as the sole supplier, 43,44 mitochondrial phospholipid levels indicate the function of MAMs. 45 By quantifying total phospholipids in mitochondrial fractions, we found that LPS caused a ~30% decrease in MAM function in the heart; however, this deficiency was not detected in Becn1-Tg mice (Figure 3C). In Becn1 +/− mice, the baseline level of cardiac MAM function was about 25% lower than that of WT mice, and LPS challenge induced a further slight, though not statistically significant, reduction. Overall, these observations suggest that LPS induces deficiencies in cardiac MAMs, and that Beclin-1 is able to control this deterioration process during endotoxemia.

| TB-peptide preserves cardiac MAMs during endotoxemia

Pharmacological activation of Beclin-1 by TB-peptide has shown benefits in disease models related to reducing viral infection, 46 improving cardiac performance during pressure overload, 47 and enhancing the effectiveness of chemotherapy. 32 We have also shown that this peptide improves heart function during endotoxemia. 27 To examine whether this pharmacological approach has the potential to alleviate the damage to MAMs in endotoxemia, we compared MAM mass and function in the heart tissue of LPS-challenged mice with or without TB-peptide. As shown in Figure 4A, the administration of TB-peptide following LPS challenge attenuated the LPS-induced decline in MAM mass, reducing the loss in MAM quantity from 50% to ~20% compared with the sham group. Measurement of phospholipids showed that TB-peptide significantly improved the function of MAMs in the heart tissue in endotoxemia (Figure 4B).

| TB-peptide protects MAMs in human cardiomyocytes challenged by LPS

To further elucidate whether Beclin-1 regulates MAMs in the myocardium, we examined the effects of TB-peptide on MAMs in human cardiomyocyte AC16 cells challenged by LPS. We titrated various treatment conditions of LPS and TB-peptide and chose those under which cell toxicity was undetectable.
As illustrated in Figure 5A, cells were treated under the following five conditions: (a) control; (b) LPS 185 ng/mL for 1 hour; (c) TB-peptide 5 μmol/L for 1 hour; (d) TB-peptide 5 μmol/L for 1 hour, after which cells were washed and cultured in control medium for 1 hour (treatment released); and (e) TB-peptide 5 μmol/L for 1 hour, after which the treatment was replaced with an LPS 185 ng/mL challenge for 1 hour. In condition (e), cells were pretreated with TB-peptide prior to LPS challenge, avoiding potential interference between the two reagents. These treatment conditions had no impact on cell survival (Figure 5B). To evaluate the peptide's effect on enhancing autophagy, cell lysates were examined by western blot for LC3II, a marker of autophagosome formation during autophagy initiation, and p62/SQSTM1, a polyubiquitin-binding autophagy adaptor protein whose decrease marks lysosomal degradation at the later phase of autophagic flux. As shown in Figure 5C, LPS challenge stimulated an accumulation of both LC3II and p62, suggesting that LPS at the indicated dosage causes a blockage of autophagic flux. However, treatment with TB-peptide promoted the autophagic response, as shown by a significant increase in LC3II and a decrease in p62. In cells under the same conditions, double immunostaining using the mitochondrial marker Tom 20 and the ER marker calnexin visualized the areas of MAMs, shown by mitochondria-ER overlay in yellow and indicated by white arrows (Figure 5D). The results indicate that LPS challenge caused a reduction in MAMs, and that TB-peptide promoted the formation of MAMs under the normal condition and preserved the amount of MAMs in response to LPS. Moreover, TB-peptide demonstrated a similar effect when the function of MAMs was measured by the levels of mitochondrial phospholipids (Figure 5E). These in vitro data confirmed what was observed in vivo, showing that activation of the Beclin-1 pathway preserves MAMs in the myocardium in response to endotoxemia.

FIGURE 2 LPS-induced changes in mitochondria and lipid contents in WT, Becn1-Tg, and Becn1 +/− mice. Mice were given the indicated doses of LPS i.p., and heart tissues were harvested 18 hours later. Heart tissue sections were co-stained with the mitochondria marker VDAC1 (brown) and the lipid marker Nile red (red). All images are representative of n ≥ 3 animals per group.

| DISCUSSION

Studies of both clinical samples and animal models have demonstrated that mitochondrial damage is one of the critical pathogenic factors inducing inflammation and multiorgan failure, including cardiomyopathy, in sepsis. Recently, our investigation showed that the targeted activation of the autophagy initiation factor Beclin-1 improves the quality control of mitochondria in the heart tissue and subsequently attenuates cardiac dysfunction in response to endotoxemia. 27 Since mitochondrial physiology is tightly supported by the surrounding subcellular domain of MAMs, we went on to examine whether LPS challenge causes any deficiencies in myocardial MAMs and whether Beclin-1 is capable of controlling this process. Here we report results obtained from a mouse model of endotoxemia showing that a toxic challenge of LPS produced a profound loss in both the mass and the function of MAMs in the heart (Figures 1-3).
These defects were lessened in animals with a cardiac-specific overexpression of Beclin-1 but aggravated in those haploinsufficient for beclin-1, strongly indicating a regulatory role for Beclin-1 in maintaining proper MAMs in cardiac tissue. Furthermore, the pharmacological activation of Beclin-1 by TB-peptide showed a beneficial effect by preserving MAM mass and function in mice undergoing endotoxemia (Figure 4). In vitro studies using human cardiomyocyte AC16 cells further complemented the above in vivo observations; TB-peptide was able to stimulate autophagy and to protect MAMs in response to challenge by LPS (Figure 5). Together, these data provide evidence indicating that endotoxemia during septic shock triggers severe damage in cardiac MAMs, which is hypothesized to be one of the causative mechanisms inducing mitochondrial deficiencies, the accumulation of mitochondrial DAMPs, and subsequent cardiomyopathy in sepsis. The data also suggest that pharmacological approaches that activate Beclin-1 signaling may hold therapeutic potential to preserve MAM properties during endotoxemia. While our previous investigation suggests that Beclin-1 protects cardiac mitochondria via the selection of a certain mitophagy pathway, 27 the data presented here support the view that the mechanism of this Beclin-1-dependent protection may go beyond mitophagy and involve MAMs. In this report, we found that LPS triggers a dose-dependent reduction in the content of MAMs, starting from the lowest tested dose of 2.5 mg/kg (Figure 1). Our previous evaluation of myocardial mitochondrial status in the same model showed that the LPS-induced decrease in mitochondrial mass did not occur until the LPS challenge reached a physiologically toxic level of 5 mg/kg. 27 These data suggest that, compared with mitochondria, the features of MAMs are more sensitive to the challenge of endotoxemia. Thus, it is hypothesized that an acute septic challenge, simulated by LPS, transduces a damage signal to the myocytes and causes a decrease in MAM content, which then results in reduced mitochondrial mass. Our data further showed that enhancing Beclin-1 signaling mitigates the LPS-induced decrease in MAMs. Additionally, as previously shown, in the heart of LPS-challenged mice the upregulation of Beclin-1 leads to the accumulation of PTEN-induced putative kinase 1 (PINK1) and the E3 ubiquitin ligase Parkin in mitochondria, while strongly suppressing other mitophagy mediators such as the receptor proteins BNIP3L and BNIP3, 27 suggesting that Beclin-1 achieves the elimination of dysfunctional mitochondria by selecting PINK1-Parkin mitophagy instead of a bulk induction of all types of mitophagy.

FIGURE 3 LPS-induced changes in the mass and function of cardiac MAMs in WT, Becn1-Tg, and Becn1 +/− mice. Mice were given 5 mg/kg LPS i.p., and heart tissues were harvested 18 hours later. Fractions of pure mitochondria (PM) and MAMs were prepared by ultracentrifugation from the heart tissue. A, Successful isolation of MAMs and PM from the hearts of WT mice was demonstrated by Western blots detecting marker proteins of mitochondria and ER. B, The amount of each MAM fraction was measured and results were normalized to tissue weight. C, Levels of phospholipids in mitochondrial fractions were quantified and results were normalized to the amount of protein. In B and C, values are means ± SEM. Significant differences are shown as * for sham vs LPS-treated and for WT vs Becn1-Tg or Becn1 +/− groups (P < .05, n = 4-6, unpaired t test).
Data presented here, obtained both in vitro and in vivo, suggest that Beclin-1 has a new function in maintaining proper MAMs in the heart, and that specific activation of Beclin-1 signaling can attenuate the LPS-induced decline in MAM quality and quantity during endotoxemia. Other published in vitro studies using cultured cell lines such as HEK293 and HeLa showed that forcibly expressed Beclin-1 and PINK1 accumulated in MAMs and that both were required for the formation of mitochondria-ER tethering, suggesting a feedforward crosstalk between mitophagy and MAMs. Thus, based on our current studies, we speculate that Beclin-1 attenuates LPS-induced mitochondrial deficiencies in the heart through its actions both in preserving MAMs and in inducing mitophagy. Whether the induction of PINK1-Parkin mitophagy depends on the integrity of MAMs in Beclin-1 signaling, or vice versa, is an important question to pursue in follow-up studies.

FIGURE 4 Effects of TB-peptide on cardiac MAMs in LPS-challenged mice. WT mice were given 5 mg/kg LPS i.p., then TB-peptide, 16 mg/kg, was administered i.p. 30 minutes post LPS challenge. Heart tissues were collected 18 hours post LPS challenge, and fractions of mitochondria and MAMs were prepared by ultracentrifugation. A, The amount of each MAM fraction was measured and results were normalized to tissue weight. B, Levels of phospholipids in mitochondrial fractions were quantified and results were normalized to the amount of protein. All values are means ± SEM. Significant differences are shown as * for sham vs LPS-treated and # for without vs with TB-peptide (P < .05, n = 4-6, unpaired t test).

FIGURE 5 TB-peptide promotes autophagy and protects MAMs in human cardiomyocytes challenged by LPS. A, AC16 cells were cultured to 80%-90% confluency and treated with the conditions as illustrated. B, The percentage of cell survival was calculated based on the cytotoxicity analysis. C, Levels of LC3II and p62 in cell lysates were examined by western blot and quantified by densitometry. GAPDH was used as a loading control. D, Cells were co-immunostained with the ER marker calnexin (green) and the mitochondria marker Tom 20 (red). Nuclei stained with DAPI are shown in blue. Overlay areas of mitochondria-ER contact, shown in yellow, indicate the levels of MAMs and are labeled with white arrows. All images are representative of ≥3 independent experiments. E, Levels of phospholipids in mitochondrial fractions were quantified and results were normalized to the amount of protein. All values are means ± SEM. Significant differences are shown as * for sham vs LPS-treated and # for without vs with TB-peptide (P < .05, n = 3-5, unpaired t test).

It remains important to address whether Beclin-1-mediated regulation of MAMs is an autophagy-dependent or -independent mechanism. Though the most recognized function of Beclin-1 is to initiate autophagy, its functions in non-autophagy pathways cannot be neglected. Beclin-1 is a component of the class III phosphatidylinositol 3-kinase (PI3K) multiprotein complex, which generates phosphatidylinositol 3-phosphate for membrane trafficking. 51 Together with Atg14, Beclin-1 is involved in inducing membrane elongation and maturation of both autophagosomes and phagosomes for autophagy. Alternatively, Beclin-1 may also interact with UVRAG to stimulate vesicle formation that feeds endocytic trafficking. 52
The autophagy-independent function of Beclin-1 in endosomal membrane dynamics was shown to be necessary for neuronal viability and for skin development in animal models. 53,54 Since Beclin-1 was detected in MAMs, 49 it might directly support the formation of MAMs by interacting with other membrane trafficking factors such as UVRAG. Further dissecting the autophagy-dependent and -independent mechanisms of Beclin-1 in MAMs and mitochondria can be accomplished by utilizing genetic models carrying a knockout and/or transgenic expression of specific downstream factors of Beclin-1, such as Atg14 and/or UVRAG. The regulation of mitochondrial homeostasis by MAMs may also involve a dynamic collection of signaling molecules located at MAMs. Molecules recruited to MAMs or transported out of MAMs change constantly in response to physiological conditions or pathological challenges. A recently published proteomic study identified over 1200 proteins associated with MAMs in mouse brain and liver tissue. 19 Identified core components of MAMs include factors necessary for sufficient mitochondria-ER communication in the transport of Ca2+ 15 and lipids, 18 as well as those that support mitochondrial morphology. Many other core components belong to a variety of categories, including factors in autophagy, inflammation, apoptosis, ER stress, and metabolism. It is well accepted that mitochondria form a complex crosstalk network for these cellular functions via the generation of signaling molecules such as mitochondrial DAMPs and metabolic intermediates. 58 Therefore, changes in the dynamics of molecules located at MAMs are highly likely to affect the status of mitochondria. Proteomic profiling of cardiac MAMs in models of sepsis, with and without Beclin-1 activation, will provide a more profound understanding of the role of MAMs in sepsis-induced cardiomyopathy and their potential regulation by Beclin-1. Lastly, a delicate balance is required to control the level of MAMs, as well as the space between mitochondria and the ER, to support proper mitochondrial function. Animal models of obesity and clinical samples from Alzheimer's patients showed that an excessive induction of MAMs under these disease conditions is pathological, responsible for calcium overload in mitochondria and leading to compromised mitochondrial oxidative capacity and augmented oxidative stress. 21,59 As summarized in this report, we detected that endotoxemia induces a severe reduction in the mass and function of cardiac MAMs, a response that is maladaptive because it is associated with deterioration of mitochondria and dysfunction of the heart. 27 These results indicate that impairments of MAMs are pathological during sepsis-induced cardiomyopathy. We also obtained data suggesting a new function for Beclin-1 in supporting the properties of MAMs, either by inducing new formation of MAMs or by defending them from degradation, which in turn improves cardiac mitochondria and protects the heart in response to endotoxemia. In the future, investigations to directly examine the role of MAMs in sepsis outcomes and to determine whether MAMs are essential for Beclin-1-dependent cardiac protection will unveil new knowledge about the biological function of MAMs and improve the current understanding of the Beclin-1-dependent signaling network in the heart during sepsis.
An immediate question following the new findings reported here is whether and how Beclin-1 initiates the formation of MAMs in the heart. In this regard, we suspect that Beclin-1 may achieve this function via the mitochondrial GTPase mitofusin 2 (Mfn2). Mfn2 was originally identified as one of the mitochondrial fusion factors but is now recognized as a key regulator of the formation of MAMs. 60,61 This protein is located at MAMs and the mitochondrial outer membrane, and it physically interacts with itself or other mitofusins to bring mitochondria and the ER together. 62,63 Mfn2 is robustly expressed in the heart, 60 and a deficiency in Mfn2 expression causes fewer MAMs and a wider gap between mitochondria and the ER. 64 In a preliminary study, we detected that LPS decreased the amount of Mfn2 in MAMs and that Beclin-1 is needed to maintain its level (data not shown). One of our ongoing investigations aims to determine the potential signaling pathway between Beclin-1 and Mfn2 in cardiac MAMs and the responses of these signaling components to endotoxemia.
import inspect
from typing import List


def _get_stack(skip_frames: int) -> List[inspect.FrameInfo]:
    """Capture the current call stack, oldest frame first."""
    stack = []
    # Skip this function's own frame in addition to the requested frames.
    skip_frames += 1
    for frame_info in inspect.stack()[skip_frames:]:
        # Filter out Twisted internals, which only add noise.
        if "/twisted/" in frame_info.filename:
            continue
        # Helper defined elsewhere in this module; resolves a frame to a
        # "Class.method" style name.
        method_name = _get_stack_frame_method_name(frame_info)
        # Stop once we reach the test reactor's clock-advance machinery.
        if method_name == "ThreadedMemoryReactorClock.advance":
            break
        stack.append(frame_info)
        # The async request-handler wrapper marks the outermost frame of
        # interest, so include it and stop.
        if frame_info.function == "wrapped_async_request_handler":
            break
    # inspect.stack() is newest-first; reverse to oldest-first.
    return stack[::-1]
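The snippet references `_get_stack_frame_method_name`, which is defined elsewhere in its module. For the function to run standalone, a minimal stand-in is needed; the version below is an assumption for illustration only (the real helper may resolve names differently), together with a quick usage check.

import inspect


# Hypothetical stand-in for the module's real helper: resolve a frame to a
# "Class.method" style name when a bound method is detected.
def _get_stack_frame_method_name(frame_info: inspect.FrameInfo) -> str:
    frame_locals = frame_info.frame.f_locals
    if "self" in frame_locals:
        return f"{type(frame_locals['self']).__name__}.{frame_info.function}"
    return frame_info.function


# Usage: capture the current stack (oldest first), skipping no extra frames.
if __name__ == "__main__":
    for frame in _get_stack(0):
        print(frame.filename, frame.lineno, frame.function)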
Custer, S.D. (AP) — A Fourth of July hot dog eating contest in South Dakota turned tragic when a contestant choked to death. Custer County Sheriff Rick Wheeler tells the Rapid City Journal that 47-year-old Walter Eagle Tail, of Custer, died Thursday at a hospital after attempts to save him failed at the scene. The Custer Chamber of Commerce sponsored the contest. The group canceled a pie-eating contest scheduled for Friday. Chamber Executive Director Dave Ressler says the group is "at a loss for words." Speed-eating competitions are popular over the Fourth of July weekend. Eight-time champion Joey "Jaws" Chestnut managed to eat 61 hot dogs in 10 minutes at the annual Coney Island contest on Friday, eight short of the record 69 he swallowed in the 2013 competition.
Introducing the TRADOS Workflow Development

Within the context of the translation and localisation industry, the term used to refer to this development is "workflow". This paper proposes that workflow is an element of every translation production process, and that automating the workflow is not the all-embracing solution in itself. There are multiple workflows within any translation production process, and automation of the workflows must be preceded by an articulation and definition of the translation production processes currently employed and those which are emerging.

Introduction

Above all, this paper proposes that until we recognise and support those professionals involved in the translation production process who are not translating, any attempt to introduce workflow technology will ultimately contribute to the problems rather than resolving the issues. Once we achieve the definition of the translation production process, identify the workflows and recognise that translation production requires more than the art and science of translation, then we can proceed to manage the workflows in a fashion that will give our industry scalability and visibility, reducing costs whilst retaining quality. I'm going to introduce you to the workflow development by first giving you some background as to why we have such a development, followed by an articulation of the problems inherent in the translation and localisation industries. I'll outline some of the solutions which are currently proposed to resolve the problems and assist us to grow to service the requirements of today and the anticipated requirements of tomorrow. Finally, I will outline how TRADOS will support the anticipated explosive growth in our industry and assist it to expand into the globalisation arena with technology which is truly scalable.

The Background

I had a title for this paper and for the presentation of our new generation of technology, which was "Localisation - It's All About The Exception". This section of the paper brings us rapidly through the background of the industry and explains why exception and change management are inherent in what we do.

TRADOS Background

TRADOS has been developing and marketing translation technology for over fifteen years now. Our initial product offerings - and those which are at the core of our business still today - were terminology management and translation memory database systems. The interfaces that made TRADOS as popular as it is today were chosen because they were the industry standard tools for translation (Microsoft Word and WordPerfect). TRADOS provided an interface between them and the translation memory database. We continued with that same theme by providing technology to extract text from the industry standard applications used for developing and building complex reference material (FrameMaker and Interleaf) and providing tools to assist with niche formats, such as QuarkXPress and PageMaker.
We have developed tools to create a translation memory from previously translated material, and provided project management and database administration utilities within the translation memory system. Translation production has never been a simple process. Any process involving multiple roles, distributed teams, complex content and formats, and constant change management will present significant challenges. The management and automation of the translation production process has not yet been fully addressed by any technology in an independent fashion. We have all been busy trying to reduce the total cost of translation by reducing the amount of repetitive work involved and by increasing the speed at which we can produce. At TRADOS we've tried to achieve this aim by getting the translator to accept the new technology as a useful tool to assist them in their creative process. We've also focussed very strongly on the introduction and acceptance of translation memory technology as being essential to the translation and localisation industry. We are continuing to incrementally improve our translation memory system, offering it as a common technology solution in a forthcoming release. Our translation interfaces have grown to incorporate our HTML/XML/SGML and our Microsoft PowerPoint interfaces. We are about to introduce a common tag format (TRADOStag, an XML-based format) for the interfaces into professional publishing packages, along with utilities to make the process more effective. As pioneers of translation technology, we carry a weighty responsibility - delivering on our promise to support the professional translator with best-of-breed technology.

Translation Memory Technology

Being a pioneer is a most challenging role - identifying a need in a marketplace which has not yet been fully articulated, and providing a technology solution which anticipates requirements which have not yet been specified, is a supreme challenge, not least because it involves an element of quite some risk. TRADOS tools and technology are accepted by now as the industry standard for the professional translator. Translation memory technology has delivered significant cost, quality and time-to-market benefits to translators and translation managers. As the global market expands, and the projections for the number of words and type of material to be translated reach vast proportions, the requirement for the translation production process to be merged with other production and business systems within all organisations is becoming imperative. To service the needs of the "Global Marketplace" the industry must expand substantially. How will it do so and still retain a semblance of quality? How do we connect our processes with the rest of the processes used within our organisation whilst still retaining the versatility that we are required to show? Most fundamental is the problem we have of doing more in a shorter timeline at a lower cost. Translation memory technology has contributed to the scalability of the industry, allowing us to grow and embrace new paradigms as rapidly as they have been presented to us. The TRADOS workflow development consolidates our technology and provides a common platform to support the complex process of producing translated material within a distributed environment with seemingly impossible deadlines.
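Conceptually, a translation memory pairs previously translated source segments with their translations and returns exact or close ("fuzzy") matches for new segments. The following is a deliberately simplified sketch of that idea in Python - an illustration of the concept only, not the TRADOS implementation; the difflib ratio is a stand-in for a production fuzzy-matching algorithm.

from difflib import SequenceMatcher
from typing import Dict, Optional, Tuple

class TranslationMemory:
    """A toy translation memory: stores source/target segment pairs and
    returns the best match above a fuzzy-match threshold."""

    def __init__(self, threshold: float = 0.75) -> None:
        self.threshold = threshold
        self.segments: Dict[str, str] = {}

    def add(self, source: str, target: str) -> None:
        self.segments[source] = target

    def lookup(self, source: str) -> Optional[Tuple[str, float]]:
        # Exact match: reuse the stored translation directly.
        if source in self.segments:
            return self.segments[source], 1.0
        # Fuzzy match: return the closest stored segment's translation,
        # together with its similarity score, if above the threshold.
        best, score = None, 0.0
        for stored, translation in self.segments.items():
            ratio = SequenceMatcher(None, source, stored).ratio()
            if ratio > score:
                best, score = translation, ratio
        if best is not None and score >= self.threshold:
            return best, score
        return None

tm = TranslationMemory()
tm.add("Click OK to continue.", "Cliquez sur OK pour continuer.")
print(tm.lookup("Click OK to proceed."))  # fuzzy hit with its score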
Translation Production Background

Technology has moved so fast over the past 10 to 15 years that it is difficult now to envision how translations were ever produced without the personal computer. Add the Internet into the mix of the multiple software tools now available for developing and publishing content, think about how the content to be translated has changed during the recent past - from being primarily reference manuals for products with a world-wide distribution to the marketplace it is today - and gasp at the perceived diversity of process which would be necessary to deliver. In the early days of translation memory technology systems, a translation production process was almost simple. We had the content - it was originated by the documentation group, the legal group, the marketing group. It was often originated in a word processing application such as Microsoft Word, but if it was a technical reference manual with complex illustrations, a desktop publishing product such as Interleaf was used. The translation production process in the early days involved origination of the material, from which text for translation was extracted (usually manually). The text was translated, the documents re-assembled, and then published. Translation was a costly business, and really only worthwhile when large volumes of text were to be translated. Early translation technology pioneers recognised that within these large volumes of text much of the content was repetitive; translation memory databases were developed to address this issue. As translation technology moved on, the cost of text extraction and subsequent document reassembly was addressed. Converters (such as the S-Tagger) or filters within other translation memory systems automated the text extraction process, facilitating easier document reassembly. The configuration of the translation production team usually reflected the simplicity of this process. The translation production team was made up of the project manager, who made the decisions about scope, schedule, budget and resources and drove the project from start to completion; the translation engineer, who converted the files, set up the translation memory, built the localisation kit and was responsible for technical queries, distribution and quality assurance; the linguist, who translated the content, populated the translation memories, reviewed linguistic content, edited the content and delivered completed translated files and translation memories; and the publications person, who assembled and finalised the documents for publication. So what has changed? The translation production process remains substantially similar - documents for translation are originated, sent for translation, translatable text is extracted, translated, and the completed documents re-assembled and published. The configuration of a typical translation team today is markedly similar to yesterday's team. What makes definition and automation of the process today so much more imperative, and how is it easier to do now than it was then? The change that has probably had the most impact on the translation production process is how material to be translated is now published, and the consequent need for a tighter, more automated process.
Today's material for translation is less and less the large reference manual which will be printed on paper, or even delivered on CD in PDF format (although much of this sort of material still remains and will continue to be required), which involves a long and complex translation production process. An additional major factor is the sheer volume now presented for translation. The number of words has increased massively in total - whilst the number of words per project has reduced. More and more instantaneous translation is required; the publication methods and environments have extended to include PDF delivered over the web, HTML help, dynamic ASP pages and multimedia. Content is frequently generated on the fly. The world wide web has increased the pressure for simultaneous (or as near as possible) shipment of localised versions of products along with the original source language. Rather than investing in user guides which a user will read from start to finish, more and more publishers are turning to web-based support systems in the hope that the material can be updated in a more cost-efficient manner. The production tasks within today's translation production process have also changed accordingly. More and more of the tasks are engineering and administrative, involving complex distribution chains with rapid turnaround. The Internet has also introduced a new era where collaboration will be the key word, and availability of information about a project, as well as the content of a project, must be distributed in a more standardised and accessible fashion. Re-purposing of information is still a major aspiration for most involved in the process of producing translated material. Re-purposing demands a new paradigm in how information is developed, stored and retrieved. Current technology has not yet delivered a solution which is universally adopted, but I don't imagine it's that far away.

Current Market Background

The current market for translation and localisation is sending the technology providers some very clear messages. These messages mainly centre on the requirement for a new generation of technology to reflect the changing marketplace they are adapting themselves to service. Translation technology has focussed (as we have just outlined) up until now on providing tools to automate repetitive processes and tasks for the translator. As the localisation/translation business has matured, and the translation production process has become more defined, the major businesses around the industry have been built on management of the translation production process, rather than the actual translation itself. We currently see those in the marketplace for translation production facing a number of new (and not so new) issues. First amongst these is the speed of delivery of translated material. In fairly recent times, localised material shipped 6 to 8 weeks after the source language material. Quality was a higher criterion for acceptance than availability. Costs were associated mainly with the per-word price of translation, rather than the management of the translation, and management was often centred at the originating site. These days the line item "project management" appears on virtually every purchase order, at varying rates from 5 to 10 percent of the total project costs. The Internet and global sales opportunities have changed the ball park quite radically.
Information about the availability of a product in its source language speeds its way across the world wide web in an instant, and demand for the internationalised versions creates significant pressure for near-instantaneous delivery of the localised product.

The Instant Whip Syndrome

This "instant whip" syndrome has led to increasing numbers of translators working simultaneously on the same project, significantly increasing the amount of management required. It has also led to the fragmentation of translation groups - when a project requires multiple translators to deliver in a shorter length of time, employing translators to be available full time becomes uneconomic, outside of organisations whose core business is translation (such as single language translation vendors) or whose flow of work is constant or highly specialised. Another issue facing today's translation professionals is the dispersal, collection and organisation of information. This is, after all, the information age, and the expectation on us all is the provision of more information, in a more organised fashion, more quickly available than ever. Collaboration, visibility and openness are key words of the early third millennium. Some time in the future we may all use a single format to deliver information in a standard fashion, but we currently have a myriad of choices, and most have a marked preference for using different tools to deliver different messages - PDF generated from FrameMaker, MSWord, Quicksilver or QuarkXPress for user information and reference material; training material developed using AuthorWare, MacroMedia Director or Microsoft PowerPoint; on-line help as HTML or RTF; marketing literature originated in QuarkXPress, InDesign or PageMaker; web pages authored using countless "standard" applications. Specialist applications for specialised material abound, with no standard other than meta languages such as XML emerging for export purposes. The localisation industry is attempting to produce billions of words of translated material without a tool set to support the multiple formats, the shrinking timelines, the dispersed resource bank, and the automated systems spitting out short segments of text for translation with an expectation of instantaneous turnaround. And we're expected to produce even more every day.

Defining the Localisation Industry Issues

The problems inherent in our industry are really quite simple to articulate. We have not defined the industry as either a creative or a mechanical industry. Machine translation and other technological advances are constantly snapping at our heels, distracting us from the real issues. We are rarely core business and frequently an afterthought. The industry is growing at a phenomenal rate and we're not yet mature enough to be truly scalable. This section of the paper outlines these issues.

Localisation, Creative or Mechanical?

We ask what this industry is built on - the creative impetus or the mechanical operation? Or is it a combination of the two, resulting in a communications industry? When I talk about our industry being creative or mechanical, I'm not referring to the eternal debate over whether translation is art or science - let's not go there right now! At a fundamental level, the problem for our industry is that it sits in the middle. It's both a creative process and a mechanical process, which is often seen as a necessary evil by those who are paying for it. It epitomises so much that we know nothing about and therefore cannot control.
We need localised product, say the strategy makers, because our partners in another part of the world won't buy unless we speak their language. We need localised product so that we can expand our marketplace to become this truly global enterprise that we're supposed to be. To get this localised product we have to rely on people we can't see, in places we've never been, to produce material we don't understand, which will be sold by parts of our organisation we may be in competition with, to users we'll never get to know - since we can't understand them - who have sensitivities which are foreign to us. And we have to pay for this? Is it any wonder the localisation industry has no real centre? We're all so busy justifying our existence and fighting the communication battle. We understand somewhere within ourselves that the industry is about so much more than per-word cost these days, but few of us can translate that into a value proposition which makes sense to all. We fight the communication battle on the level of the computer taking over our lives as well - although perhaps these days it's less fear of the computer and more fear of the Internet. The Internet is pushing us towards translation on demand from one perspective, but from another it is actually pushing us away from it. The Internet is a communications revolution - it is changing how we do business with each other, demanding a style of communication which is open, accessible, more casual and personal. We have a creative process which is treated as a production process - almost manufacturing - and we're expected to treat it as if it were a manufacturing process. But it isn't. Yes, we can define it, we can probably make it more efficient, we can make it more visible, but first we have to define what it is. At a high level, localisation is the production of an adaptation of the output of a creative process. The production process for that adaptation involves the management of another creative process (translation) and subsequent production of its output. It is fundamentally reactive - we can advise, cajole and stamp our feet all we like, and many have been successful in educating those creating the source - but we will still always have to adapt that which has been created by others.

Machine Translation and Other Silver Bullets

Virtually all of the world is waiting for the first machine translation system that will really work. Why? So we can communicate with all of the rest of the world without spending all of this money on translation. How difficult can it really be? Hmmm... Myself, I look forward immensely to the first truly effective machine translation system, because it's only when it becomes available that those clamouring for it will understand that the proposition is one which can assist us to communicate with all of the rest of the world, but it will never (OK, let's never say never) replace communication - the emotions, nuances, expressions. Should such a machine translation system become available, we will see the shifting of communication methods from written to oral or visual, to replace the lack of personality inherent in a mechanical translation, however grammatically and syntactically correct it is. I write little these days which is formally localised; my main localisation activity is presentations. I use the same set of slides, the same accent, the same language, the same visual aids, but the delivery of each presentation is localised to the country, region and makeup of the attendees. I slow my speech or speed it up.
I use humour in some situations, never in others. I choose the words I use to ensure that my audience understands me, and I tailor my expressions and emotions according to how it feels. It's always me, it's the same presentation, but I deliver it in a localised fashion. A gentleman in the US once asked me why I hadn't localised the text in my presentation from International English to US English. He felt it got in the way for him to be puzzling over why "localise" was spelt like that rather than the correct way, "localize". Like a typing error to a proofreader reading a novel, it didn't stop him from understanding the word, but it annoyed him. My reply had to be that I hadn't localised my accent either, but didn't mind when they laughed at my pronunciation of what they call "filmmmm" as "fillum". This example is almost a trivialisation of what localisation is about, but for me it's one of the fundamental issues. As an essentially undefined process, localisation is seen as some kind of black magic. What exactly happens during a translation production process? The more familiar, those who have been localising for decades, have the picture. But what they can't understand is why it still costs as much today as it did 20 years ago. Don't we have more tools? Surely there is machine translation by now that will just take the stuff and spit it out in another language? Sure, it's a bit quicker today, but it's still a significant cost. Worse than that, we still don't have real metrics, and worse again, we haven't really managed to get it into the source. We've heard of controlled language projects to try and integrate, and we use translation memory technology, but can we actually see what happens in a project, as an industry?

Our Core Businesses

Another fundamental cornerstone of our industry is that translation is not core business for many of us. Even for TRADOS, the language architects, we are primarily software developers. Our core business is the development of software and the earning of revenue from sales of that software. Translation for us is an aid to selling the software, not an aid to developing it. It is also a statement of our marketing message. For multilingual governing bodies (such as the EU and the Canadian, Irish or Welsh governments) it's not core business. Their core business is making laws and directives. Translating the laws and directives into a form whereby they can be understood is secondary to arriving at the decisions. Localisation is, of course, taken into account at a much earlier stage in the process, by the representatives of the different locales, in the making of the decisions. It is difficult, then, for us to justify expenditure on technology, infrastructure and resources which are a supporting proposition. Where does the localisation budget live in a company? Where does the workflow for a translation production project start?

Scalability

We're being told by Forrester Reports (www.forrester.com) and by industry gurus that this globalisation business is going to grow to vast proportions very soon. I wonder how we're going to cope with it. We're not really scalable enough for that, but the pressure is being applied on us to become more scalable. We look at the issues we're already coping with today and wonder how we can do it faster, to be able to support those needs that are about to descend. Where does quality end up in this scenario? How do we assure that what we are producing is still of the high quality we all signed on for?
We need to build support systems for ourselves which reach across the divides of merger mania and competitive advantage. Otherwise we will not be able to support the growth which is coming on stream. We are seeing completely new propositions, such as Application Service Providers and e-Hubs, approaching us over the Internet. We see how the Internet has so far contributed to the increase in the pressure for more, faster, cheaper, and the ripple of nervousness tingles through the industry. We fear that we are not set up to cope with this. We fear that not only are we not set up to cope with this, we know that we must continue to support our traditional business as well - we see the technology stocks tumble and pull out our contact management systems to seek out the traditional customer who has built his business on more than e-hype. But we also know that there is yet another new format coming down the line, and we must be able to support it. The paradigm has to change, and we must be part of changing the paradigm. We need to accept that we've got a creative process which has mechanical demands placed upon it. We have a process that's by its nature going to be spread across a distributed team, and one which is utterly dependent on another very creative process - the development of product and of product usage, sales, marketing and support material. We have pressure to deliver faster, to reduce costs, to ensure high quality and to expand to embrace twice as much next year as this. We must seamlessly integrate new technologies, media and communication methods into our business, and demand that the technology providers ride far in front of us, scouting them out. The Cluetrain Manifesto is a "movement" which examines the phenomenon that is the Internet and the substantial changes that we must all implement in our businesses to be successful in the global village.

State-of-the-Art Solutions to an Age-Old Problem

If the authors are correct in their analysis, and should they actually manage to convince the whole of the world of their thesis, our industry will be required to gear up quite significantly, and the predictions for the "globalisation" business will, in fact, be realised and perhaps exceeded. Our industry has grown at an extraordinary pace so far, and right through its history we have been offered the solution to end all solutions. We have had ISO, machine translation, controlled language, SGML, translation memory, merger mania and now workflow, the Internet and XML. The smartest amongst us have taken the best of these propositions and integrated what they needed to achieve their ends. The ISO accreditation process can help any organisation to assess its business process strengths and weaknesses. Machine translation, when used in conjunction with a translation memory system and smart engineers, can cut hundreds of hours out of a schedule whilst retaining quality in the right environment. Controlled language has a problem, since it too is trying to impose a mechanical solution on a creative problem, but it is one which may yet surprise us all. SGML systems abound but, similar to controlled language and document management systems, they're costly, and business changes so fast that it's hard to justify the costs. Workflow is not defined - to some it's a streamlining of their business processes, to others it's web-based distribution, to others again it's automation of their production processes combined with streamlining of their business processes and a web-based distribution and communication facility.
Traditional Workflow

We keep talking about "workflow" without ever actually defining what it is. The term is used to represent many aspects of our businesses. However, it is used in an indiscriminate fashion, leading to confusion as to how a workflow can be improved. When I started working on this development, I had three strong feelings about "workflow":

"It's for manufacturing environments"

Workflow is defined by the WfMC (Workflow Management Coalition), the standards body for workflow management systems, as "the automation of a business process, in whole or in part, during which documents, information, or tasks are passed from one participant to another for action, according to a set of procedural rules". They go on to state that "A workflow management system defines, creates and manages the execution of workflows, through the use of software, running on one or more workflow engines, which is able to interpret the process definition, interact with workflow participants, and, where required, invoke the use of information technology (IT) tools and applications." The traditional, commercially available workflow products that most of us would be familiar with are the likes of the SAP system. Many of us would not actually be familiar enough with the SAP system to know much more about it than that there are always advertisements in the papers for SAP consultants who seem to earn a lot of money! Or that it really only applies in a manufacturing environment. My impression (and it may of course just be that I'm insular) is that workflow is kind of synonymous with manufacturing or other "straight-through" or mechanical processes - that is, processes that have a clear beginning, middle and end. When I hear workflow, I feel on a gut level that we're talking about a methodology that can't have an application within a creative environment. Similar to ISO accreditation, workflow as a proposition feels like a square peg in a round hole.

"It's consultants, business process re-engineering, costly"

When I hear workflow, another set of bells ring in my head. I hear consultants, business process re-engineering, investment. Somewhere inside my head I react against the thought that we need to spend that much money assessing, identifying and documenting a process which is so subject to change. For me, successful management of a translation project is usually about good change management - anticipating not the actual unexpected, but the fact that the unexpected inevitably appears at some point during the project. Anticipating it means building a firm foundation for the project, to be able to facilitate the new parameters at any point. In the business of producing products (as opposed to services) which are localised, the sudden discovery or epiphany that changes the direction of the product usually happens just as we're about to start our part of the process.

"It's web based, EJB, B2B"

The third and final major area that my head now associates with workflow is the term "web based". The term web-based workflow is extremely popular across all sections of industry, not just our industry. It's jazzed up with words like EJB (Enterprise Java Bean), B2B (Business to Business) and other new acronyms for existing concepts such as CRM (Customer Relationship Management), which I've no doubt we'll hear referred to as CIF (Customer Interaction Facilitation) before too long. I like the Internet, I use it all the time, and I admit to even being an e-mail junkie. I think that the Enterprise Java Bean is really cool technology.
I'm pleased to see the re-definition of business practices which for a long time have stagnated in a murky area between the industrial and technology revolutions. But I want to know how a web-based workflow applies to the complex issue of translation production. I want to know what is really meant by the term workflow, and then what is meant by web-based.

Web-Based Workflow Systems

Pundits, analysts, gurus and localisation warriors alike have all started to suggest that our only hope of surviving and servicing this upcoming influx is to adopt a web-based workflow paradigm. The term "web-based workflow" is by now so pervasive as to seem a fundamental cornerstone of the translation/localisation process. Probably where I have the issue with this proposition is the term "based". A translation workflow or production management process is rarely "based" on the Internet. The Internet is a vital tool in some parts of the process, but most of my process lives on the desktop. What I'd like to do with a web-centric workflow system is to be able to continue doing business as I currently do it, only without some of the headaches associated with owning, using and maintaining software and with working with a distributed team. For me, the Internet is just the best for:

- Finding information (about the process, the resources, the methodology)
- Communication (status, contracting, resources)
- Delivery (sending and receiving files)

We use e-rooms, e-mail, bulletin boards, ftp and newsgroups - but we use them in conjunction with our desktop applications. There is an emerging trend, however, which offers up a completely different marketplace for us all, one which is truly Internet based and which will require a web-based workflow management system - that's e-support and database-driven web product. We all need to be ready for that. When I talk web-based workflow, therefore, I'm talking primarily about how we can make use of the Internet for the services we have grown to depend on it for within our multi-discipline IT set-ups and workflows, and I'm also bearing in mind the future - when what is now almost rocket science will be accepted practice and methodology. In the web-based workflow paradigm, our workflow is: got stuff - need it translated somewhere else in a different timezone - need to send it there - they translate it - they tell me how they're getting on - they send back the translated stuff - I publish it.

Translation Workflow

In our context, workflow is all of the above, some of the above and more. Having had the privilege of the time to research just what is meant by workflow, I've come to some conclusions. Workflow is how a process is automated. To design a workflow you must know what the input is, what tasks must be performed on it, in what order they must be performed, by whom, what the output is and when it is expected. Simple really, eh? In the context of a translation or localisation project the workflow is: I've got a source file, I need it translated, and I need to publish it afterwards. On a more granular level, I perhaps have: I've got a source file, I need it translated after it's created, translation must be done by a native speaker, who will produce localised files within a very short time frame for me to publish. Our input is the source material and perhaps previously translated material. The tasks that must be performed on it are translation (as the primary task) together with other ancillary or supporting tasks.
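Read that way, a workflow can be written down as data: an input, an ordered list of tasks with assigned roles, and an expected output. The sketch below models the "source file in, translated file out" workflow just described - a schematic illustration under those assumptions, not the TRADOS Workspace design; the role names are borrowed from the team configuration described earlier.

from dataclasses import dataclass, field
from typing import Callable, List

@dataclass
class Task:
    name: str
    role: str                      # who performs the task
    action: Callable[[str], str]   # what the task does to the work item

@dataclass
class Workflow:
    name: str
    tasks: List[Task] = field(default_factory=list)

    def run(self, work_item: str) -> str:
        # Execute the tasks in their defined order, passing the work
        # item from one participant to the next.
        for task in self.tasks:
            print(f"{task.role}: {task.name}")
            work_item = task.action(work_item)
        return work_item

translate = Workflow("translation production", tasks=[
    Task("extract text", "translation engineer", lambda f: f + " [extracted]"),
    Task("translate", "linguist", lambda f: f + " [translated]"),
    Task("reassemble and publish", "publications", lambda f: f + " [published]"),
])
print(translate.run("source.fm"))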
In our context, we're really talking about a hierarchy of workflows, in much the same way that the WfMC describes a workflow management system.

The TRADOS Workflow Development

This brings us then to the TRADOS Workflow development - just what is it we have been able to make out of all that? First, some statements:

- there is no single workflow for translation production
- any translation workflow is in fact a series of workflows
- traditional workflow is in many ways an anathema to our industry
- within the translation production process, human input and creativity are required
- there are tasks which can be automated, and we will automate them
- there are tasks which are carried out that we don't know about; we will provide the support to automate them
- a true translation workflow management system will need to be configured for the specific workflows involved
- the translation workflow management system (that which automates the progress of material through stages) must interact with a translation production management system (that which automates the tasks during each of the stages)

Whoa! Strong stuff - so what on earth are you reading this for? Just to be told that workflow doesn't apply to your environment? That doesn't work - the inherent, eternal battle in the localisation business is to marry the creative with the mechanical. Localisation sits right in between our product (content) developers - those mad creative people who get ideas and make them into something tangible - and our sales people, who are making money out of the produced ideas. What we propose with our development is actually fairly simple. We need definition of the translation production process on a generic level. We need identification of the common tasks performed within any translation production process. We need recognition that the roles within the translation production process have evolved to include technical, project and publications management along with linguistic management and translation. We need to identify the common workflows within the translation production process. We need to articulate our requirements for technology to support the automation of tasks, the progress of projects, the management of the process and the operation of the workflows. We need technology to assist us to achieve our aims, which will be configurable for our environment and which will solve our problems.

TRADOS Workspace - Phase One

The first phase in the introduction of the TRADOS Workspace is the definition of the translation production management process. The output from this definition is a set of utilities which ties the TRADOS technology together, automating production tasks. This first phase introduces an interface and methodology for those working on translation production using TRADOS technology who are not translating. It is essential that this paradigm is consolidated and accepted before we attempt to streamline the workflows of a translation production process. The presentation accompanying this paper will illustrate how such a system can work, and take us through a couple of common scenarios.

TRADOS Workspace - Phase Two

Phase two is the client/server based workflow management system, which automates the workflow for translation production as defined in phase one. The workflow management system may have a specialised market early on, and may not be suitable for traditional localisation projects. We look to you to tell us whether it will be. Core to the workflow management system is the workflow engine.
The user interfaces with the workflow engine at project start to define the elements of the project: the tasks which must be performed, the order in which they must be performed, the conditions under which completion is determined and the resources which will perform the tasks. The workflow engine collects this set of instructions and executes them. Other users will then interface with the system to signal the completion of a stage, and to add updated material or additional data to it. Around the workflow engine, then, we must build a set of interfaces, and provide as many automated processes as possible (phase one). Some processes used within a translation production environment are easily automated -mainly the distribution and administrative ones; others are more complex. Currently, the vast majority of translation production is outsourced to either multi-language vendors (buying services from single-language vendors, freelance translators and in-house resources), single-language vendors (buying services from freelance translators and with in-house translators and resources) or freelance translators. A standard definition of translation production is also made difficult by the varied and diverse nature (formats) of the material being translated, and the constantly changing requirements of the marketplace. The appendix to this paper outlines the generic translation production process structures in terms of phases, team configurations, roles and generic task sets. The presentation will illustrate the processes, both as they are currently and as they can be improved using the TRADOS Workspace.

Conclusion

This session is not intended to act as a product demonstration; there will be plenty of opportunity for that in the coming months. Instead, I hope to have given you an insight into how we see our industry and the issues we plan to resolve by providing the technology you need. TRADOS is committed to pioneering the best of new technology to support the translation and localisation professional. The communications revolution that is the Internet is having, and will continue to have, a profound effect on how we all do business. Our business processes must become more streamlined, automated and identifiable in order that we can take advantage of the best of the new paradigms in technical and commercial propositions. The TRADOS Workspace will provide the platform which will enable you to choose which parts of your process can best be executed on the desktop, over the Internet or within a client/server environment. However, there are a number of key similarities between all of these, and a number of problems common across the majority of the customer base. The Workspace research has identified these and will deliver a solution which can increase efficiency, reduce costs, raise visibility and ensure that current and future technology employed in the origination process can be seamlessly integrated into the translation/localisation process.

The Translation Production Phases

Common across all translation production processes are the four major phases of a project. These phases are consecutive, but their start and end dates are not necessarily quantifiable prior to project start. Within a master production plan there may be multiple production projects; each of these breaks down into the same four phases. The phases are: Project Definition, Pre-Production, Production and Post-Production.
Phase 1 -Project Definition

The project definition phase may take place long before material is ready for translation, or it may happen immediately prior to the production phase. When this phase happens is irrelevant for description purposes. A project with a simple scope, requiring merely that an automated process be set up once with little human interaction, requires definition as much as a project with a complex scope. An example of the former would be a pre-defined process where content from a content management system is automatically distributed to a predefined list of translators over a LAN, translated and checked back in. During the project definition phase, the following major tasks and task groups are defined. Not all of these tasks are carried out within all projects, and no role is yet assigned:

Decision to start project

Phase 2 -Pre-Production

During the pre-production phase, the files for the project are usually available and are the basis for the continuance of the project. The resources have been identified and costs have been quantified and agreed. Pre-production may involve a significant amount of preparation work, such as linking graphics in a FrameMaker file, or marking up non-translatable text in a series of HTML files. The tasks typically carried out during the pre-production phase include:

Collection of material (TM, previous translation, graphic files)
Quantification of files (what files are they, directory structure)
Analysis of files (administrative, as in word counting, as well as linguistic and technical)
Preparation (graphics may need to be linked, correct styles applied to text, marking text as do-not-translate)
Creation of Localisation Kit (breaking the project down into organised chunks, writing instructions)
Documentation of all of the above
Distribution

Phase 3 -Production

Production is defined as the production of new material, rather than the adaptation of source material. For example, during the pre-production phase a worker may prepare a set of RTF files, ensuring that all styles are correctly applied. This does not constitute production but rather preparation, since it is being done to ensure that when new material is produced in multiple languages it will be produced correctly. However, the application of updated styles to a set of translated RTF files during a DTP stage of production would be defined as a production task. The actual production process within a translation project involves a very quantifiable set of tasks. Many of them will also have taken place in the pre-production phase -for example, distribution: in this phase the files are distributed to a different set of resources. The tasks include, but are not limited to, the following key areas:

Distribution

Phase 4 -Post-Production

Once production is complete, an additional set of tasks is typically performed on the translated product to complete the translation production process. These include:

[Illustration of the pre-production phase]
Quality Assurance
Billing and Accounting
Archiving

The Configuration of the Translation Production Teams

There are multiple configurations of teams working on any translation production process, and it is not possible to quantify nor to recommend how a team is best configured via a software solution. However, it is useful for our purposes to itemise the common configurations.
The major common configurations currently employed include:

A single-source project would be, for example, where the documentation team within a software publisher develops all the material for translation, and they produce just one type of material -for example, user guides in FrameMaker format.

The Roles within a Translation Production Process

Within a translation production process, there are a number of roles which are common across all team configurations. The roles follow a traditional specification:

the decision maker (focus is time, costs and planning)
the technical specialist (focuses on the technical aspects of implementing the plan and the technical implications for costs, timeliness and planning)
the administrator (focus is on facilitating the implementation of the plan)
the workers (execute the plan and produce the material)
the quality assurance (assure the quality of the produced material)

Within the translation production process, the decision maker is normally referred to as the project manager. However, in many environments this role may have a different name, or parts of the role may be shared amongst, for example, the vendor manager, the language manager and the production manager. The technical specialist may be a translation engineer, a CAT specialist or a DTP specialist. An editor within some environments may also be the technical specialist, specialising in assessing and preparing the linguistic content in its source language. Workers within the translation production environment are the translators, the DTP and graphics personnel, the editors, the reviewers and often the CAT specialists. Quality assurance is usually built into the task group, so that linguistic quality assurance is separate from the validation of files for re-assembly, which may take place within the DTP department. Bearing this in mind, we define the roles within the translation production process in as standard a fashion as possible. The roles and their definitions follow.

The Administrator

The administrator is responsible for the administrative work in the project, such as:

Flag raising when issues arise

An administrator will not normally have managerial, linguistic or technical responsibilities, and is probably the only role within the translation production process which will be replaced by the Workspace. Virtually all roles within the translation production process involve some administration, making this function key within the Workspace development.

Translation Engineer (CAT Specialist)

The Translation Engineer is frequently referred to as a CAT (Computer-Aided Translation) specialist. This is the role whose task list expands and contracts the most, depending on the complexity of the project and the configuration of the team. The Translation Engineer is responsible for the technical aspects of projects where CAT is used, including:

Project and file assessment
Often the main technical resource, preparing and distributing localisation kits

Publications Engineer

Given that a large percentage of what will be published in the future will not be published via a desktop application, but rather via a web interface, this role refers to those responsible for the layout and formatting of material, including book building where relevant, or web page validation and re-building.
Editor

Many of our team configurations, particularly those working in distributed environments, appoint editors to a language team to assure that the style and content of a set of material is consistent, and to perform standard editing tasks on the material. An editor is generally a senior linguist responsible for:

Assessment of linguistic issues in files -language, complexity, subject matter, alignment
Preparation of guidelines and assistance to translators
Sometimes conversion of files or other CAT tasks
Sometimes generation of ToC and Index during translation
Text editing for language, consistency and completeness
Often heading up a team of translators, with resource allocation responsibilities and other project management tasks

Project Manager

Within some team configurations, a project manager carries out the standard project management tasks, but within others the project manager is also the vendor manager and production manager. Most roles require a degree of project management, and all require interaction with the project management function of the project they are working on. The project management function is responsible for:

Scoping of project

Quality Assurance

A translation production process will have many quality assurance points. The main headings for these are listed below. It is very rare to have dedicated quality assurance personnel for each of these areas; it is more usual to have quality assurance built into the job description or task list of one of the other roles:

Linguistic
Publications Engineering
CAT (Technical)

Reviewer

Reviewers are included in the list of roles within the translation production process, and treated as workers, since it frequently happens that a review stage is the one which triggers the most change in a project. A local office may be unable to approve the style of the translation, for example, incurring a hunt for a new vendor and subsequent re-working of all steps within the process. Or a reviewer raises serious issues about the content of the material, leading to a reassessment of the entire project. Inefficient, I hear you say -how can this happen? Like all else in localisation (and indeed software development), it shouldn't, but it does! A Reviewer is defined as:

External to the production team
Subject matter expert for the originator
Responsible for linguistic review

Terminologist

The role of the terminologist does not appear in the vast majority of team configurations. However, it appears in enough team configurations, and is essential within those configurations, that it is defined as a separate role rather than a set of tasks, the better to describe a workflow or process involving a dedicated terminologist. The role is often absorbed into other roles, the terminologist being responsible for:

Developing terminology

The Translator

Yes, we got this far through the role definitions without mentioning the translator -but the roles are defined in alphabetical order in this document, not in order of importance. However, this is also an indication of the change in focus of what constitutes the production process within a translation project. In many ways, it also reflects the manner in which translation technology has focussed (and delivered) on the provision of tools to assist the automation of the repetitive tasks involved in translating text. The translator is responsible for:

The illustration below gives a graphical example of a generic translation production process:
package info.ganglia.jmxetric;

import info.ganglia.gmetric4j.GMonitor;

import java.lang.instrument.Instrumentation;

// import java.util.logging.Logger;

/**
 * JMXetricAgent is a JVM agent that will sample MBean attributes on a periodic basis,
 * publishing the value of those attributes to the Ganglia gmond process.
 * <br>
 * Use:<br>
 * <code>java -javaagent:path/jmxetric.jar=args yourmainclass</code>
 * <br>
 * Example:<br>
 * <code>java -javaagent:/opt/jmxetric_0_1/jmxetric.jar=host="localhost",port="8649",config=/opt/jmxetric_0_1/jmxetric.xml yourmainclass</code>
 * <br>
 * Arguments can be:<br>
 * <table>
 * <tr><th>Argument</th><th>Default</th><th>Description</th></tr>
 * <tr><td>host</td><td></td><td>Host address for ganglia</td></tr>
 * <tr><td>port</td><td></td><td>Port for ganglia</td></tr>
 * <tr><td>config</td><td>jmxetric.xml</td><td>Config file path</td></tr>
 * </table>
 */
public class JMXetricAgent extends GMonitor {
    // private static Logger log =
    //     Logger.getLogger(JMXetricAgent.class.getName());

    /**
     * A long-running, trivial main method for test purposes
     * @param args Not used
     */
    public static void main(String[] args) throws Exception {
        while( true ) {
            Thread.sleep(1000*60*5);
            System.out.println("Test wakeup");
        }
    }

    /**
     * The JVM agent entry point
     * @param agentArgs agent arguments, parsed by XMLConfigurationService
     * @param inst instrumentation handle (unused)
     */
    public static void premain(String agentArgs, Instrumentation inst) {
        System.out.println(STARTUP_NOTICE);
        JMXetricAgent a = null;
        try {
            a = new JMXetricAgent();
            XMLConfigurationService.configure(a, agentArgs);
            a.start();
        } catch ( Exception ex ) {
            // log.severe("Exception starting JMXetricAgent");
            ex.printStackTrace();
        }
    }

    private static final String STARTUP_NOTICE = "JMXetricAgent instrumented JVM, see https://github.com/ganglia/jmxetric";
}
Marvel Studios has released a new teaser image for the Daredevil Netflix series that seems to hint at the possibility of Absorbing Man making an appearance. The image is of a flyer for a boxing match between Carl “Crusher” Creel and “Battlin'” Jack Murdock. The former is the alias of the Absorbing Man, and the latter is the name of Matt Murdock's father. Be careful of the Murdock boys. They got the devil in them. #Daredevil pic.twitter.com/ycGOg5sQnz — Daredevil (@Daredevil) February 20, 2015 Whether or not we'll see the fight depends on just how far back in Matt Murdock's history the Daredevil series picks up. It does provide the first connection we've seen between Marvel's ABC and Netflix television endeavors, as Absorbing Man made an appearance early in the second season of Agents of S.H.I.E.L.D. We know from the comics that, as an adult, Matt struggles with his relationship with his father and the sense of debt he harbors toward him after his father's death. Perhaps the Absorbing Man's metahuman abilities, if used during his showdown with Battlin' Jack, are responsible for Jack Murdock's demise.
Making decisions for intelligent agents in a procedure descriptive framework There are many existing and potential industrial and commercial applications for intelligent agents in the literature. The central challenge for these applications is to make agents "do what intelligent agents should do". The BDI (beliefs, desires, intentions) architecture was proposed to meet this challenge, and it has attracted much attention. The main difficulty with this architecture is the formalization problem. In this paper, we discuss the "procedure descriptive framework", a method for formalizing the BDI architecture, and we present decision models for intelligent agents in this framework. This research provides a technique for online agents to form beliefs and make decisions according to a current, uncertain environment.
package main

import (
	"log"

	jira "github.com/andygrunwald/go-jira"
)

// authJiraClient authenticates the client with the given username, API token,
// and URL domain. The package clause and imports above were missing from the
// original snippet; they assume the github.com/andygrunwald/go-jira library,
// whose BasicAuthTransport and NewClient are used below.
func authJiraClient(user, token, url string) *jira.Client {
	tp := jira.BasicAuthTransport{
		Username: user,
		Password: token,
	}
	jiraClient, err := jira.NewClient(tp.Client(), url)
	if err != nil {
		log.Fatalf("Unable to contact Jira: %s", err)
	}
	return jiraClient
}
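A usage sketch under the same go-jira assumption; the base URL, environment variable names, and issue key are placeholders, and "fmt" and "os" would need to join the import block:

// Hypothetical caller for authJiraClient.
func main() {
	client := authJiraClient(os.Getenv("JIRA_USER"), os.Getenv("JIRA_TOKEN"), "https://example.atlassian.net")
	issue, _, err := client.Issue.Get("PROJ-123", nil)
	if err != nil {
		log.Fatalf("fetching issue: %s", err)
	}
	fmt.Printf("%s: %s\n", issue.Key, issue.Fields.Summary)
}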
<filename>src/main/java/gaia/model/ModelGaiaWerecat.java package gaia.model; import net.minecraft.client.model.ModelRenderer; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.init.Items; import net.minecraft.inventory.EntityEquipmentSlot; import net.minecraft.item.ItemStack; import net.minecraft.util.math.MathHelper; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; @SideOnly(Side.CLIENT) public class ModelGaiaWerecat extends ModelGaia { private ModelRenderer head; private ModelRenderer headeyes; private ModelRenderer headaccessory; private ModelRenderer neck; private ModelRenderer bodytop; private ModelRenderer bodymiddle; private ModelRenderer bodymiddlebutton; private ModelRenderer bodybottom; private ModelRenderer rightchest; private ModelRenderer leftchest; private ModelRenderer rightarm; private ModelRenderer leftarm; private ModelRenderer rightleg; private ModelRenderer leftleg; private ModelRenderer leash; private ModelRenderer waist; private ModelRenderer tail1; private ModelRenderer tail2; public ModelGaiaWerecat() { textureWidth = 128; textureHeight = 64; head = new ModelRenderer(this, 0, 0); head.addBox(-3F, -6F, -3F, 6, 6, 6); head.setRotationPoint(0F, 1F, -2.5F); head.setTextureSize(64, 32); setRotation(head, 0F, 0F, 0F); headeyes = new ModelRenderer(this, 24, 0); headeyes.addBox(-3F, -6F, -3.1F, 6, 6, 0); headeyes.setRotationPoint(0F, 1F, -2.5F); headeyes.setTextureSize(64, 32); setRotation(headeyes, 0F, 0F, 0F); headaccessory = new ModelRenderer(this, 36, 0); headaccessory.addBox(-3.5F, -6.5F, -3.5F, 7, 7, 7); headaccessory.setRotationPoint(0F, 1F, -2.5F); headaccessory.setTextureSize(64, 32); setRotation(headaccessory, 0F, 0F, 0F); neck = new ModelRenderer(this, 0, 12); neck.addBox(-1F, -1F, -1F, 2, 2, 2); neck.setRotationPoint(0F, 1F, -2.5F); neck.setTextureSize(64, 32); setRotation(neck, 0F, 0F, 0F); bodytop = new ModelRenderer(this, 0, 16); bodytop.addBox(-2.5F, 0F, -1F, 5, 6, 3); bodytop.setRotationPoint(0F, 1F, -3F); bodytop.setTextureSize(64, 32); setRotation(bodytop, 0.0872665F, 0F, 0F); bodymiddle = new ModelRenderer(this, 0, 25); bodymiddle.addBox(-2F, 5.5F, -1.5F, 4, 3, 2); bodymiddle.setRotationPoint(0F, 1F, -3F); bodymiddle.setTextureSize(64, 32); setRotation(bodymiddle, 0.2617994F, 0F, 0F); bodymiddlebutton = new ModelRenderer(this, 0, 25); bodymiddlebutton.addBox(-0.5F, 5.8F, -2.6F, 1, 2, 0); bodymiddlebutton.setRotationPoint(0F, 1F, -2F); bodymiddlebutton.setTextureSize(64, 32); setRotation(bodymiddlebutton, 0.2617994F, 0F, 0F); bodybottom = new ModelRenderer(this, 0, 30); bodybottom.addBox(-3F, 8F, -2.5F, 6, 3, 3); bodybottom.setRotationPoint(0F, 1F, -3F); bodybottom.setTextureSize(64, 32); setRotation(bodybottom, 0.3490659F, 0F, 0F); rightchest = new ModelRenderer(this, 0, 36); rightchest.addBox(-1F, -1F, -1F, 2, 2, 2); rightchest.setRotationPoint(-1.3F, 3F, -4F); rightchest.setTextureSize(64, 32); setRotation(rightchest, 0.9599311F, 0.1745329F, 0.0872665F); leftchest = new ModelRenderer(this, 0, 36); leftchest.mirror = true; leftchest.addBox(-1F, -1F, -1F, 2, 2, 2); leftchest.setRotationPoint(1.3F, 3F, -4F); leftchest.setTextureSize(64, 32); setRotation(leftchest, 0.9599311F, -0.1745329F, -0.0872665F); rightarm = new ModelRenderer(this, 16, 12); rightarm.addBox(-2F, -1F, -1F, 2, 8, 2); rightarm.setRotationPoint(-2.5F, 2.5F, -2.5F); rightarm.setTextureSize(64, 32); setRotation(rightarm, -0.1745329F, 0F, 0.1745329F); leftarm = new ModelRenderer(this, 16, 
36); leftarm.addBox(0F, -1F, -1F, 2, 8, 2); leftarm.setRotationPoint(2.5F, 2.5F, -2.5F); leftarm.setTextureSize(64, 32); setRotation(leftarm, -0.1745329F, 0F, -0.1745329F); rightleg = new ModelRenderer(this, 88, 0); rightleg.addBox(-1.5F, -2F, -2F, 3, 8, 3); rightleg.setRotationPoint(-2.5F, 12F, 0F); rightleg.setTextureSize(64, 32); setRotation(rightleg, -0.4363323F, -0.0872665F, -0.0349066F); leftleg = new ModelRenderer(this, 88, 0); leftleg.addBox(-1.5F, -2F, -2F, 3, 8, 3); leftleg.setRotationPoint(2.5F, 12F, 0F); leftleg.setTextureSize(64, 32); setRotation(leftleg, -0.4363323F, 0.0872665F, 0.0349066F); ModelRenderer rightear = new ModelRenderer(this, 36, 28); rightear.addBox(0F, 0F, -4F, 3, 3, 4); rightear.setRotationPoint(-2F, -5F, 0.5F); rightear.setTextureSize(128, 64); setRotation(rightear, -0.7853982F, 0.7853982F, 0F); ModelRenderer leftear = new ModelRenderer(this, 36, 28); leftear.mirror = true; leftear.addBox(-3F, 0F, -4F, 3, 3, 4); leftear.setRotationPoint(2F, -5F, 0.5F); leftear.setTextureSize(128, 64); setRotation(leftear, -0.7853982F, -0.7853982F, 0F); ModelRenderer hair1 = new ModelRenderer(this, 36, 14); hair1.addBox(-4F, -6F, 1F, 8, 4, 3); hair1.setRotationPoint(0F, 1F, -2.5F); hair1.setTextureSize(128, 64); setRotation(hair1, 0F, 0F, 0F); ModelRenderer hair2 = new ModelRenderer(this, 36, 21); hair2.addBox(-4.5F, -3F, 1.5F, 9, 4, 3); hair2.setRotationPoint(0F, 1F, -2.5F); hair2.setTextureSize(128, 64); setRotation(hair2, 0F, 0F, 0F); leash = new ModelRenderer(this, 36, 35); leash.addBox(-2F, -0.5F, -2F, 4, 1, 4); leash.setRotationPoint(0F, 1F, -2.5F); leash.setTextureSize(128, 64); setRotation(leash, 0.2617994F, 0F, 0F); ModelRenderer rightarmlower = new ModelRenderer(this, 64, 0); rightarmlower.addBox(-4.5F, 2F, -1.5F, 2, 8, 3); rightarmlower.setRotationPoint(-2.5F, 2.5F, -2.5F); rightarmlower.setTextureSize(128, 64); setRotation(rightarmlower, -0.1745329F, 0F, -0.0872665F); ModelRenderer leftarmlower = new ModelRenderer(this, 74, 0); leftarmlower.addBox(2.5F, 2F, -1.5F, 2, 8, 3); leftarmlower.setRotationPoint(2.5F, 2.5F, -2.5F); leftarmlower.setTextureSize(128, 64); setRotation(leftarmlower, -0.1745329F, 0F, 0.0872665F); ModelRenderer righthand = new ModelRenderer(this, 64, 11); righthand.addBox(-1.5F, 8F, -2F, 2, 4, 4); righthand.setRotationPoint(-2.5F, 2.5F, -2.5F); righthand.setTextureSize(128, 64); setRotation(righthand, -0.1745329F, 0F, 0.1745329F); ModelRenderer lefthand = new ModelRenderer(this, 76, 11); lefthand.addBox(-0.5F, 8F, -2F, 2, 4, 4); lefthand.setRotationPoint(2.5F, 2.5F, -2.5F); lefthand.setTextureSize(128, 64); setRotation(lefthand, -0.1745329F, 0F, -0.1745329F); waist = new ModelRenderer(this, 64, 19); waist.addBox(-3.5F, 7.5F, -3F, 7, 4, 4); waist.setRotationPoint(0F, 1F, -3F); waist.setTextureSize(64, 32); setRotation(waist, 0.3490659F, 0F, 0F); tail1 = new ModelRenderer(this, 64, 27); tail1.addBox(-1F, 7F, -3.5F, 2, 5, 2); tail1.setRotationPoint(0F, 1F, -2F); tail1.setTextureSize(64, 32); setRotation(tail1, 0.6108652F, 0F, 0F); tail2 = new ModelRenderer(this, 64, 34); tail2.addBox(-1.5F, 11F, -5F, 3, 8, 3); tail2.setRotationPoint(0F, 1F, -2F); tail2.setTextureSize(64, 32); setRotation(tail2, 0.6981317F, 0F, 0F); ModelRenderer chain = new ModelRenderer(this, 36, 35); chain.addBox(-1.5F, 2.5F, -2.5F, 4, 1, 4); chain.setRotationPoint(2F, 12F, 0F); chain.setTextureSize(64, 32); setRotation(chain, -0.4363323F, 0.0872665F, 0.0349066F); ModelRenderer rightfoot = new ModelRenderer(this, 88, 11); rightfoot.addBox(-1.5F, 3.5F, 1F, 3, 8, 2); 
rightfoot.setRotationPoint(-2.5F, 12F, 0F); rightfoot.setTextureSize(64, 32); setRotation(rightfoot, -0.4363323F, -0.0872665F, -0.0349066F); ModelRenderer leftfoot = new ModelRenderer(this, 88, 11); leftfoot.addBox(-1.5F, 3.5F, 1F, 3, 8, 2); leftfoot.setRotationPoint(2.5F, 12F, 0F); leftfoot.setTextureSize(64, 32); setRotation(leftfoot, -0.4363323F, 0.0872665F, 0.0349066F); ModelRenderer rightfootlower = new ModelRenderer(this, 88, 21); rightfootlower.addBox(-1.5F, 10F, -7F, 3, 1, 3); rightfootlower.setRotationPoint(-2.5F, 12F, 0F); rightfootlower.setTextureSize(64, 32); setRotation(rightfootlower, 0.1745329F, -0.0872665F, -0.0349066F); ModelRenderer leftfootlower = new ModelRenderer(this, 88, 21); leftfootlower.addBox(-1.5F, 10F, -7F, 3, 1, 3); leftfootlower.setRotationPoint(2.5F, 12F, 0F); leftfootlower.setTextureSize(64, 32); setRotation(leftfootlower, 0.1745329F, 0.0872665F, 0.0349066F); convertToChild(head, rightear); convertToChild(head, leftear); convertToChild(head, hair1); convertToChild(head, hair2); convertToChild(rightarm, rightarmlower); convertToChild(rightarm, righthand); convertToChild(leftarm, leftarmlower); convertToChild(leftarm, lefthand); convertToChild(rightleg, rightfoot); convertToChild(rightleg, rightfootlower); convertToChild(leftleg, chain); convertToChild(leftleg, leftfoot); convertToChild(leftleg, leftfootlower); } @Override public void render(Entity entityIn, float limbSwing, float limbSwingAmount, float ageInTicks, float netHeadYaw, float headPitch, float scale) { super.render(entityIn, limbSwing, limbSwingAmount, ageInTicks, netHeadYaw, headPitch, scale); setRotationAngles(limbSwing, limbSwingAmount, ageInTicks, netHeadYaw, headPitch, scale, entityIn); head.render(scale); headaccessory.render(scale); neck.render(scale); bodytop.render(scale); bodymiddle.render(scale); bodymiddlebutton.render(scale); bodybottom.render(scale); rightchest.render(scale); leftchest.render(scale); rightarm.render(scale); leftarm.render(scale); rightleg.render(scale); leftleg.render(scale); leash.render(scale); waist.render(scale); tail1.render(scale); tail2.render(scale); if (entityIn.ticksExisted % 60 == 0 && limbSwingAmount <= 0.1F) { headeyes.render(scale); } } @Override public void setRotationAngles(float limbSwing, float limbSwingAmount, float ageInTicks, float netHeadYaw, float headPitch, float scaleFactor, Entity entityIn) { ItemStack itemstack = ((EntityLivingBase) entityIn).getItemStackFromSlot(EntityEquipmentSlot.HEAD); // head head.rotateAngleY = netHeadYaw / 57.295776F; head.rotateAngleX = headPitch / 57.295776F; headeyes.rotateAngleY = head.rotateAngleY; headeyes.rotateAngleX = head.rotateAngleX; headaccessory.rotateAngleY = head.rotateAngleY; headaccessory.rotateAngleX = head.rotateAngleX; // arms rightarm.rotateAngleX = MathHelper.cos(limbSwing * 0.6662F + (float) Math.PI) * 0.8F * limbSwingAmount * 0.5F; leftarm.rotateAngleX = MathHelper.cos(limbSwing * 0.6662F) * 0.8F * limbSwingAmount * 0.5F; rightarm.rotateAngleZ = 0.0F; leftarm.rotateAngleZ = 0.0F; if (swingProgress > -9990.0F) { holdingMelee(); } rightarm.rotateAngleZ += (MathHelper.cos(ageInTicks * 0.09F) * 0.025F + 0.025F) + 0.1745329F; rightarm.rotateAngleX += MathHelper.sin(ageInTicks * 0.067F) * 0.025F; leftarm.rotateAngleZ -= (MathHelper.cos(ageInTicks * 0.09F) * 0.025F + 0.025F) + 0.1745329F; leftarm.rotateAngleX -= MathHelper.sin(ageInTicks * 0.067F) * 0.025F; if (itemstack.getItem() == Items.FEATHER) { if (entityIn.motionX * entityIn.motionX + entityIn.motionZ * entityIn.motionZ > 
2.500000277905201E-7D) { animationFlee(); } } // body tail1.rotateAngleY = MathHelper.cos(degToRad((float) entityIn.ticksExisted * 7)) * degToRad(15); tail2.rotateAngleY = MathHelper.cos(degToRad((float) entityIn.ticksExisted * 7)) * degToRad(20); // legs rightleg.rotateAngleX = MathHelper.cos(limbSwing * 0.6662F) * 0.5F * limbSwingAmount; leftleg.rotateAngleX = MathHelper.cos(limbSwing * 0.6662F + (float) Math.PI) * 0.5F * limbSwingAmount; rightleg.rotateAngleX -= 0.4363323F; leftleg.rotateAngleX -= 0.4363323F; } public void holdingMelee() { float f6; float f7; f6 = 1.0F - swingProgress; f6 *= f6; f6 *= f6; f6 = 1.0F - f6; f7 = MathHelper.sin(f6 * (float) Math.PI); float f8 = MathHelper.sin(swingProgress * (float) Math.PI) * -(head.rotateAngleX - 0.7F) * 0.75F; // right arm rightarm.rotateAngleX = (float) ((double) rightarm.rotateAngleX - ((double) f7 * 1.2D + (double) f8)); rightarm.rotateAngleY += (bodytop.rotateAngleY * 2.0F); rightarm.rotateAngleZ = (MathHelper.sin(swingProgress * (float) Math.PI) * -0.4F); // left arm leftarm.rotateAngleX = (float) ((double) leftarm.rotateAngleX - ((double) f7 * 1.2D + (double) f8)); leftarm.rotateAngleY += (bodytop.rotateAngleY * 2.0F); leftarm.rotateAngleZ -= (MathHelper.sin(swingProgress * (float) Math.PI) * -0.4F); } private void animationFlee() { rightarm.rotateAngleX += 1.0472F; leftarm.rotateAngleX += 1.0472F; } }
//! Shooter-master game
//! Author djeck
extern crate glutin_window;
extern crate graphics;
extern crate opengl_graphics;
extern crate piston;

use piston::window::WindowSettings;
use piston::event_loop::*;
use piston::input::*;
use glutin_window::GlutinWindow;
use opengl_graphics::{GlGraphics, OpenGL};

pub struct Player {
    gl: GlGraphics,
    width: u32,
    height: u32,
}

pub struct Game {
    gl: GlGraphics,
    rows: u32,
    cols: u32,
    player: Player,
    square_width: u32,
    square_height: u32,
    score: u32,
}

impl Game {
    fn render(&mut self, args: &RenderArgs) {
        use graphics;

        const GREEN: [f32; 4] = [0.0, 1.0, 0.0, 1.0];
        const WHITE: [f32; 4] = [1.0; 4];

        self.gl.draw(args.viewport(), |c, gl| {
            // Clear the whole viewport to the background colour.
            graphics::clear(GREEN, gl);
            // rectangle([1.0, 0.0, 0.0, 1.0], // red
            //           [0.0, 0.0, 32.0, 32.0],
            //           c.transform,
            //           gl);
            // Rectangle::new([0.5, 1.0, 0.0, 0.3])
            //     .draw([50.0, 50.0, 100.0, 100.0], &draw_state, c.transform, g);
        });

        //self.player.render(args); // TODO replace
        //self.element[i].render(&mut self.gl, args, self.square_width); // TODO
    }

    fn update(&mut self, _args: &UpdateArgs) -> bool {
        true
    }

    fn pressed(&mut self, btn: &Button) {
        match btn {
            &Button::Keyboard(Key::Up) => println!("Key up"),
            &Button::Keyboard(Key::Down) => println!("Key down"),
            &Button::Keyboard(Key::Left) => println!("Key left"),
            &Button::Keyboard(Key::Right) => println!("Key right"),
            _ => println!("Other key"),
        };
    }
}

fn main() {
    let opengl = OpenGL::V3_2; // OpenGL::V2_1

    const COLS: u32 = 30;
    const ROWS: u32 = 20;
    const SQUARE_WIDTH: u32 = 20;
    const SQUARE_HEIGHT: u32 = 20;

    let width = COLS * SQUARE_WIDTH;
    let height = ROWS * SQUARE_HEIGHT;

    let mut window: GlutinWindow = WindowSettings::new("Shooter-master", [width, height])
        .opengl(opengl)
        .exit_on_esc(true)
        .build()
        .unwrap();

    let mut game = Game {
        gl: GlGraphics::new(opengl),
        rows: ROWS,
        cols: COLS,
        square_width: SQUARE_WIDTH,
        square_height: SQUARE_HEIGHT,
        score: 0,
        player: Player {
            gl: GlGraphics::new(opengl),
            width: SQUARE_WIDTH,
            height: SQUARE_HEIGHT,
        },
    };

    // Run the event loop at 10 updates per second.
    let mut events = Events::new(EventSettings::new()).ups(10);
    while let Some(e) = events.next(&mut window) {
        if let Some(r) = e.render_args() {
            game.render(&r);
        }
        if let Some(u) = e.update_args() {
            if !game.update(&u) {
                break;
            }
        }
        if let Some(k) = e.button_args() {
            if k.state == ButtonState::Press {
                game.pressed(&k.button);
            }
        }
    }
}
Microsoft is working on a secretive new project in its Windows Phone division, according to a job description it posted over the weekend. The description, for a software development engineer in test, gives few hints about the project. "We are a team working on a top secret project inside the Windows Phone division. Our mission...GO BIG! DISRUPT THE MARKET!" the job description reads. The advertisement says the project involves the first version of a new feature set and the group will complete planning for the features in a month. "We can't give you many details on this [job description], but I can assure you we have a passionate group of engineers charged and ready to take on the challenge," the ad reads. The Windows Phone News blog first spotted the ad, which is running on Microsoft's website and on LinkedIn. One analyst suspected there's not much of a secret behind the job description. "I am going to go out on a limb and guess that the manager who put up this hurried [job description] used a template from early last year before they went public with the Windows Phone 7 UI (in March 2010)," said Al Hilwa, an analyst with IDC. However, Microsoft could be developing new capabilities that it hopes will set Windows Phone apart from the competition. Microsoft lags behind market leaders Google, Apple and Research In Motion, and some unique new features could help Microsoft expand its market share. According to comScore, Microsoft's market share in smartphones dipped to 5.8 percent in May, down from 7.7 percent in February.
Japan and the US are to discuss plans for their troops to jointly deal with a potential stand-off between China and Taiwan, Kyodo news agency reported, a move that has already raised Beijing's ire. Defense and foreign affairs officials from Tokyo and Washington would open talks next month and assess various crises that might occur across the Taiwan Strait, including providing logistical support for US troops in the event of a conflict, the agency said late on Wednesday, citing officials from both countries. Kyodo said the discussions were thought to reflect the wishes of Prime Minister Shinzo Abe, who is considered to be pro-Taiwan. Abe's predecessor, Junichiro Koizumi, told the Diet in 2005 that Japan did not anticipate providing military cooperation to the US during any crisis in the Taiwan Strait, the report said. Any Japanese military contribution would be limited under its Constitution, but the two sides would consider having Japanese troops provide rear-area support, including supplies, transport and medical services for US troops, as well as ship inspection and search and rescue work, Kyodo said. China quickly expressed concern over the plans. "Taiwan is an inseparable part of China's territory and any arrangement between Japan and the United States should respect the one-China principle," Chinese Foreign Ministry Spokesman Liu Jianchao (劉建超) told a news conference yesterday. "We have expressed great concern," he added, calling "independence forces" in Taiwan "the greatest threat to peace and stability across the Taiwan Strait." US-Japan talks on the issue could also stir controversy in Japan. The government has in the past been vague about whether Taiwan is part of Japan's "surrounding areas," meaning those to which its contingency legislation would apply. Japan and the US will also discuss details of how they would deal with contingencies in North Korea following Pyongyang's missile and nuclear tests last year, the Asahi Shimbun said yesterday. Tokyo and Washington last month began to study possible plans involving North Korea, including their response to a possible conflict on the peninsula or a missile attack on Japan, the Asahi Shimbun said. Japanese troops could carry out search and rescue missions for US troops, refuel US military planes and ships, and allow them to use Japanese air bases and sea ports for attacks on North Korea, the report said, citing unnamed officials. The allies could also work together to shoot down missiles and attack Pyongyang's missile bases, the report said. Tokyo and Washington already work together on security policy in the region. The US stations about 50,000 troops in Japan under a security treaty, and the allies stepped up efforts to build a missile defense system following North Korea's nuclear test in October. Foreign Ministry official Naoki Kumagai said Tokyo and Washington "are discussing responses to possible regional contingencies," but refused to elaborate. He agreed, however, that the nuclear standoff with North Korea and tension between China and Taiwan were "key concerns" facing Japan and the US in the region. Japan, which resumed ties with communist China in 1972, has so far balked at the prospect of getting involved in any conflict in the Taiwan Strait. The reports came amid a recent drive by Tokyo to raise its political and military profile overseas.
/*
 * I2C.H
 *
 * Created on: Jul 9, 2018
 * Author: Phong
 */

#ifndef I2C_I2C_H_
#define I2C_I2C_H_

#include <msp430.h>
#include <stdint.h>

/* Shared transfer buffers, presumably defined in the accompanying I2C.c;
 * declared extern here so the header does not create tentative definitions
 * in every translation unit that includes it. */
extern uint8_t I2C_Read_Buffer[];
extern uint8_t I2C_Write_Buffer[];

void I2C_Init(unsigned char slave_address);
void I2C_Write_Byte(unsigned char add_register, unsigned char Data);
unsigned char I2C_Read_Byte(unsigned char add_register);
void I2C_Write_Data(unsigned char add_register, unsigned char numbyte, unsigned char *poi_send);
void I2C_Read_Data(unsigned char add_register, unsigned char numbyte, unsigned char *poi_read);

#endif /* I2C_I2C_H_ */
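A short usage sketch of this API on an MSP430, assuming a hypothetical sensor; the slave address, register addresses and configuration value below are invented for illustration:

#include <msp430.h>
#include "I2C.h"

#define SENSOR_ADDR   0x48  /* hypothetical 7-bit slave address */
#define REG_WHO_AM_I  0x0F  /* hypothetical identity register */
#define REG_CONFIG    0x01  /* hypothetical configuration register */

int main(void)
{
    WDTCTL = WDTPW | WDTHOLD;          /* stop the watchdog */

    I2C_Init(SENSOR_ADDR);             /* point the I2C peripheral at the slave */

    unsigned char id = I2C_Read_Byte(REG_WHO_AM_I);
    I2C_Write_Byte(REG_CONFIG, 0x80);  /* example configuration value */

    unsigned char data[4];
    I2C_Read_Data(0x02, 4, data);      /* burst-read four bytes starting at 0x02 */

    (void)id;
    for (;;);                          /* nothing else to do in this sketch */
}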
OSLO, Nov 15 (Reuters) - Norway’s Equinor has bought a 9.7-percent stake in solar energy producer Scatec Solar, raising its ownership to 10 percent, the state-controlled energy firm said on Thursday. Oil and gas firms are rushing to invest in renewable energy to reduce their carbon footprint in the wake of the 2016 Paris Agreement and as falling costs have made solar and wind power competitive with other energy sources. “This is in line with our strategy to develop into a broad energy company,” Equinor added. Equinor said the investment was made with a “long term perspective,” declining to say whether it planned to increase it further in the future. “This is the level that we are comfortable to have. We never comment on investment plans,” said an Equinor spokesman. Shares in Scatec Solar were trading 11.2 percent higher by 1000 GMT. Scatec Solar operates plants with an installed capacity of 357 megawatts (MW) and has another 1,057 MW under construction. The Equinor spokesman said the company supported Scatec Solar’s current strategy and would not seek a seat on its board. Equinor has declined to comment on its future investment plans in solar. “We will continue to build our position within renewable energy; we don’t want to give any indications of what business opportunities we are looking at,” the spokesman said. Its peers in Europe have invested more in solar. Anglo-Dutch Shell acquired a 43.9 percent stake in the U.S. solar power developer Silicon Ranch Corporation for $217 million in January, following BP’s acquisition of a 43 percent stake in Lightsource, Europe’s largest solar developer, for $200 million in 2017.
<reponame>awemulya/fieldsight-kobocat
from django.core.management.base import BaseCommand

from onadata.apps.fsforms.models import Stage, FInstance


class Command(BaseCommand):
    help = 'Safely delete a substage and notify'

    def add_arguments(self, parser):
        parser.add_argument('substage_id', type=int)

    def handle(self, *args, **options):
        substage_id = options['substage_id']
        stage = Stage.objects.get(pk=substage_id)
        try:
            form = stage.stage_forms
            # Refuse to delete if any site- or project-level submissions exist.
            if FInstance.objects.filter(site_fxf=form).exists() or FInstance.objects.filter(project_fxf=form).exists():
                self.stdout.write("Substage form has submissions! Can't delete.")
                return
            else:
                # Soft-delete the form, detach it from the stage, then remove the stage.
                form.is_deleted = True
                form.stage = None
                form.save()
                stage.delete()
                self.stdout.write('Substage deleted')
        except Exception:
            # stage.stage_forms raised, so the substage has no form attached.
            self.stdout.write('Substage has no form')
            stage.delete()
            self.stdout.write('Substage deleted')
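Invocation is via manage.py; the command name comes from the module's filename, which is not shown here, so the name below is an assumption:

# assuming the file is saved as management/commands/delete_substage.py
python manage.py delete_substage 42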
While foreclosure activity hit an all-time record in the first quarter, according to statistics released Thursday morning by the Mortgage Bankers Association, a shift of the mortgage mess towards prime borrowers appears to be taking place as well -- signaling that the credit crunch that began among those with less-than-perfect credit may now be marching onward towards borrowers usually deemed better credit risks. It shouldn't surprise anyone at this point to learn that first quarter foreclosure activity was the highest since 1979, the first year MBA's data on foreclosure activity is available. The percentage of loans in the foreclosure process was 2.47 percent at the end of the first quarter, the MBA said, an increase of 43 basis points from the fourth quarter of 2007 and 119 basis points from one year ago. The percent of loans on which foreclosure actions were started during the quarter was 0.99 percent on a seasonally-adjusted basis, 16 basis points higher than the previous quarter and up 41 basis points from one year ago, the MBA said. And while the overall numbers for the first quarter show that the majority of troubled borrowers are in the subprime credit category, the pace at which prime borrowers are running into a wall now strongly outstrips anything being seen in the subprime arena.

Velocity shifts away from subprime

Among subprime borrowers, severe delinquencies -- a measure that includes 90+ day delinquencies and foreclosures -- increased from 14.44 percent of loans in the fourth quarter to 16.42 percent in Q1. In contrast, just 1.99 percent of all prime borrowers were severely delinquent at the end of Q1, compared to 1.67 percent at the end of last year, numbers that illustrate the relatively greater distress felt by subprime borrowers. But it's the velocity of these changes that's most worth noting from an investor's perspective -- the Q4 to Q1 change in severe delinquencies strongly favors prime borrowers, for example, with severe DQs increasing by 19.2 percent for prime (the rise from 1.67 percent to 1.99 percent of loans works out to 0.32/1.67, or roughly 19.2 percent) and 13.7 percent for subprime borrowers. By splitting out fixed-rate and adjustable-rate DQs, the increasing distress now being felt by prime borrowers becomes even more evident: prime ARMs showed the highest velocity of change of any major loan category in nearly every measure of distress published by the MBA. Severe delinquencies increased a whopping 28.71 percent among prime ARMs during Q1, while in comparison, subprime ARMs saw severe DQs jump 18 percent. It's a pattern repeated outside of ARMs, too. The velocity of severe delinquencies among prime, fixed-rate borrowers actually came close to doubling that recorded by subprime FRMs during the first quarter. Prime FRMs saw severe DQs increase 12.1 percent in the first quarter, while subprime FRMs posted a 6.7 percent increase in severe delinquencies over the same time frame. It's a difference Jay Brinkmann, MBA's vice president for research and economics, took notice of. "Prime ARMs represent 15 percent of the loans outstanding, but 23 percent of the foreclosures started," he said. "Out of the approximately 516,000 foreclosures started during the first quarter, subprime ARM loans accounted for about 195,000 and prime ARM loans 117,000, but the increase in prime ARM foreclosures exceeded subprime ARM foreclosures with increases of 29,000 and 20,000 respectively over the previous quarter.”

Location, location, location

The old real estate mantra that location matters is proving true as the mortgage and housing mess rolls on, as well.
The MBA said that a continued increase in the overall delinquency rate was driven by increases in the number of loans 60 and 90 or more days past due, primarily in California and Florida. "The problems in California and Florida are extraordinary and they are the main drivers of the national trend," Brinkmann said. "The quarterly rate of foreclosure starts on subprime ARM loans in California was 9.24 percent. "This rate, combined with Florida’s rate of 8.25 percent, drove up the national average foreclosure start rate to the point where 43 states were below the national average of 6.32 percent." California saw a total of approximately 109,000 foreclosure starts and Florida 77,000, the MBA reported; in contrast, the next highest states were Texas, Michigan and Ohio with between 24,000 and 20,000 each. Taking California, Florida, Arizona and Nevada together, the four states represented 62 percent of all foreclosures started on prime ARM loans, and 84 percent of the increase in prime ARM foreclosures, Brinkmann noted. "About 20 states had drops in their number of foreclosures started, including Michigan, Ohio and Indiana where problems have been the most severe for the last several years," he said. For more information, visit http://www.mortgagebankers.org.
// Commit saves a record database to hard disk. func (db *DB) Commit() error { if db.changed { if err := db.saveTaxList(); err != nil { return err } } for _, tax := range db.tids { if err := tax.commit(); err != nil { return errors.Wrap(err, "records: db: commit") } } db.changed = false return nil }
/* * BSD 3-Clause License * * Copyright (c) 2021, Joint Research Centre (JRC) All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ package eu.cise.sim.api.messages.dto.label.incident; import eu.cise.datamodel.v1.entity.event.ObjectRoleInEventType; import eu.cise.datamodel.v1.entity.vessel.VesselType; import java.io.Serializable; import java.util.ArrayList; import java.util.List; public class VesselLabelDto implements Serializable { private static final long serialVersionUID = 42L; private final List<String> typeList; private final List<String> roleList; private static VesselLabelDto instance; static { List<String> typeList = new ArrayList<>(); for (VesselType type : VesselType.values()) { typeList.add(type.value()); } List<String> roleList = new ArrayList<>(); for (ObjectRoleInEventType type : ObjectRoleInEventType.values()) { roleList.add(type.value()); } instance = new VesselLabelDto(typeList, roleList); } private VesselLabelDto(List<String> typeList, List<String> roleList) { this.typeList = typeList; this.roleList = roleList; } public List<String> getTypeList() { return typeList; } public List<String> getRoleList() { return roleList; } public static VesselLabelDto getInstance() { return instance; } }
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from pathlib import Path
import shutil
import smtplib
import ssl
import os

import markdown

from ._data_dir import DATA_DIR


class Email:
    def __init__(self, username=None, password=None):
        self.email_addr = username or os.getenv('EMAIL_USERNAME', '')
        self.password = password or os.getenv('EMAIL_PASSWORD', '')
        self.connection = None

        # create ~/.teacherhelper/email_templates if needed
        self.template_dir = Path(
            DATA_DIR, '.teacherhelper', 'email_templates'
        )
        if not self.template_dir.exists():
            os.makedirs(self.template_dir)
        default_template = Path(self.template_dir, 'default.html')
        if not default_template.exists():
            shutil.copyfile(
                Path(Path(__file__).parent, 'email_default_template.html'),
                default_template
            )

    def __enter__(self):
        context = ssl.create_default_context()
        self.connection = smtplib.SMTP_SSL(
            'smtp.gmail.com',
            port=465,
            context=context,
        )
        self.connection.login(
            self.email_addr,
            self.password,
        )
        return self

    def __exit__(self, *_):
        if self.connection:
            self.connection.close()
            self.connection = None

    def send(
        self,
        *,
        to: str,
        subject: str,
        message: str,
        cc: str = None,  # TODO: should support a list
        bcc: str = None,  # TODO: should support a list
        template_name: str = 'default.html'
    ):
        """Simple utility for sending an email. Helpful for mail merges!

        *message* should be a string of markdown text, which will be converted
        into html and plain text email attachments.

        *template_name* is the name of an html email template in
        ~/.teacherhelper/email_templates. An email template can be any html
        file with the template tag `{{ email_content }}` in it. The markdown
        input will be converted into html, and that html will replace the
        `{{ email_content }}` tag.
        """
        if not self.connection:
            raise ValueError(
                'Connection must be established in __enter__. Use this class '
                'as a context manager'
            )
        me = self.email_addr

        msg = MIMEMultipart('alternative')
        msg['Subject'] = subject
        msg['From'] = me
        msg['To'] = to
        if cc:
            msg['Cc'] = cc
        if bcc:
            msg['Bcc'] = bcc

        html = self.make_html_message(message, template_name)
        part1 = MIMEText(message, 'plain')
        part2 = MIMEText(html, 'html')
        msg.attach(part1)
        msg.attach(part2)

        self.connection.sendmail(me, to, msg.as_string())

    def make_html_message(
        self,
        markdown_message: str,
        template_name: str = 'default.html',
    ):
        template_path = Path(self.template_dir, template_name)
        with open(template_path, 'r') as fp:
            template = fp.read()
        html = template.replace('{{ email_content }}',
                                markdown.markdown(markdown_message))
        return html
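A usage sketch of the class above; the recipient address and markdown body are placeholders, and credentials are read from EMAIL_USERNAME / EMAIL_PASSWORD as the constructor's defaults expect:

# Hypothetical mail-merge usage of the Email helper above.
if __name__ == '__main__':
    with Email() as mailer:
        mailer.send(
            to='parent@example.com',
            subject='Progress report',
            message='# Hello!\n\nYour student is doing **great** this marking period.',
        )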
package com.jdt.fedlearn.frontend.service.impl.match; import com.jd.blockchain.ledger.TypedKVEntry; import com.jdt.fedlearn.frontend.constant.Constant; import com.jdt.fedlearn.frontend.constant.JdChainConstant; import com.jdt.fedlearn.frontend.constant.RequestConstant; import com.jdt.fedlearn.frontend.entity.match.*; import com.jdt.fedlearn.frontend.entity.project.MatchPartnerInfo; import com.jdt.fedlearn.frontend.entity.project.PartnerDTO; import com.jdt.fedlearn.frontend.exception.RandomServerException; import com.jdt.fedlearn.frontend.jdchain.config.JdChainCondition; import com.jdt.fedlearn.frontend.mapper.JdChainBaseMapper; import com.jdt.fedlearn.frontend.service.IMatchService; import com.jdt.fedlearn.frontend.service.IPartnerService; import com.jdt.fedlearn.frontend.util.HttpClientUtil; import com.jdt.fedlearn.frontend.util.JsonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Conditional; import org.springframework.stereotype.Service; import org.springframework.ui.ModelMap; import javax.annotation.Resource; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @Conditional(JdChainCondition.class) @Service public class MatchServiceJdchainImpl implements IMatchService { private static Logger logger = LoggerFactory.getLogger(MatchServiceJdchainImpl.class); @Resource private IPartnerService partnerService; @Resource JdChainBaseMapper jdChainBaseMapper; @Override public String matchStart(MatchStart matchStart) { String taskId = matchStart.getTaskId(); String userName = matchStart.getUsername(); String matchAlgorithm = matchStart.getMatchType(); String url = randomServer(userName, taskId, matchAlgorithm); List<PartnerDTO> partnerDTOS = partnerService.queryPartnerDTOList(taskId); List<MatchPartnerInfo> clientInfosNew = partnerDTOS.stream().map(x -> new MatchPartnerInfo(x.url(), x.getDataset(), "uid")).collect(Collectors.toList()); Map<String, Object> request = new HashMap<>(); request.put(TASK_ID, taskId); request.put(MATCH_ALGORITHM, matchStart.getMatchType()); request.put(CLIENT_LIST, clientInfosNew); logger.info("random server is {}", url); String modelMap = HttpClientUtil.doHttpPost(url + RequestConstant.MATCH_START, request); return modelMap; } @Override public String matchProgress(MatchProgressReq matchProgressReq) { String matchId = matchProgressReq.getMatchId(); String taskId = matchId.substring(0, matchId.indexOf("-")); Map<String, Object> request = new HashMap<>(); request.put(MATCH_ID, matchProgressReq.getMatchId()); String url = getRandomServer(taskId); String modelMap = HttpClientUtil.doHttpPost(url + RequestConstant.MATCH_PROGRESS, request); return modelMap; } @Override public String detail(MatchDetailReq matchProgressReq) { String matchId = matchProgressReq.getMatchId(); String taskId = matchId.substring(0, matchId.indexOf("-")); Map<String, Object> request = new HashMap<>(); request.put(MATCH_ID, matchProgressReq.getMatchId()); String url = getRandomServer(taskId); String modelMap = HttpClientUtil.doHttpPost(url + RequestConstant.MATCH_PROGRESS, request); return modelMap; } @Override public String matchList(MatchListReq matchListReq) { return null; } @Override public String matchDelete(MatchDeleteReq deleteReq) { return null; } private String randomServer(String userName, String taskId, String matchAlgorithm) { String result = jdChainBaseMapper.invokeRandomtraining(userName, taskId, matchAlgorithm); return parseRandomServer(result); } private String 
getRandomServer(String taskId) { String queryKey = JdChainConstant.INVOKE_RANDOM_TRAINING + JdChainConstant.SEPARATOR + taskId + JdChainConstant.SEPARATOR + JdChainConstant.FRONT; TypedKVEntry typedKVEntry = jdChainBaseMapper.queryByChaincode(queryKey); if (typedKVEntry != null) { String result = (String) typedKVEntry.getValue(); return parseRandomServer(result); } else { throw new RandomServerException("queryKey:" + queryKey + "server not found!"); } } private static final String IDENTITY = "identity"; private static final String API = "/api/"; private String parseRandomServer(String result) { ModelMap modelMap = JsonUtil.json2Object(result, ModelMap.class); String server = (String) JsonUtil.object2map(modelMap.get(JdChainConstant.SERVER)).get(IDENTITY); // String server = "127.0.0.1:8092"; return Constant.HTTP_PREFIX + server + API; } }
The Steelers head out west this week to take on the Arizona Cardinals after skating past the lowly Jaguars last week at home. Several key players, Casey Hampton and James Harrison among them, remain out, and Doug Legursky is now also sidelined with a dislocated toe. The offense needs to keep its foot on the gas this week and the defense just needs to keep up what it has been doing the last few weeks. As I do each and every week, here are my 7 things to watch for this week against the Cardinals as the Steelers look to move to 5-2 on the season and come back home to Pittsburgh for their tough match-ups against the Patriots and the Ravens. At the bottom is my score prediction for the game, and I will have my week 7 picks and predictions up for the other games later tonight. Beanie Capped – Cardinals running back Beanie Wells is a very strong north-south runner who has amassed 381 rushing yards in 5 games this season for a 4.8 yards per carry average. The Cardinals look to pound him early and often between the tackles and attack young nose tackle Steve McLendon, who likely will make his first career start Sunday. Wells is not a shifty back, so it is up to the Steelers defensive line and linebackers to stay solid in gap control and wrap him up on first contact. The Cardinals hope to force the Steelers into bringing an extra man down into the box to help defend against the run, opening up the deep passing game. The number one key this week, like most weeks, is to shut down the run and force the Cardinals into obvious 3rd and long passing situations. Collapse The Pocket – If the Steelers are able to get the Cardinals in obvious passing situations, the defense must press and collapse the pocket on quarterback Kevin Kolb. He is still not completely settled in with the new offense and has made some very poor decisions when under duress. Making him get rid of the ball early because of pressure is the Steelers' best chance at getting a few much-needed turnovers this week. The play of the Cardinals tackles has not been great this season, and Daryn Colledge has given up quite a few pressures inside at left guard. He and left tackle Levi Brown have combined to give up over 30 pressures through the first 5 games. Attack The Secondary – The Cardinals will be without starting free safety Kerry Rhodes after he broke his foot against the Vikings, so 3rd year safety Rashad Johnson now gets thrust into the lineup. The Cardinals have already given up a tremendous number of explosive plays through the air thus far this season as rookie Patrick Peterson is still trying to adjust to the NFL and the 3-4 defense. A.J. Jefferson has been up and down this year in coverage on the other side and very well could be the one saddled with the job of covering Mike Wallace this week, with help over the top from Johnson. This would mean that Hines Ward, Emmanuel Sanders and Antonio Brown should have some opportunities to make plays this week. Ben Roethlisberger has struggled with accuracy at times on his deeper balls this season, and he will likely not have the wind to blame this week. He needs to hit these big plays as they should be there for the taking. Worilds World? – All eyes will be on Jason Worilds this week if he dresses on Sunday. Worilds has missed the last two games due to a quad injury and also missed a golden opportunity to show what he can do full time at right outside linebacker with Harrison sidelined.
Lawrence Timmons has played outside instead and has not been very effective at stopping the run or rushing the passer there. Timmons is much better suited inside at his normal spot, and Worilds' ability to rush the passer could be just what the Steelers need this week. If Worilds is active and does not start, it may tell you what the Steelers think of the progress of the second-year, second-round draft pick. It is time to see what he has, and hopefully he makes a difference. Early On 3rd – While I do not want to downplay how crucial it is to contain Cardinals wide receiver Larry Fitzgerald, Early Doucet could be a bigger player to control on 3rd downs, as he leads all receivers this year with 13 third-down catches for 214 yards and has moved the chains 10 times. That equates to 16.5 yards per catch. He will likely draw William Gay in coverage depending on where he lines up. Counter Piking & Running Right – The Steelers get back left guard Chris Kemoeatu this week, and his ability to pull and finish on the counter pike play the Steelers love to run might be crucial in this game, as the Cardinals have not been good against runs to the right. They are allowing an average gain of 7.29 yards on runs to right guard, 4.29 on right tackle runs and 5.47 yards on right end runs. The Steelers are a heavy right-handed run team, so Ramon Foster, Marcus Gilbert and both tight ends will be counted on to control that side in the run game. Fried Suishi? – Shaun Suisham has been less than spectacular this season, as he is just 6 of 10 on field goal tries through the first 6 weeks. From outside 30 yards he is 3 of 7, and he has not attempted one of 50 yards or more. This has to stop before it costs the Steelers a game at the end, and hopefully it will not come down to needing him to drill one for the win this week. If it does, he better nail it or he could be hitting the road soon. Nearly 52% of his kickoffs have been returned this year, and he has just 13 touchbacks in 27 kicks. Steelers Cardinals Game Prediction – The Steelers should have steamrolled the Jaguars last week, but the offense took the second half off. The defense still is not totally sound against the run, despite their overall number one defensive ranking, and they have only produced 2 turnovers through 6 games. The Cardinals are a team in transition right now and have yet to put together a complete game. The Steelers are not in transition, but they too have yet to put together a complete game this year and have had problems on the road. These two teams know each other schematically, so I think it is a wash there. With the Patriots and Ravens on the horizon, this is the game they need to put everything together in. I think they will, and a late Cardinals garbage touchdown will make the final score look closer than the game actually was.
Larger right atrium than left atrium is associated with all-cause mortality in elderly patients with heart failure While left atrial (LA) enlargement is known as an early sign of left heart disease with prognostic implications in heart failure (HF), the importance of right atrial (RA) enlargement is less well studied, and the prognostic implications of interatrial size comparison are insufficiently understood. The aim of this study was to test the hypothesis that RA area larger than LA area in apical four-chamber view is associated with all-cause mortality in elderly patients with HF independent of left ventricular ejection fraction (LVEF).
Labor Productivity In Russia's Regions: Problems And Growth Perspectives This article is devoted to the study of labor productivity and the opportunities for labor productivity growth in the regions of Russia. Methodological issues of measuring labor productivity in the regions of Russia are considered. Key problems in the statistical measurement of productivity for comparative regional research are defined. A comparative characterization of the regions by labor productivity level and its dynamics is carried out. The author classifies the regions based on the dynamics of labor productivity over the last six years. It was revealed that regional differentiation in the labor productivity index is decreasing. The author identifies the key factors affecting labor productivity in the regions. The strong role of the mining industry in raising the level of labor productivity is demonstrated. Parameters characterizing the innovative development of regions are found to make no significant contribution to improving labor productivity. An analysis of the connection between the growth rates of labor productivity and the growth rates of real incomes of the population shows that the quality of the remuneration systems applied by enterprises is increasing. An increase in labor productivity is possible with the renewal of fixed assets, improvement of education levels, and a stronger correlation between salaries and the efficiency of enterprise structures. © 2019 Published by Future Academy www.FutureAcademy.org.UK
package freq_params_traffic

import "github.com/alibaba/sentinel-golang/core/freq_params_traffic/cache"

const (
	ConcurrencyMaxCount = 4000
	ParamsCapacityBase  = 4000
	ParamsMaxCapacity   = 20000
)

// ParamsMetric caches the frequent (hot-spot) parameters for each value.
// ParamsMetric is used for the pair <resource, TrafficShapingController>.
// In each cache, the key is the hot value and the value is the counter.
type ParamsMetric struct {
	// RuleTimeCounter records the last time a token was added.
	RuleTimeCounter cache.ConcurrentCounterCache
	// RuleTokenCounter records the number of tokens.
	RuleTokenCounter cache.ConcurrentCounterCache
	// ConcurrencyCounter records the number of concurrent goroutines.
	ConcurrencyCounter cache.ConcurrentCounterCache
}
1. Field of the Invention The present invention relates to a transmission unit and a two-way signal conversion method. More particularly, the present invention relates to a transmission unit which transports digital signals, and to a method to convert signal formats, transmission rates, hierarchical levels, and other attributes in both directions. 2. Description of the Related Art Synchronous Digital Hierarchy (SDH) and Synchronous Optical Network (SONET) specifications have been standardized and implemented as today's core data multiplexing technologies which provide an efficient way of integrating a wide variety of high-speed and low-speed services. Digital signals that this type of synchronous network carries are organized in accordance with prescribed hierarchical multiplexing levels, where lower-order group signals (i.e., slower signals) are combined into a higher-order signal (i.e., a faster signal). Each network element accepts such higher-order multiplexed signals from the upstream element and retransmits them to the downstream element, while extracting and inserting some lower-order signals. Some nodes with cross-connect capabilities provide consolidation and segregation of transport signals. As an approach for improved maintainability of the above transport systems, a signal loopback technique is disclosed in the Japanese Unexamined Patent Publication No. 10-243008 (1998). According to the disclosed technique, the signal in any specified time slot of a low-order group channel is looped back to another low-order group channel. This conventional technique, however, is limited to low-order group signals of the same hierarchical series of multiplexing levels, and it is therefore not possible to loop back a signal from one channel to a different type of channel, such as from a high-order SONET channel to a high-order SDH channel. That is, conventional systems do not support looping back between networks having different signal specifications. For operations and maintenance of various telecommunication services on a global scale, it is necessary to develop an improved transport system which supports efficient conversion, not only between SONET and SDH, but also between other signals having different specifications.
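For orientation, SONET and SDH define parallel multiplexing hierarchies whose line rates coincide at several levels, which is what makes cross-hierarchy conversion of the kind described above feasible at all. The following sketch is ours, not part of the patent; it merely tabulates the well-known rate-equivalent levels of the two hierarchies.

import java.util.Map;

// Illustrative only: SONET levels and their rate-equivalent SDH levels.
// STS-3 and STM-1 both run at 155.52 Mbit/s; each following pair is four
// times faster than the previous one.
public class HierarchyEquivalence {

    private static final Map<String, String> SONET_TO_SDH = Map.of(
            "STS-3", "STM-1",    // 155.52 Mbit/s
            "STS-12", "STM-4",   // 622.08 Mbit/s
            "STS-48", "STM-16",  // 2488.32 Mbit/s
            "STS-192", "STM-64"  // 9953.28 Mbit/s
    );

    // Returns the SDH level carrying the same line rate, or null when the
    // level is not in this illustrative table (e.g. STS-1 at 51.84 Mbit/s).
    public static String toSdh(String sonetLevel) {
        return SONET_TO_SDH.get(sonetLevel);
    }

    public static void main(String[] args) {
        System.out.println("STS-48 <-> " + toSdh("STS-48")); // prints STM-16
    }
}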
Electric vehicles need large electric power storage devices. The large electric power storage devices presently being developed are mainly secondary batteries (also known as storage batteries). These secondary batteries have the great disadvantage of a short cycle life. In particular, it is generally said that the nominal life of ordinary secondary batteries expires after about 300 to 500 discharges are completed, although the number of discharges differs depending on the kind, the rated values, and the depth of discharge. Among individual secondary batteries, lead-acid batteries are most often used. The life of lead-acid batteries shortens drastically when the depth of discharge exceeds 60-70%. If nickel-cadmium batteries are recharged before completion of discharge, a significant loss of storage capacity takes place. An electric double layer capacitor uses activated carbon as its electrode material. Activated carbon is electrochemically inactive and has a large specific surface. The activated carbon is combined with an electrolyte to form a large electric double layer capacitance. The electric double layer capacitor has attracted attention as a power source for an electric vehicle (electric vehicles are an alternative to vehicles with internal combustion engines because electric vehicles produce no harmful exhaust gas). Electric double layer capacitors can be charged in shorter times and have longer cycle lives than secondary batteries. A cycle life of more than 10,000 recharges has been estimated from the experiment carried out by the inventors. A secondary battery has the advantage that its working voltage does not drop greatly even after the battery has been discharged. Even if the voltage drops, only a slight voltage drop is observed under unloaded conditions. For example, in the case of a lead-acid battery, the terminal voltage in a fully charged state is 2.2 volts. This voltage drops only to about 1.8 volts except during charging and in an immediately subsequent transient state. This means the battery is a constant-voltage device which can be charged with little loss by the use of a voltage regulated source. Methods and apparatus for charging secondary batteries are known. However, techniques for efficiently charging an electric double layer capacitor of large capacitance in a short time have not yet been established. One conceivable method of charging an electric double layer capacitor is to use a voltage regulated source in the same way as in the above-described case of secondary batteries. This is discussed further below. The energy Ec stored in a capacitor having terminals across which a voltage V is developed is given by Ec = CV²/2, where C is the capacitance of the capacitor. The voltage V across the terminals drops as the capacitor is discharged. Conversely, if the capacitor is charged from a fully discharged state, the voltage across the terminals gradually rises from zero. That is, the capacitor is not a constant-voltage device. If the capacitor, which is not a constant-voltage device, is charged with a voltage regulated source producing an output voltage V, for example, then the output voltage V is principally applied to a resistive component existing in the charging circuit when the charging operation is started, i.e., when the voltage across the terminals is null. The electric power is wastefully consumed by this resistive component.
As the charging progresses and the voltage across the terminals increases, the electric power consumed by the resistive component (other than the capacitor) decreases and becomes zero when the voltage across the terminals reaches V. The total energy Ep supplied from the power supply until the charging operation is completed is equal to the product of the integrated current flowing to the capacitor, or the total charge Q, and the voltage V. That is, Ep = QV = CV². The charging efficiency, which is defined as the ratio of the energy Ec to the energy Ep, is therefore calculated at 50%. The fifty percent of the electric power that is not stored in the capacitor is consumed by the resistive component in the charging circuit, as described above. This means that if the capacitor is charged from a voltage regulated source, the maximum charging efficiency attainable is only 50%. As an example, as shown in FIG. 1, if a dry battery voltage source VS is connected in parallel with an electric double layer capacitor CB having an internal resistance RB and is electrically charged, then only half of the energy released from the dry battery voltage source VS can be stored in the capacitor. It is an object of the present invention to provide a method and apparatus for charging an electric double layer capacitor efficiently and quickly. It is another object of the invention to provide a method for charging an electric double layer capacitor up to its rated level in a short time. It is a further object of the invention to provide a method for charging an electric double layer capacitor efficiently and quickly if the capacitor is charged from a solar battery.
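To make the 50% result concrete, here is a small sketch (ours, not part of the patent) that evaluates both energies for an arbitrary capacitance and charging voltage; the ratio is independent of the particular values chosen.

// Constant-voltage charging of an ideal capacitor through a series resistance:
// stored energy Ec = C*V^2/2, energy drawn from the source Ep = Q*V = C*V^2,
// so the efficiency Ec/Ep is 50% regardless of C, V, or the resistance value.
public class ChargingEfficiency {

    public static void main(String[] args) {
        double capacitanceFarads = 100.0; // example value only
        double supplyVolts = 2.5;         // example value only

        double storedEnergy = 0.5 * capacitanceFarads * supplyVolts * supplyVolts; // Ec
        double suppliedEnergy = capacitanceFarads * supplyVolts * supplyVolts;     // Ep = QV

        System.out.printf("Ec = %.1f J, Ep = %.1f J, efficiency = %.0f%%%n",
                storedEnergy, suppliedEnergy, 100.0 * storedEnergy / suppliedEnergy);
        // -> Ec = 312.5 J, Ep = 625.0 J, efficiency = 50%
    }
}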
import * as React from 'react'; import { pure } from 'recompose'; import { PlayerForm, PlayerFormValue } from '../../shared/utils/player'; import { ArrowContainer } from './styles'; export interface Props { form: PlayerForm; } export const FormArrow = pure<Props>(({ form }) => { return ( <ArrowContainer className={PlayerFormValue[form]}> <svg viewBox="0 0 530 530" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlnsXlink="http://www.w3.org/1999/xlink" > {/* Generator: Sketch 49.3 (51167) - http://www.bohemiancoding.com/sketch */} <title>form_arrow</title> <desc>Created with Sketch.</desc> <g> <path // tslint:disable-next-line:max-line-length d="M510.098322,260.101745 C508.269106,255.716876 504.000568,252.855554 499.263733,252.855554 L370.773375,252.855554 L370.773375,30.7255941 C370.773375,24.2532684 365.519874,19 359.047262,19 L171.434959,19 C164.962348,19 159.708847,24.2532684 159.708847,30.7255941 L159.708847,252.856654 L30.7260071,252.856654 C25.9891721,252.856654 21.7206341,255.717976 19.8914177,260.07976 C18.0852864,264.464629 19.0702491,269.506843 22.4241789,272.860625 L256.352812,507.552783 C258.556886,509.756759 261.535958,511 264.65464,511 C267.773322,511 270.752395,509.756759 272.956468,507.575867 L507.565561,272.883709 C510.919491,269.528828 511.927539,264.487713 510.098322,260.101745 Z" className="arrow" transform="translate(265.000000, 265.000000) scale(1, -1) translate(-265.000000, -265.000000) " /> </g> </svg> </ArrowContainer> ); });
WASHINGTON – Evangelicals are hopeful that with the retirement of Justice Anthony Kennedy, President Donald Trump's second Supreme Court nominee will not only be pro-life but will fight to overturn Roe v. Wade. A conservative justice would give the high court a 5-4 majority in favor of backtracking on abortion rights. Conservative lawyers don't expect the justices to go for a sudden and complete reversal of Roe v. Wade, but rather that they would first revisit other court decisions like Planned Parenthood v. Casey or Whole Woman's Health v. Hellerstedt. Hellerstedt focused on intrusive abortion clinic regulations in Texas, and the court ruled 5-3 that the Lone Star State couldn't enact such restrictions on the delivery of abortion services. The Supreme Court could start with that case and reverse the ruling, which could force abortion clinics out of business. That reversal could lay the groundwork to tackle the 1973 Roe v. Wade decision that legalized abortion. The high court could also take a different route and declare that a fetus is a "person" under the 14th Amendment, which would bar states from permitting abortion. Congress has already proposed a ban on abortion 20 weeks after conception, but the measure was blocked in the Senate. Lawmakers argued exceptions need to be made in instances where pregnancy puts the life and health of a woman at risk. It's clear that overturning Roe v. Wade is not a one-step process; banning abortion nationwide would also require maneuvering through state laws.
Body and Anthropology: Symbolic Effectiveness Every human community creates its own representation of its surrounding world and of the men who constitute that world. It sets out in an orderly fashion the raison d'être of social and cultural organisation, and it ritualises the ties between men and their relationship with their environment. Man creates the world while the world creates man, through a relationship which varies with each society; ethnography shows us innumerable versions. Human
Polybrominated diphenyl ethers perturb axonal growth and actin distribution We investigated the toxicological effects of two of the most common polybrominated diphenyl ether (PBDE) flame retardants, BDE-47 and BDE-209, in a model PC12 cell line under two environmentally relevant exposure conditions: long-term exposure to microgram-per-liter levels of PBDEs and acute exposure to high concentrations of PBDEs. Cells treated under both long-term and acute exposure conditions showed significantly perturbed cell growth and differentiation. Importantly, even when the cells were exposed to microgram-per-liter concentrations of PBDEs over an extended period, both the fraction of differentiated cells and the axonal growth were affected. A calcium release assay showed that PBDEs perturbed intracellular calcium release in a concentration-dependent manner, indicating that intracellular Ca2+ homeostasis and signaling were involved in the neurotoxicity. More interestingly, depending on PBDE concentration and exposure conditions, the cytoskeletal F-actin distribution was altered. BIBECHANA 16, 64-78
<filename>scripts/factorial_recursion.c
#include <stdio.h>

/* Recursive factorial; note that a 32-bit int overflows for num > 12. */
int fact(int num) {
    if (num <= 1) {
        return 1;
    }
    return num * fact(num - 1);
}

int main() {
    int num, result;
    printf("Insert the number: ");
    scanf("%d", &num);
    result = fact(num);
    /* Example run: input 5 prints "Result = 120". */
    printf("Result = %d\n", result);
    return 0;
}
/** * CRUD Runner for testing repositories. */ @ComponentScan("edu.kit.datamanager.metastore") public class CrudRunner implements CommandLineRunner { @Autowired private ArangoOperations operations; @Autowired private MetsDocumentRepository repository; @Autowired private SectionDocumentRepository secRepository; @Autowired private XmlSchemaDefinitionRepository xsdRepository; @Autowired private MetsFileRepository metsFileRepository; @Autowired private MetsPropertiesRepository metsPropertiesRepository; /** * Repository persisting METS identifiers. */ @Autowired private MetsIdentifierRepository metsIdentifierRepository; /** * Repository persisting page metadata. */ @Autowired private PageMetadataRepository pageMetadataRepository; /** * Repository persisting classification metadata. */ @Autowired private ClassificationMetadataRepository classificationMetadataRepository; /** * Repository persisting genre metadata. */ @Autowired private GenreMetadataRepository genreMetadataRepository; /** * Repository persisting language metadata. */ @Autowired private LanguageMetadataRepository languageMetadataRepository; @Override public void run(final String... args) throws Exception { final String METS_DOCUMENT = "mets"; final String SECTION_DOCUMENT = "sec"; final String METS_FILES = "file"; final String XSD = "xsd"; final String METS_PROPERTIES = "prop"; final String CLASSIFICATION = "class"; final String GENRE = "genre"; final String LANGUAGE = "lang"; final String IDENTIFIER = "ident"; final String PAGE = "page"; final String DROP_DATABASE = "dropArangoDBOnly"; List<String> argumentList = Arrays.asList(args); System.out.println("Run CRUD Runner!"); // first drop the database so that we can run this multiple times with the same dataset try { operations.dropDatabase(); } catch (DataAccessException dae) { System.out.println("This message should be printed only once!"); System.out.println(dae.toString()); } if (argumentList.contains(DROP_DATABASE)) { return; } System.out.println("# CRUD operations"); // save a single entity in the database // there is no need to create the collection first; this happens automatically System.out.println("********************************************************************************************************************"); System.out.println("******************************* Build Database ***************************************************"); System.out.println("********************************************************************************************************************"); if (argumentList.contains(METS_PROPERTIES)) { System.out.println("******************************* MetsProperties ***************************************************"); for (MetsProperties metsProperty : createMetsProperties()) { metsPropertiesRepository.save(metsProperty); System.out.println("MetsProperty saved! " + metsProperty); } } if (argumentList.contains(METS_DOCUMENT)) { System.out.println("******************************* MetsDocuments ***************************************************"); for (MetsDocument metsDocument : createMetsDocuments()) { repository.save(metsDocument); System.out.println("MetsDocument saved!
" + metsDocument); } } if (argumentList.contains(SECTION_DOCUMENT)) { System.out.println("******************************* SectionDocuments ***************************************************"); for (SectionDocument document : createSectionDocuments()) { secRepository.save(document); System.out.println(String.format("secDocument saved in the database with id: '%s' with prefix: '%s' resourceId %s", document.getId(), document.getPrefix(), document.getResourceId())); } } if (argumentList.contains(METS_FILES)) { System.out.println("******************************* MetsFiles ***************************************************"); for (MetsFile metsFile : createMetsFiles()) { metsFileRepository.save(metsFile); System.out.println("MetsFile saved! " + metsFile); } } if (argumentList.contains(XSD)) { System.out.println("******************************* XSD ***************************************************"); for (XmlSchemaDefinition document : createSchemaDefinitions()) { xsdRepository.save(document); System.out.println(String.format("xsdDocument saved in the database with id: '%s' with prefix: '%s' namespace %s", document.getId(), document.getPrefix(), document.getNamespace())); } } if (argumentList.contains(CLASSIFICATION)) { System.out.println("******************************* Classification Metadata ***************************************************"); for (ClassificationMetadata classificationMetadata : createClassifications()) { classificationMetadataRepository.save(classificationMetadata); System.out.println("ClassificationMetadata saved! " + classificationMetadata); } } if (argumentList.contains(GENRE)) { System.out.println("******************************* Genre Metadata ***************************************************"); for (GenreMetadata genreMetadata : createGenre()) { genreMetadataRepository.save(genreMetadata); System.out.println("GenreMetadata saved! " + genreMetadata); } } if (argumentList.contains(LANGUAGE)) { System.out.println("******************************* Language Metadata ***************************************************"); for (LanguageMetadata languageMetadata : createLanguageMetadata()) { languageMetadataRepository.save(languageMetadata); System.out.println("LanguageMetadata saved! " + languageMetadata); } } if (argumentList.contains(IDENTIFIER)) { System.out.println("******************************* Identifier Metadata ***************************************************"); for (MetsIdentifier metsIdentifier : createMetsIdentifier()) { metsIdentifierRepository.save(metsIdentifier); System.out.println("MetsIdentifier saved! " + metsIdentifier); } } if (argumentList.contains(PAGE)) { System.out.println("******************************* Page Metadata ***************************************************"); for (PageMetadata pageMetadata : createPageMetadata()) { pageMetadataRepository.save(pageMetadata); System.out.println("PageMetadata saved! 
" + pageMetadata); } } System.out.println("********************************************************************************************************************"); System.out.println("************************ START TESTS ************************************************************"); System.out.println("********************************************************************************************************************"); if (argumentList.contains(METS_PROPERTIES)) { System.out.println("********************************************************************************************************************"); System.out.println("******************************* MetsProperties ***************************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for metsPorperties: " + metsPropertiesRepository.count()); System.out.println("metsPropertiesRepository.findByResourceId(\"id_4\")"); Iterable<MetsProperties> findByResourceId = metsPropertiesRepository.findByResourceId("id_4"); for (Iterator<MetsProperties> it = findByResourceId.iterator(); it.hasNext();) { MetsProperties index = it.next(); System.out.println(index.toString()); } System.out.println("metsPropertiesRepository.findResourceIdByPpn(\"ppn2\")"); Iterable<IResourceId> findResourceIdByPpn = metsPropertiesRepository.findResourceIdByPpn("ppn2"); for (Iterator<IResourceId> it = findResourceIdByPpn.iterator(); it.hasNext();) { IResourceId index = it.next(); System.out.println(index.getResourceId()); } System.out.println("metsPropertiesRepository.findResourceIdByPpn(\"ppn1\")"); findResourceIdByPpn = metsPropertiesRepository.findResourceIdByPpn("ppn1"); for (Iterator<IResourceId> it = findResourceIdByPpn.iterator(); it.hasNext();) { IResourceId index = it.next(); System.out.println(index.getResourceId()); } System.out.println("metsPropertiesRepository.findResourceIdByTitle(\"Titel\")"); Iterable<IResourceId> findResourceIdByTitle = metsPropertiesRepository.findResourceIdByTitle("Titel"); for (Iterator<IResourceId> it = findResourceIdByTitle.iterator(); it.hasNext();) { IResourceId index = it.next(); System.out.println(index.getResourceId()); } System.out.println("metsPropertiesRepository.findResourceIdByTitle(\"Titel3\")"); findResourceIdByTitle = metsPropertiesRepository.findResourceIdByTitle("Titel3"); for (Iterator<IResourceId> it = findResourceIdByTitle.iterator(); it.hasNext();) { IResourceId index = it.next(); System.out.println(index.getResourceId()); } } if (argumentList.contains(SECTION_DOCUMENT)) { System.out.println("********************************************************************************************************************"); System.out.println("************************ SectionDocument ************************************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for sectionDocument: " + secRepository.count()); // the generated id from the database is set in the original entity //Thread.sleep(2000); System.out.println("secRepository.findByResourceIdAndPrefix(\"id_0002\", \"dc\")"); Iterable<SectionDocument> findByResourceIdAndPrefix = secRepository.findByResourceIdAndPrefix("id_0002", "dc"); System.out.println(findByResourceIdAndPrefix.iterator().next().getSectionDocument()); } if (argumentList.contains(METS_DOCUMENT)) 
{ System.out.println("********************************************************************************************************************"); System.out.println("*********** METSDOCUMENT METSDOCUMENT METSDOCUMENT METSDOCUMENT METSDOCUMENT ***********************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for metsDocument: " + repository.count()); Iterable<IVersion> version; Iterator<IVersion> versionIterator; System.out.println("********************************************************************************************************************"); System.out.println("************************ Find all current versions of MetsDocuments ************************************"); System.out.println("********************************************************************************************************************"); System.out.println("repository.findByCurrentTrue()"); Iterable<MetsDocument> xmlOnly = repository.findByCurrentTrue(); Iterator<MetsDocument> xmlOnlyIterator = xmlOnly.iterator(); while (xmlOnlyIterator.hasNext()) { System.out.println(xmlOnlyIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find latest MetsDocument by resourceId ********************************************"); System.out.println("********************************************************************************************************************"); System.out.println("repository.findByResourceIdAndCurrentTrue(\"id_0002\")"); MetsDocument metsDocSingle = repository.findByResourceIdAndCurrentTrue("id_0002"); System.out.println(metsDocSingle); System.out.println("repository.findByResourceIdAndCurrentTrue(\"id_0015\")"); metsDocSingle = repository.findByResourceIdAndCurrentTrue("id_0015"); System.out.println(metsDocSingle); System.out.println("********************************************************************************************************************"); System.out.println("************************ Find all versions of MetsDocument by resourceId ************************************"); System.out.println("********************************************************************************************************************"); System.out.println("repository.findVersionByResourceIdOrderByVersionDesc(\"id_0002\")"); version = repository.findVersionByResourceIdOrderByVersionDesc("id_0002"); versionIterator = version.iterator(); Integer oldVersion = -1; while (versionIterator.hasNext()) { IVersion version2 = versionIterator.next(); System.out.println(version2.getVersion()); System.out.println(version2); oldVersion = version2.getVersion(); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsDocument by resourceId and version ***************************************"); System.out.println("********************************************************************************************************************"); System.out.println("repository.findByResourceIdAndVersion(\"id_0002\", oldVersion);"); metsDocSingle = repository.findByResourceIdAndVersion("id_0002", oldVersion); System.out.println(metsDocSingle); } if (argumentList.contains(METS_FILES)) { 
System.out.println("********************************************************************************************************************"); System.out.println("*********** METSFILE METSFILE METSFILE METSFILE METSFILE METSFILE METSFILE *****************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for metsFile: " + metsFileRepository.count()); Iterable<MetsFile> metsFile; Iterator<MetsFile> metsFileIterator; Iterable<IUrl> urlOfMetsFile; Iterator<IUrl> urlIterator; System.out.println("********************************************************************************************************************"); System.out.println("************************ Find all METSFiles *********************"); System.out.println("********************************************************************************************************************"); metsFile = metsFileRepository.findAll(); metsFileIterator = metsFile.iterator(); while (metsFileIterator.hasNext()) { System.out.println(metsFileIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find one MetsFile by resourceId and fileID (highest version) *********************"); System.out.println("********************************************************************************************************************"); MetsFile metsFile1 = metsFileRepository.findTop1DistinctByResourceIdAndFileIdOrderByVersionDesc("id_0002", "PAGE-0001_IMG_BIN"); System.out.println(metsFile1); System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile by resourceId and USE *********************************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findByResourceIdAndUse(\"id_0002\", \"OCR-D-GT-IMG-BIN\")"); metsFile = metsFileRepository.findByResourceIdAndUseAndCurrentTrue("id_0002", "OCR-D-GT-IMG-BIN"); metsFileIterator = metsFile.iterator(); while (metsFileIterator.hasNext()) { System.out.println(metsFileIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile by resourceId and USE IN ******************************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findByResourceIdAndUseIn(\"id_0002\", Arrays.asList(\"OCR-D-GT-IMG-BIN\", \"OCR-D-GT-IMG-CROP\")"); metsFile = metsFileRepository.findByResourceIdAndUseInAndCurrentTrue("id_0002", Arrays.asList("OCR-D-GT-IMG-BIN", "OCR-D-GT-IMG-CROP")); metsFileIterator = metsFile.iterator(); while (metsFileIterator.hasNext()) { System.out.println(metsFileIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile URL by resourceId and USE *****************************************"); 
System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findUrlByResourceIdAndUse(\"id_0002\", \"OCR-D-GT-IMG-BIN\")"); urlOfMetsFile = metsFileRepository.findUrlByResourceIdAndUseAndCurrentTrue("id_0002", "OCR-D-GT-IMG-BIN"); urlIterator = urlOfMetsFile.iterator(); while (urlIterator.hasNext()) { System.out.println(urlIterator.next().getUrl()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile URL by resourceId and USE IN **************************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findUrlByResourceIdAndUseIn(\"id_0002\", Arrays.asList(\"OCR-D-GT-IMG-BIN\", \"OCR-D-GT-IMG-CROP\")"); urlOfMetsFile = metsFileRepository.findUrlByResourceIdAndUseInAndCurrentTrue("id_0002", Arrays.asList("OCR-D-GT-IMG-BIN", "OCR-D-GT-IMG-CROP")); urlIterator = urlOfMetsFile.iterator(); while (urlIterator.hasNext()) { System.out.println(urlIterator.next().getUrl()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile by resourceId and PAGEID ******************************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findByResourceIdAndPageId(\"id_0002\", \"PAGE-0001\")"); metsFile = metsFileRepository.findByResourceIdAndPageIdAndCurrentTrue("id_0002", "PAGE-0001"); metsFileIterator = metsFile.iterator(); while (metsFileIterator.hasNext()) { System.out.println(metsFileIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile by resourceId and PAGEID IN **************************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findByResourceIdAndPageIdIn(\"id_0002\", Arrays.asList(\"PAGE-0001\", \"PAGE-0002\")"); metsFile = metsFileRepository.findByResourceIdAndPageIdInAndCurrentTrue("id_0002", Arrays.asList("PAGE-0001", "PAGE-0002")); metsFileIterator = metsFile.iterator(); while (metsFileIterator.hasNext()) { System.out.println(metsFileIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile URL by resourceId and PAGEID *************************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findUrlByResourceIdAndPageId(\"id_0002\", \"PAGE-0001\")"); urlOfMetsFile = metsFileRepository.findUrlByResourceIdAndPageIdAndCurrentTrue("id_0002", "PAGE-0001"); urlIterator = urlOfMetsFile.iterator(); while (urlIterator.hasNext()) { System.out.println(urlIterator.next().getUrl()); } 
System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile URL by resourceId and PAGEID IN **********************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findUrlByResourceIdAndPageIdIn(\"id_0002\", Arrays.asList(\"PAGE-0001\", \"PAGE-0002\")"); urlOfMetsFile = metsFileRepository.findUrlByResourceIdAndPageIdInAndCurrentTrue("id_0002", Arrays.asList("PAGE-0001", "PAGE-0002")); urlIterator = urlOfMetsFile.iterator(); while (urlIterator.hasNext()) { System.out.println(urlIterator.next().getUrl()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile by resourceId and USE AND PAGEID **********************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findByResourceIdAndUseAndPageId(\"id_0002\", \"OCR-D-GT-IMG-BIN\", \"PAGE-0001\")"); metsFile = metsFileRepository.findByResourceIdAndUseAndPageIdAndCurrentTrue("id_0002", "OCR-D-GT-IMG-BIN", "PAGE-0001"); metsFileIterator = metsFile.iterator(); while (metsFileIterator.hasNext()) { System.out.println(metsFileIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile by resourceId and USE AND PAGEID IN ******************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findByResourceIdAndUseInAndPageIdIn(\"id_0002\", Arrays.asList(\"OCR-D-GT-IMG-BIN\", \"OCR-D-GT-IMG-DESPEC\"), Arrays.asList(\"PAGE-0001\", \"PAGE-0002\")"); metsFile = metsFileRepository.findByResourceIdAndUseInAndPageIdInAndCurrentTrue("id_0002", Arrays.asList("OCR-D-GT-IMG-BIN", "OCR-D-GT-IMG-DESPEC"), Arrays.asList("PAGE-0001", "PAGE-0002")); metsFileIterator = metsFile.iterator(); while (metsFileIterator.hasNext()) { System.out.println(metsFileIterator.next()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile URL by resourceId USE AND and PAGEID *****************************"); System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findUrlByResourceIdAndUseAndPageId(\"id_0002\", \"OCR-D-GT-IMG-BIN\", \"PAGE-0001\")"); urlOfMetsFile = metsFileRepository.findUrlByResourceIdAndUseAndPageIdAndCurrentTrue("id_0002", "OCR-D-GT-IMG-BIN", "PAGE-0001"); urlIterator = urlOfMetsFile.iterator(); while (urlIterator.hasNext()) { System.out.println(urlIterator.next().getUrl()); } System.out.println("********************************************************************************************************************"); System.out.println("************************ Find MetsFile URL by resourceId USE AND and PAGEID IN **************************"); 
System.out.println("********************************************************************************************************************"); System.out.println("metsFileRepository.findUrlByResourceIdAndUseInAndPageIdIn(\"id_0002\", Arrays.asList(\"OCR-D-GT-IMG-BIN\", \"OCR-D-GT-IMG-DESPEC\"), Arrays.asList(\"PAGE-0001\", \"PAGE-0002\")"); urlOfMetsFile = metsFileRepository.findUrlByResourceIdAndUseInAndPageIdInAndCurrentTrue("id_0002", Arrays.asList("OCR-D-GT-IMG-BIN", "OCR-D-GT-IMG-DESPEC"), Arrays.asList("PAGE-0001", "PAGE-0002")); urlIterator = urlOfMetsFile.iterator(); while (urlIterator.hasNext()) { System.out.println(urlIterator.next().getUrl()); } } if (argumentList.contains(CLASSIFICATION)) { System.out.println("********************************************************************************************************************"); System.out.println("******************************* Classification Metadata ***************************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for classification Metadata: " + classificationMetadataRepository.count()); System.out.println("classificationMetadataRepository.findByClassification(\"Geburtstag\")"); Iterable<ClassificationMetadata> classificationList = classificationMetadataRepository.findByClassification("Geburtstag"); Iterator<ClassificationMetadata> iteratorClassification = classificationList.iterator(); while (iteratorClassification.hasNext()) { System.out.println(iteratorClassification.next().toString()); } System.out.println("classificationMetadataRepository.findByResourceId(\"id_0015\")"); classificationList = classificationMetadataRepository.findByResourceId("id_0015"); iteratorClassification = classificationList.iterator(); while (iteratorClassification.hasNext()) { System.out.println(iteratorClassification.next().toString()); } } if (argumentList.contains(GENRE)) { System.out.println("********************************************************************************************************************"); System.out.println("******************************* Genre Metadata ***************************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for genre Metadata: " + genreMetadataRepository.count()); System.out.println("genreMetadataRepository.findByGenre(\"Horror\")"); Iterable<GenreMetadata> genreList = genreMetadataRepository.findByGenre("Horror"); Iterator<GenreMetadata> iteratorGenre = genreList.iterator(); while (iteratorGenre.hasNext()) { System.out.println(iteratorGenre.next().toString()); } System.out.println("genreMetadataRepository.findByResourceId(\"id_0018\")"); genreList = genreMetadataRepository.findByResourceId("id_0018"); iteratorGenre = genreList.iterator(); while (iteratorGenre.hasNext()) { System.out.println(iteratorGenre.next().toString()); } } if (argumentList.contains(LANGUAGE)) { System.out.println("********************************************************************************************************************"); System.out.println("******************************* Language Metadata ***************************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of 
entities for language Metadata: " + languageMetadataRepository.count()); System.out.println("languageMetadataRepository.findByLanguage(\"en\")"); Iterable<LanguageMetadata> languageList = languageMetadataRepository.findByLanguage("en"); Iterator<LanguageMetadata> iteratorLanguage = languageList.iterator(); while (iteratorLanguage.hasNext()) { System.out.println(iteratorLanguage.next().toString()); } System.out.println("languageMetadataRepository.findByResourceId(\"id_0016\")"); languageList = languageMetadataRepository.findByResourceId("id_0016"); iteratorLanguage = languageList.iterator(); while (iteratorLanguage.hasNext()) { System.out.println(iteratorLanguage.next().toString()); } } if (argumentList.contains(IDENTIFIER)) { System.out.println("********************************************************************************************************************"); System.out.println("******************************* Identifier Metadata ***************************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for identifier Metadata: " + metsIdentifierRepository.count()); System.out.println("metsIdentifierRepository.findByIdentifier(\"url1\")"); Iterable<MetsIdentifier> metsIdentifierList = metsIdentifierRepository.findByIdentifier("url1"); Iterator<MetsIdentifier> metsIdentifierIterator = metsIdentifierList.iterator(); while (metsIdentifierIterator.hasNext()) { System.out.println(metsIdentifierIterator.next().toString()); } System.out.println("metsIdentifierRepository.findByResourceId(\"id_0002\")"); metsIdentifierList = metsIdentifierRepository.findByResourceId("id_0002"); metsIdentifierIterator = metsIdentifierList.iterator(); while (metsIdentifierIterator.hasNext()) { System.out.println(metsIdentifierIterator.next().toString()); } System.out.println("metsIdentifierRepository.findByResourceIdAndType(\"id_0017\", \"handle\")"); metsIdentifierList = metsIdentifierRepository.findByResourceIdAndType("id_0017", "handle"); metsIdentifierIterator = metsIdentifierList.iterator(); while (metsIdentifierIterator.hasNext()) { System.out.println(metsIdentifierIterator.next().toString()); } System.out.println("metsIdentifierRepository.findByIdentifierAndType(\"url3\", \"url\")"); metsIdentifierList = metsIdentifierRepository.findByIdentifierAndType("url3", "url"); metsIdentifierIterator = metsIdentifierList.iterator(); while (metsIdentifierIterator.hasNext()) { System.out.println(metsIdentifierIterator.next().toString()); } } if (argumentList.contains(PAGE)) { System.out.println("********************************************************************************************************************"); System.out.println("******************************* Page Metadata ***************************************************"); System.out.println("********************************************************************************************************************"); System.out.println("No of entities for page Metadata: " + pageMetadataRepository.count()); System.out.println("pageMetadataRepository.findByFeature(\"" + GroundTruthProperties.ADMINS.toString() + "\")"); Iterable<PageMetadata> pageList = pageMetadataRepository.findByFeature(GroundTruthProperties.ADMINS); Iterator<PageMetadata> iteratorPage = pageList.iterator(); while (iteratorPage.hasNext()) { System.out.println(iteratorPage.next().toString()); }
System.out.println("pageMetadataRepository.findByResourceId(\"id_0002\")"); pageList = pageMetadataRepository.findByResourceId("id_0002"); iteratorPage = pageList.iterator(); while (iteratorPage.hasNext()) { System.out.println(iteratorPage.next().toString()); } System.out.println("pageMetadataRepository.findByResourceId(\"id_0002\")"); pageList = pageMetadataRepository.findByResourceIdAndPageId("id_0002", "phys_0001"); iteratorPage = pageList.iterator(); while (iteratorPage.hasNext()) { System.out.println(iteratorPage.next().toString()); } } // // // lets take a look whether we can find Ned Stark in the database // final Character foundNed = repository.findById(nedStark.getId()).get(); // System.out.println(String.format("Found %s", foundNed)); // // // as everyone probably knows Ned Stark died in the first season. // // So we have to update his 'alive' flag // nedStark.setAlive(false); // repository.save(nedStark); // final Character deadNed = repository.findById(nedStark.getId()).get(); // System.out.println(String.format("Ned Stark after 'alive' flag was updated: %s", deadNed)); // // // lets save some additional characters // final Collection<Character> createCharacters = createCharacters(); // System.out.println(String.format("Save %s additional characters", createCharacters.size())); // repository.saveAll(createCharacters); // // final Iterable<Character> all = repository.findAll(); // final long count = StreamSupport.stream(Spliterators.spliteratorUnknownSize(all.iterator(), 0), false).count(); // System.out.println(String.format("A total of %s characters are persisted in the database", count)); // // System.out.println("## Return all characters sorted by name"); // final Iterable<Character> allSorted = repository.findAll(new Sort(Sort.Direction.ASC, "name")); // allSorted.forEach(System.out::println); // // System.out.println("## Return the first 5 characters sorted by name"); // final Page<Character> first5Sorted = repository // .findAll(PageRequest.of(0, 5, new Sort(Sort.Direction.ASC, "name"))); // first5Sorted.forEach(System.out::println); } public static void setupMetsProperties() { } public static Collection<MetsProperties> createMetsProperties() { MetsProperties metsProperties1 = new MetsProperties(); metsProperties1.setTitle("Titel"); metsProperties1.setPpn("ppn1"); metsProperties1.setResourceId("id_1"); MetsProperties metsProperties2 = new MetsProperties(); metsProperties2.setTitle("Titel2"); metsProperties2.setPpn("ppn2"); metsProperties2.setResourceId("id_2"); MetsProperties metsProperties3 = new MetsProperties(); metsProperties3.setTitle("Titel"); metsProperties3.setPpn("ppn3"); metsProperties3.setResourceId("id_3"); MetsProperties metsProperties4 = new MetsProperties(); metsProperties4.setTitle("Titel3"); metsProperties4.setPpn("ppn1"); metsProperties4.setResourceId("id_4"); return Arrays.asList(metsProperties1, metsProperties2, metsProperties3, metsProperties4); } public static Collection<MetsDocument> createMetsDocuments() { MetsDocument eins = new MetsDocument("id_0002", "Am staerksten"); MetsDocument zwei = eins.updateMetsContent("noch staerker"); MetsDocument drei = zwei.updateMetsContent("staerker"); MetsDocument vier = drei.updateMetsContent("stark"); return Arrays.asList( eins, zwei, drei, vier, new MetsDocument("id_0001", "someXML"), new MetsDocument("id_0003me", "Lannister"), new MetsDocument("id_0004sei", "Lannister"), new MetsDocument("id_0005ah", "Mormont"), new MetsDocument("id_0006e6rys", "Targaryen"), new MetsDocument("id_0007sa", "Stark"), new 
MetsDocument("id_0008b", "Stark"), new MetsDocument("id_0009n", "Stark"), new MetsDocument("id_0010dor", "Clegane"), new MetsDocument("id_0001l", "Drogo"), new MetsDocument("id_0012os", "Seaworth"), new MetsDocument("id_0013nnis", "Baratheon"), new MetsDocument("id_00140gaery", "Tyrell"), new MetsDocument("id_0015", "XML 00015"), new MetsDocument("id_0016", "Maegyr"), new MetsDocument("id_0017", "xml 0017"), new MetsDocument("id_0018", "xml00018"), new MetsDocument("id_0019", "Bolton"), new MetsDocument("id_0020", "Naharis"), new MetsDocument("id_0021", "Baratheon"), new MetsDocument("id_0022", "Bolton")); } public static Collection<SectionDocument> createSectionDocuments() { return Arrays.asList( new SectionDocument("id_0002", 1, "bmd", "secId", MdType.OTHER, null, "someBMDContent"), new SectionDocument("id_0002", 1, "tei", "secId", MdType.OTHER, null, "someTeiContent"), new SectionDocument("id_0002", 1, "dc", "secId", MdType.OTHER, null, "someDCContent"), new SectionDocument("id_0002", 1, "file", "secId", MdType.OTHER, null, "someFileContent"), new SectionDocument("id_0002", 1, "gt_ocr-d", "GT_OCR-D", MdType.OTHER, null, "<ocrd><groundTruth><language>deutsch</language></groundTruth></ocrd>someFileContent")); // new SectionDocument("id_0003me", "Lannister"), // new SectionDocument("id_0004sei", "Lannister"), // new SectionDocument("id_0005ah", "Mormont"), // new SectionDocument("id_0006e6rys", "Targaryen"), // new SectionDocument("id_0007sa", "Stark"), // new SectionDocument("id_0008b", "Stark"), // new SectionDocument("id_0009n", "Stark"), // new SectionDocument("id_0010dor", "Clegane"), // new SectionDocument("id_0001l", "Drogo"), // new SectionDocument("id_0012os", "Seaworth"), // new SectionDocument("id_0013nnis", "Baratheon"), // new SectionDocument("id_00140gaery", "Tyrell"), // new SectionDocument("id_0015", "XML 00015"), // new SectionDocument("id_0016", "Maegyr"), // new SectionDocument("id_0017", "xml 0017"), // new SectionDocument("id_0018", "xml00018"), // new SectionDocument("id_0019", "Bolton"), // new SectionDocument("id_0020", "Naharis"), // new SectionDocument("id_0021", "Baratheon"), // new SectionDocument("id_0022", "Bolton")); } public static Collection<XmlSchemaDefinition> createSchemaDefinitions() { return Arrays.asList( new XmlSchemaDefinition("bmd", "namespace1", "someBMDContent"), new XmlSchemaDefinition("tei", "namespace2", "someTeiContent"), new XmlSchemaDefinition("dc", "namespace3", "someDCContent"), new XmlSchemaDefinition("file", "namespace4", "someFileContent")); } public static Collection<MetsFile> createMetsFiles() { return Arrays.asList( new MetsFile("id_0002", 1, "PAGE-0001_IMG_BIN", "image/png", "PAGE-0001", "OCR-D-GT-IMG-BIN", "url1"), new MetsFile("id_0002", 1, "PAGE-0001_IMG-CROP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-CROP", "url2"), new MetsFile("id_0002", 1, "PAGE-0001_IMG-DESPEC", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DESPEC", "url3"), new MetsFile("id_0002", 1, "PAGE-0001_IMG-DEWARP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DEWARP", "url4"), new MetsFile("id_0002", 2, "PAGE-0001_IMG_BIN", "image/png", "PAGE-0001", "OCR-D-GT-IMG-BIN", "url1_v2"), new MetsFile("id_0002", 2, "PAGE-0001_IMG-CROP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-CROP", "url2_v2"), new MetsFile("id_0002", 2, "PAGE-0001_IMG-DESPEC", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DESPEC", "url3_v2"), new MetsFile("id_0002", 2, "PAGE-0001_IMG-DEWARP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DEWARP", "url4_v2"), new MetsFile("id_0015", 1, "PAGE-0001_IMG_BIN", "image/png", 
"PAGE-0001", "OCR-D-GT-IMG-BIN", "url11"), new MetsFile("id_0015", 1, "PAGE-0001_IMG-CROP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-CROP", "url21"), new MetsFile("id_0015", 1, "PAGE-0001_IMG-DESPEC", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DESPEC", "url31"), new MetsFile("id_0015", 1, "PAGE-0001_IMG-DEWARP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DEWARP", "url41"), new MetsFile("id_0016", 1, "PAGE-0001_IMG_BIN", "image/png", "PAGE-0001", "OCR-D-GT-IMG-BIN", "url16"), new MetsFile("id_0017", 1, "PAGE-0001_IMG-CROP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-CROP", "url17"), new MetsFile("id_0018", 1, "PAGE-0001_IMG-DESPEC", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DESPEC", "url18"), new MetsFile("id_0019", 1, "PAGE-0001_IMG-DEWARP", "image/png", "PAGE-0001", "OCR-D-GT-IMG-DEWARP", "url19"), new MetsFile("id_0002", 1, "PAGE-0002_IMG_BIN", "image/png", "PAGE-0002", "OCR-D-GT-IMG-BIN", "url2_1"), new MetsFile("id_0002", 1, "PAGE-0002_IMG-CROP", "image/png", "PAGE-0002", "OCR-D-GT-IMG-CROP", "url2_2"), new MetsFile("id_0002", 1, "PAGE-0002_IMG-DESPEC", "image/png", "PAGE-0002", "OCR-D-GT-IMG-DESPEC", "url2_3"), new MetsFile("id_0002", 1, "PAGE-0002_IMG-DEWARP", "image/png", "PAGE-0002", "OCR-D-GT-IMG-DEWARP", "url2_4"), new MetsFile("id_0015", 1, "PAGE-0002_IMG_BIN", "image/png", "PAGE-0002", "OCR-D-GT-IMG-BIN", "url2_11"), new MetsFile("id_0015", 1, "PAGE-0002_IMG-CROP", "image/png", "PAGE-0002", "OCR-D-GT-IMG-CROP", "url2_21"), new MetsFile("id_0015", 1, "PAGE-0002_IMG-DESPEC", "image/png", "PAGE-0002", "OCR-D-GT-IMG-DESPEC", "url2_31"), new MetsFile("id_0015", 1, "PAGE-0002_IMG-DEWARP", "image/png", "PAGE-0002", "OCR-D-GT-IMG-DEWARP", "url2_41"), new MetsFile("id_0016", 1, "PAGE-0002_IMG_BIN", "image/png", "PAGE-0002", "OCR-D-GT-IMG-BIN", "url2_16"), new MetsFile("id_0017", 1, "PAGE-0002_IMG-CROP", "image/png", "PAGE-0002", "OCR-D-GT-IMG-CROP", "url2_17"), new MetsFile("id_0018", 1, "PAGE-0002_IMG-DESPEC", "image/png", "PAGE-0002", "OCR-D-GT-IMG-DESPEC", "url2_18"), new MetsFile("id_0019", 1, "PAGE-0002_IMG-DEWARP", "image/png", "PAGE-0002", "OCR-D-GT-IMG-DEWARP", "url2_19")); } public static Collection<ClassificationMetadata> createClassifications() { return Arrays.asList( new ClassificationMetadata("id_0002", "Geburtstag"), new ClassificationMetadata("id_0002", "Jahrestag"), new ClassificationMetadata("id_0015", "Tagebuch"), new ClassificationMetadata("id_0015", "Geburtstag"), new ClassificationMetadata("id_0015", "Krimi"), new ClassificationMetadata("id_0015", "url41"), new ClassificationMetadata("id_0016", "Novelle"), new ClassificationMetadata("id_0017", "Gedicht"), new ClassificationMetadata("id_0018", "Geburtstag"), new ClassificationMetadata("id_0019", "Gedicht")); } public static Collection<GenreMetadata> createGenre() { return Arrays.asList( new GenreMetadata("id_0002", "Drama"), new GenreMetadata("id_0002", "Musical"), new GenreMetadata("id_0015", "Western"), new GenreMetadata("id_0015", "Drama"), new GenreMetadata("id_0015", "Horror"), new GenreMetadata("id_0015", "Action"), new GenreMetadata("id_0016", "Thriller"), new GenreMetadata("id_0017", "Krimi"), new GenreMetadata("id_0018", "Drama"), new GenreMetadata("id_0019", "Krimi")); } public static Collection<LanguageMetadata> createLanguageMetadata() { return Arrays.asList( new LanguageMetadata("id_0002", "en"), new LanguageMetadata("id_0002", "deu"), new LanguageMetadata("id_0015", "deu"), new LanguageMetadata("id_0016", "deu"), new LanguageMetadata("id_0017", "deu"), new LanguageMetadata("id_0017", "en"), new 
LanguageMetadata("id_0018", "en"), new LanguageMetadata("id_0019", "en")); } public static Collection<MetsIdentifier> createMetsIdentifier() { return Arrays.asList( new MetsIdentifier("id_0002", "purl", "purl1"), new MetsIdentifier("id_0002", "url", "url1"), new MetsIdentifier("id_0015", "urn", "urn1"), new MetsIdentifier("id_0016", "handle", "handle1"), new MetsIdentifier("id_0017", "url", "url2"), new MetsIdentifier("id_0017", "handle", "handle2"), new MetsIdentifier("id_0018", "urn", "urn2"), new MetsIdentifier("id_0019", "url", "url3")); } public static Collection<PageMetadata> createPageMetadata() { return Arrays.asList( new PageMetadata("id_0002", 1, "phys_0001", GroundTruthProperties.ACQUISITION), new PageMetadata("id_0002", 1, "phys_0001", GroundTruthProperties.ADMINS), new PageMetadata("id_0002", 2, "phys_0002", GroundTruthProperties.ADMINS), new PageMetadata("id_0015", 1, "phys_0001", GroundTruthProperties.ANDROID), new PageMetadata("id_0016", 1, "phys_0001", GroundTruthProperties.TOC), new PageMetadata("id_0017", 1, "phys_0001", GroundTruthProperties.FAX), new PageMetadata("id_0017", 2, "phys_0002", GroundTruthProperties.DIA), new PageMetadata("id_0018", 1, "phys_0001", GroundTruthProperties.ADMINS), new PageMetadata("id_0019", 1, "phys_0001", GroundTruthProperties.LATIN)); } public static Collection<TextRegion> createTextRegion() { TextRegion text1 = new TextRegion(); text1.setResourceId("id_0002"); text1.setRegion("region1"); text1.setPageUrl("OCR-D-IMG_0001"); text1.setImageUrl("OCR-D-IMG_0001"); text1.setOrder(1); text1.setText("This is a really long text from a very large page. Unfortunately it " + "repeats the same text several times."); text1.setVersion(1); text1.setConfidence(1.0f); TextRegion text2 = new TextRegion(); text2.setResourceId("id_0002"); text2.setRegion("region2"); text2.setPageUrl("OCR-D-IMG_0002"); text2.setImageUrl("OCR-D-IMG_0002"); text2.setOrder(2); text2.setText("On page 2 there is a shorter text. Unfortunately it " + "repeats the same text several times."); text2.setVersion(1); text2.setConfidence(0.6f); TextRegion text3 = new TextRegion(); text3.setResourceId("id_0002"); text3.setRegion("region3"); text3.setPageUrl("OCR-D-IMG_0003"); text3.setImageUrl("OCR-D-IMG_0003"); text3.setOrder(3); text3.setText("Dito on page 3."); text3.setVersion(1); text3.setConfidence(0.8f); TextRegion text4 = new TextRegion(); text4.setResourceId("id_0002"); text4.setRegion("region4"); text4.setPageUrl("OCR-D-IMG_0004"); text4.setImageUrl("OCR-D-IMG_0004"); text4.setOrder(4); text4.setText("This is a really long text from a very large page. Unfortunately it " + "repeats the same text several times." + "This is a really long text from a very large page. Unfortunately it " + "repeats the same text several times." + "This is a really long text from a very large page. 
Unfortunately it " + "repeats the same text several times."); text4.setVersion(1); text4.setConfidence(0.5f); TextRegion text5 = new TextRegion(); text5.setResourceId("id_0003"); text5.setRegion("region5"); text5.setPageUrl("OCR-D-IMG_0005"); text5.setImageUrl("OCR-D-IMG_0005"); text5.setOrder(5); text5.setText("This is the last page."); text5.setVersion(1); text5.setConfidence(0.9f); return Arrays.asList( text2, text3, text5, text1, text4); } public static Collection<ProvenanceMetadata> createProvenanceMetadata() { ProvenanceMetadata pmd1 = new ProvenanceMetadata(); pmd1.setResourceId("id_0002"); pmd1.setDurationProcessor(20l); pmd1.setDurationWorkflow(100l); pmd1.setInputFileGrps("group1"); pmd1.setOutputFileGrps("group2"); pmd1.setParameterFile("Content of parameter file."); pmd1.setProcessorLabel("ocrd-tesserocr-binarize"); pmd1.setStartProcessor(new Date(987654321l)); pmd1.setStartWorkflow(new Date(987654311l)); pmd1.setWorkflowId("any workflow ID"); ProvenanceMetadata pmd2 = new ProvenanceMetadata(); pmd2.setResourceId("id_0002"); pmd2.setDurationProcessor(12l); pmd2.setDurationWorkflow(100l); pmd2.setInputFileGrps("group2"); pmd2.setOutputFileGrps("group3, group4"); pmd2.setParameterFile("Content of parameter file."); pmd2.setProcessorLabel("ocrd-tesserocr-segment-line"); pmd2.setStartProcessor(new Date(987654341l)); pmd2.setStartWorkflow(new Date(987654311l)); pmd2.setWorkflowId("any workflow ID"); ProvenanceMetadata pmd3 = new ProvenanceMetadata(); pmd3.setResourceId("id_0002"); pmd3.setDurationProcessor(13l); pmd3.setDurationWorkflow(100l); pmd3.setInputFileGrps("group3"); pmd3.setOutputFileGrps("group5"); pmd3.setProcessorLabel("ocrd-tesserocr-recognize"); pmd3.setStartProcessor(new Date(987654354l)); pmd3.setStartWorkflow(new Date(987654311l)); pmd3.setWorkflowId("any workflow ID"); ProvenanceMetadata pmd4 = new ProvenanceMetadata(); pmd4.setResourceId("id_0002"); pmd4.setDurationProcessor(14l); pmd4.setDurationWorkflow(100l); pmd4.setInputFileGrps("group1, group2"); pmd4.setOutputFileGrps("group3"); pmd4.setParameterFile("Content of parameter file."); pmd4.setProcessorLabel("ocrd-calamari-recognize"); pmd4.setStartProcessor(new Date(987654400l)); pmd4.setStartWorkflow(new Date(987654390l)); pmd4.setWorkflowId("another workflow ID"); ProvenanceMetadata pmd5 = new ProvenanceMetadata(); pmd5.setResourceId("id_0002"); pmd5.setDurationProcessor(15l); pmd5.setDurationWorkflow(100l); pmd5.setInputFileGrps("group3"); pmd5.setOutputFileGrps("group4"); pmd5.setParameterFile("Content of parameter file."); pmd5.setProcessorLabel("ocrd-evaluation"); pmd5.setStartProcessor(new Date(987654414l)); pmd5.setStartWorkflow(new Date(987654390l)); pmd5.setWorkflowId("another workflow ID"); ProvenanceMetadata pmd6 = new ProvenanceMetadata(); pmd6.setResourceId("id_0003"); pmd6.setDurationProcessor(16l); pmd6.setDurationWorkflow(150l); pmd6.setInputFileGrps("group1"); pmd6.setOutputFileGrps("group2"); pmd6.setProcessorLabel("ocrd-tesserocr-binarize"); pmd6.setStartProcessor(new Date(987654414l)); pmd6.setStartWorkflow(new Date(987654390l)); pmd6.setWorkflowId("any workflow ID"); return Arrays.asList( pmd2, pmd3, pmd6, pmd1, pmd4, pmd5); } public static Collection<ZippedBagit> createZippedBagits() { Collection<ZippedBagit> collection = null; try { ZippedBagit bag1 = new ZippedBagit("resource_0001", "id_0002", "url1"); Thread.sleep(20); ZippedBagit bag2 = bag1.updateZippedBagit("resource_0002", "url2"); Thread.sleep(20); ZippedBagit bag3 = bag2.updateZippedBagit("resource_0003", "url3"); 
Thread.sleep(20); ZippedBagit bag4 = new ZippedBagit("resource_0004", "id_0015", "url4"); Thread.sleep(20); ZippedBagit bag5 = new ZippedBagit("resource_0005", "id_0016", "url5"); Thread.sleep(20); ZippedBagit bag6 = new ZippedBagit("resource_0006", "id_0017", "url6"); Thread.sleep(20); ZippedBagit bag7 = bag6.updateZippedBagit("resource_0007", "url7"); Thread.sleep(20); ZippedBagit bag8 = new ZippedBagit("resource_0008", "id_0018", "url8"); Thread.sleep(20); ZippedBagit bag9 = new ZippedBagit("resource_0009", "id_0019", "url9"); Thread.sleep(20); ZippedBagit bag10 = bag3.updateZippedBagit("resource_0010", "url10"); bag10.setVersion(15); collection = Arrays.asList(bag1, bag2, bag3, bag4, bag5, bag6, bag7, bag8, bag9, bag10); } catch (InterruptedException ex) { Logger.getLogger(CrudRunner.class.getName()).log(Level.SEVERE, null, ex); } return collection; } }
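Because these are plain static factories, a smoke test needs nothing beyond calling them. The enclosing class appears to be CrudRunner (per the logger call above); the sketch below is illustrative only and assumes just that class and the factory methods shown.

// Minimal, illustrative smoke test for the fixture factories above.
// Only the static factories shown in this file are assumed to exist.
public class FixtureSmokeTest {
    public static void main(String[] args) {
        System.out.println("mets files: " + CrudRunner.createMetsFiles().size());
        System.out.println("classifications: " + CrudRunner.createClassifications().size());
        System.out.println("bagits: " + CrudRunner.createZippedBagits().size());
    }
}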
Changes in the Position of Epidural Catheters Associated with Patient Movement

Background: Epidural catheter movement has been noted with change of patient position and can result in inadequate anesthesia. This study was designed to measure movement and to develop a technique that minimizes catheter displacement.

Methods: In 255 parturients requesting epidural anesthesia for labor or cesarean section, a multiorificed lumbar epidural catheter was inserted with the patient in the sitting flexed position. The distance to the epidural space, length of catheter inserted, and amount of catheter position change as the patient moved from the sitting flexed to sitting upright and then to the lateral decubitus position were measured before the catheter was secured to the skin. Adequacy of analgesia, the need for catheter manipulation, and whether the patient was considered obese were noted. Data were grouped according to body mass index (BMI): < 25, 25-30, and > 30 kg/m2.

Results: The groups did not differ with respect to the length of catheter initially inserted or changes in catheter position between initial taping and removal. The distance to the epidural space differed significantly among the groups, increasing with greater BMI. Catheters frequently appeared to be drawn inward with position change from the sitting flexed to lateral decubitus position, with the greatest change seen in patients with BMI > 30. Only nine catheters were associated with inadequate analgesia, four of which were replaced. No analgesic failures occurred in the BMI > 30 group. In patients judged by the anesthesiologist to be obese or to have an obese back, BMI was greater, and the distance to the epidural space and the magnitude of catheter movement with position change were greater than in those who were not obese.

Conclusions: Epidural catheters moved a clinically significant amount with reference to the skin in all BMI groups as patients changed position. If catheters had been secured to the skin before position change, many would have been pulled partially out of the epidural space. To minimize the risk of catheter displacement, particularly in obese patients, we recommend that multiorificed catheters be inserted at least 4 cm into the epidural space and that patients assume the sitting upright or lateral position before securing the catheter to the skin.
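The only computation behind the grouping above is BMI itself, weight divided by height squared. As a quick illustrative aid (not from the paper), the study's cutoffs translate directly to code:

// Illustrative only: the BMI grouping described in the abstract.
// Thresholds (<25, 25-30, >30 kg/m^2) are from the text; all names here are invented.
public class BmiGrouping {
    enum BmiGroup { UNDER_25, FROM_25_TO_30, OVER_30 }

    static double bmi(double weightKg, double heightM) {
        return weightKg / (heightM * heightM); // BMI = kg / m^2
    }

    static BmiGroup group(double bmi) {
        if (bmi < 25.0) return BmiGroup.UNDER_25;
        if (bmi <= 30.0) return BmiGroup.FROM_25_TO_30;
        return BmiGroup.OVER_30;
    }

    public static void main(String[] args) {
        System.out.println(group(bmi(95.0, 1.65))); // 95 / 1.65^2 = 34.9 -> OVER_30
    }
}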
package cn.mrain22.security.mySecurity.authentication;

import cn.mrain22.security.mySecurity.Properties.LoginType;
import cn.mrain22.security.mySecurity.Properties.SecurityProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.security.web.authentication.SavedRequestAwareAuthenticationSuccessHandler;
import org.springframework.stereotype.Component;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

@Component("marinAuthenticationSuccessHandle")
public class MyAuthenticationSuccessHandle extends SavedRequestAwareAuthenticationSuccessHandler {

    // Custom authentication success handler.
    private Logger logger = LoggerFactory.getLogger(getClass());

    @Autowired
    private SecurityProperties securityProperties;

    @Autowired
    private ObjectMapper objectMapper;

    @Override
    public void onAuthenticationSuccess(HttpServletRequest httpServletRequest,
                                        HttpServletResponse httpServletResponse,
                                        Authentication authentication) throws IOException, ServletException {
        logger.info("Login succeeded!");
        if (LoginType.JSON.equals(securityProperties.getBrowser().getLoginType())) {
            // JSON mode: return the Authentication object to the front end as JSON.
            // The ObjectMapper utility is needed for this; Spring injects it automatically.
            httpServletResponse.setContentType("application/json;charset=UTF-8"); // set the response content type
            httpServletResponse.getWriter().write(objectMapper.writeValueAsString(authentication)); // write the Authentication
        } else {
            // Otherwise delegate to the parent class's method, which performs the redirect.
            super.onAuthenticationSuccess(httpServletRequest, httpServletResponse, authentication);
        }
    }
}
<reponame>statisticsnorway/dapla-migration-onprem-agent package no.ssb.dapla.migration.agent.storage; import com.fasterxml.jackson.annotation.JsonProperty; import no.ssb.dapla.migration.agent.cmd.AbstractAgentMonitor; import java.util.concurrent.atomic.AtomicLong; public class StorageImportMonitor extends AbstractAgentMonitor<StorageImportMonitor.StorageImportInfo> { private final AtomicLong processedFiles = new AtomicLong(0); private final int totalFiles; public StorageImportMonitor(int totalFiles) { this.totalFiles = totalFiles; } public long incrementProcessedFiles() { return processedFiles.incrementAndGet(); } @Override public StorageImportInfo build() { return new StorageImportInfo(processedFiles.get(), totalFiles); } @SuppressWarnings("WeakerAccess") public static class StorageImportInfo { @JsonProperty("processed-files") public final long processedFiles; @JsonProperty("total-files") public final int totalFiles; public StorageImportInfo(long processedFiles, int totalFiles) { this.processedFiles = processedFiles; this.totalFiles = totalFiles; } } }
Renin-Angiotensin system antagonists and mortality in patients with heart failure--reply.

To the Editor: In a propensity-matched cohort of patients with heart failure with preserved ejection fraction (HFPEF), Dr Lund and colleagues reported that the use of renin-angiotensin system (RAS) antagonists (angiotensin-converting enzyme inhibitors or angiotensin II receptor blockers) was associated with lower all-cause mortality. Yet multiple prospective randomized placebo-controlled trials have demonstrated no efficacy for mortality with these medications in patients with HFPEF, and a meta-analysis including all randomized controlled trials published to date also found no mortality benefit (hazard ratio, 1.02; 95% CI, 0.94-1.12; P=.60). Can there be effectiveness of drugs in clinical practice in the absence of demonstrated efficacy for the same disease state in randomized controlled trials?

While Lund et al assembled a propensity-matched cohort based on 43 recorded variables, there were significant imbalances in the distribution of more than 10 baseline characteristics. Furthermore, the registry failed to record data on contraindications or intolerance to RAS antagonist therapy, thus comparing eligible and treated patients with RAS antagonists with those ineligible for treatment due to contraindications or intolerance. Confounding by contraindication or intolerance has been shown to introduce bias in observational studies, and propensity matching may not be able to overcome this confounding.

This study also did not analyze outcomes from the time of initiation of RAS antagonist therapy but instead from the time of registry enrollment. This may have introduced prevalent-user bias due to left censoring and the effect of prevalent drug use on baseline characteristics. In a prospective, observational, propensity-matched study comparing new users of angiotensin II receptor blockers with nonusers, after having excluded HFPEF patients with documented contraindications or intolerance, no association was found with mortality, consistent with findings from randomized controlled trials.

To provide complementary evidence to randomized controlled trials and better inform clinical practice, best practices for comparative effectiveness research should be rigorously applied.
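The letter's methodological point is easier to see against the mechanics of matching itself. Below is a deliberately minimal, illustrative sketch of greedy 1:1 nearest-neighbor matching on estimated propensity scores with a caliper; it is not the study's procedure, all names are invented, and, as the letter stresses, no amount of matching on recorded variables can address unmeasured contraindication or intolerance.

// Illustrative greedy 1:1 nearest-neighbor propensity matching with a caliper.
import java.util.*;

public class PropensityMatch {
    /** For each treated score, take the nearest unused control within the caliper. */
    static Map<Integer, Integer> match(double[] treated, double[] control, double caliper) {
        boolean[] used = new boolean[control.length];
        Map<Integer, Integer> pairs = new HashMap<>();
        for (int t = 0; t < treated.length; t++) {
            int best = -1;
            double bestDist = caliper; // anything farther than the caliper is rejected
            for (int c = 0; c < control.length; c++) {
                double d = Math.abs(treated[t] - control[c]);
                if (!used[c] && d <= bestDist) { best = c; bestDist = d; }
            }
            if (best >= 0) { used[best] = true; pairs.put(t, best); }
        }
        return pairs;
    }

    public static void main(String[] args) {
        double[] treated = {0.31, 0.62, 0.80};
        double[] control = {0.30, 0.33, 0.65, 0.95};
        // Treated #2 (0.80) finds no control within 0.05 and stays unmatched.
        System.out.println(match(treated, control, 0.05)); // {0=0, 1=2}
    }
}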
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.integration.cluster.util;

import org.apache.activemq.artemis.api.core.Interceptor;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.tests.util.CountDownSessionFailureListener;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.cluster.ClusterManager;
import org.junit.Assert;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class SameProcessActiveMQServer implements TestableServer {

   private final ActiveMQServer server;

   public SameProcessActiveMQServer(ActiveMQServer server) {
      this.server = server;
   }

   @Override
   public boolean isActive() {
      return server.isActive();
   }

   public void setIdentity(String identity) {
      server.setIdentity(identity);
   }

   public boolean isStarted() {
      return server.isStarted();
   }

   public void addInterceptor(Interceptor interceptor) {
      server.getRemotingService().addIncomingInterceptor(interceptor);
   }

   public void removeInterceptor(Interceptor interceptor) {
      server.getRemotingService().removeIncomingInterceptor(interceptor);
   }

   public void start() throws Exception {
      server.start();
   }

   public void stop() throws Exception {
      server.stop();
   }

   public CountDownLatch crash(ClientSession... sessions) throws Exception {
      return crash(true, sessions);
   }

   public CountDownLatch crash(boolean waitFailure, ClientSession... sessions) throws Exception {
      CountDownLatch latch = new CountDownLatch(sessions.length);
      CountDownSessionFailureListener[] listeners = new CountDownSessionFailureListener[sessions.length];
      for (int i = 0; i < sessions.length; i++) {
         listeners[i] = new CountDownSessionFailureListener(latch, sessions[i]);
         sessions[i].addFailureListener(listeners[i]);
      }

      ClusterManager clusterManager = server.getClusterManager();
      clusterManager.flushExecutor();
      clusterManager.clear();
      Assert.assertTrue("server should be running!", server.isStarted());
      server.stop(true);

      if (waitFailure) {
         // Wait to be informed of failure
         boolean ok = latch.await(10000, TimeUnit.MILLISECONDS);
         Assert.assertTrue("Failed to stop the server! Latch count is " + latch.getCount() + " out of " + sessions.length, ok);
      }
      return latch;
   }

   public ActiveMQServer getServer() {
      return server;
   }
}
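A typical use of this wrapper in a failover test is to boot a broker, open sessions against it, and then call crash() to stop the broker while asserting that every session observes the failure. The skeleton below is illustrative only: obtaining the embedded ActiveMQServer and the ClientSessions is assumed to happen elsewhere in the harness, and the server must already be started, since crash() asserts isStarted().

import java.util.concurrent.CountDownLatch;

import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.tests.integration.cluster.util.SameProcessActiveMQServer;

// Illustrative failover-test skeleton; CrashDemo and simulateCrash are invented names.
public class CrashDemo {

   static void simulateCrash(ActiveMQServer embedded, ClientSession... sessions) throws Exception {
      SameProcessActiveMQServer testable = new SameProcessActiveMQServer(embedded);
      // crash() registers a failure listener per session, stops the broker,
      // and (by default) waits up to 10 s for every listener to fire.
      CountDownLatch latch = testable.crash(sessions);
      System.out.println("sessions notified: " + (sessions.length - latch.getCount()));
   }
}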
/** This class contains the WML coding tables for elements
 *  and attributes needed by the WmlParser. */
public abstract class Wml {

  /** Creates a WbxmlParser with the WML code pages set. */
  public static WbxmlParser createParser() {
    WbxmlParser p = new WbxmlParser();
    p.setTagTable(0, TAG_TABLE);
    p.setAttrStartTable(0, ATTR_START_TABLE);
    p.setAttrValueTable(0, ATTR_VALUE_TABLE);
    return p;
  }

  public static WbxmlSerializer createSerializer() {
    WbxmlSerializer s = new WbxmlSerializer();
    s.setTagTable(0, TAG_TABLE);
    s.setAttrStartTable(0, ATTR_START_TABLE);
    s.setAttrValueTable(0, ATTR_VALUE_TABLE);
    return s;
  }

  // Token tables; the block comment after each entry gives its WBXML token value.
  public static final String[] TAG_TABLE = {
    null, /* 05 */ null, /* 06 */ null, /* 07 */ null, /* 08 */
    null, /* 09 */ null, /* 0A */ null, /* 0B */ null, /* 0C */
    null, /* 0D */ null, /* 0E */ null, /* 0F */ null, /* 10 */
    null, /* 11 */ null, /* 12 */ null, /* 13 */ null, /* 14 */
    null, /* 15 */ null, /* 16 */ null, /* 17 */ null, /* 18 */
    null, /* 19 */ null, /* 1A */ null, /* 1B */
    "a", /* 1C */ "td", /* 1D */ "tr", /* 1E */ "table", /* 1F */
    "p", /* 20 */ "postfield", /* 21 */ "anchor", /* 22 */ "access", /* 23 */
    "b", /* 24 */ "big", /* 25 */ "br", /* 26 */ "card", /* 27 */
    "do", /* 28 */ "em", /* 29 */ "fieldset", /* 2A */ "go", /* 2B */
    "head", /* 2C */ "i", /* 2D */ "img", /* 2E */ "input", /* 2F */
    "meta", /* 30 */ "noop", /* 31 */ "prev", /* 32 */ "onevent", /* 33 */
    "optgroup", /* 34 */ "option", /* 35 */ "refresh", /* 36 */ "select", /* 37 */
    "small", /* 38 */ "strong", /* 39 */ null, /* 3A */ "template", /* 3B */
    "timer", /* 3C */ "u", /* 3D */ "setvar", /* 3E */ "wml", /* 3F */
  };

  public static final String[] ATTR_START_TABLE = {
    "accept-charset", /* 05 */ "align=bottom", /* 06 */ "align=center", /* 07 */ "align=left", /* 08 */
    "align=middle", /* 09 */ "align=right", /* 0A */ "align=top", /* 0B */ "alt", /* 0C */
    "content", /* 0D */ null, /* 0E */ "domain", /* 0F */ "emptyok=false", /* 10 */
    "emptyok=true", /* 11 */ "format", /* 12 */ "height", /* 13 */ "hspace", /* 14 */
    "ivalue", /* 15 */ "iname", /* 16 */ null, /* 17 */ "label", /* 18 */
    "localsrc", /* 19 */ "maxlength", /* 1A */ "method=get", /* 1B */ "method=post", /* 1C */
    "mode=nowrap", /* 1D */ "mode=wrap", /* 1E */ "multiple=false", /* 1F */ "multiple=true", /* 20 */
    "name", /* 21 */ "newcontext=false", /* 22 */ "newcontext=true", /* 23 */ "onpick", /* 24 */
    "onenterbackward", /* 25 */ "onenterforward", /* 26 */ "ontimer", /* 27 */ "optimal=false", /* 28 */
    "optimal=true", /* 29 */ "path", /* 2A */ null, /* 2B */ null, /* 2C */
    null, /* 2D */ "scheme", /* 2E */ "sendreferer=false", /* 2F */ "sendreferer=true", /* 30 */
    "size", /* 31 */ "src", /* 32 */ "ordered=true", /* 33 */ "ordered=false", /* 34 */
    "tabindex", /* 35 */ "title", /* 36 */ "type", /* 37 */ "type=accept", /* 38 */
    "type=delete", /* 39 */ "type=help", /* 3A */ "type=password", /* 3B */ "type=onpick", /* 3C */
    "type=onenterbackward", /* 3D */ "type=onenterforward", /* 3E */ "type=ontimer", /* 3F */ null, /* 40 */
    null, /* 41 */ null, /* 42 */ null, /* 43 */ null, /* 44 */
    "type=options", /* 45 */ "type=prev", /* 46 */ "type=reset", /* 47 */ "type=text", /* 48 */
    "type=vnd.", /* 49 */ "href", /* 4A */ "href=http://", /* 4B */ "href=https://", /* 4C */
    "value", /* 4D */ "vspace", /* 4E */ "width", /* 4F */ "xml:lang", /* 50 */
    null, /* 51 */ "align", /* 52 */ "columns", /* 53 */ "class", /* 54 */
    "id", /* 55 */ "forua=false", /* 56 */ "forua=true", /* 57 */ "src=http://", /* 58 */
    "src=https://", /* 59 */ "http-equiv", /* 5A */ "http-equiv=Content-Type", /* 5B */
    "content=application/vnd.wap.wmlc;charset=", /* 5C */ "http-equiv=Expires", /* 5D */
    null, /* 5E */ null, /* 5F */
  };

  public static final String[] ATTR_VALUE_TABLE = {
    ".com/", /* 85 */ ".edu/", /* 86 */ ".net/", /* 87 */ ".org/", /* 88 */
    "accept", /* 89 */ "bottom", /* 8A */ "clear", /* 8B */ "delete", /* 8C */
    "help", /* 8D */ "http://", /* 8E */ "http://www.", /* 8F */ "https://", /* 90 */
    "https://www.", /* 91 */ null, /* 92 */ "middle", /* 93 */ "nowrap", /* 94 */
    "onpick", /* 95 */ "onenterbackward", /* 96 */ "onenterforward", /* 97 */ "ontimer", /* 98 */
    "options", /* 99 */ "password", /* 9A */ "reset", /* 9B */ null, /* 9C */
    "text", /* 9D */ "top", /* 9E */ "unknown", /* 9F */ "wrap", /* A0 */
    "www.", /* A1 */
  };
}
The gene for the longest known Escherichia coli protein is a member of helicase superfamily II The Escherichia coli rnt gene, which encodes the RNA-processing enzyme RNase T, is cotranscribed with a downstream gene. Complete sequencing of this gene indicates that its coding region encompasses 1,538 amino acids, making it the longest known protein in E. coli. The gene (tentatively termed lhr for long helicase related) contains the seven conserved motifs of the DNA and RNA helicase superfamily II. An approximately 170-kDa protein is observed by sodium dodecyl sulfate-polyacrylamide gel electrophoresis of 35S-labeled extracts prepared from cells in which lhr is under the control of an induced T7 promoter. This protein is absent when lhr is interrupted or when no plasmid is present. Downstream of lhr is the C-terminal region of a convergent gene with homology to glutaredoxin. Interruptions of chromosomal lhr at two different positions within the gene do not affect the growth of E. coli at various temperatures in rich or minimal medium, indicating that lhr is not essential for usual laboratory growth. lhr interruption also has no effect on anaerobic growth. In addition, cells lacking Lhr recover normally from starvation, plate phage normally, and display normal sensitivities to UV irradiation and H2O2. Southern analysis showed that no other gene closely related to lhr is present on the E. coli chromosome. These data expand the known size range of E. coli proteins and suggest that very large helicases are present in this organism.
package main

import (
	"fmt"
	"sort"
)

// node pairs an input key with the accumulated sum of its values.
type node struct {
	Key string
	Num int
}

func main() {
	var n int
	fmt.Scan(&n)

	// Sum the numbers per key. A missing key reads as the zero value,
	// so no explicit initialization is needed.
	m := make(map[string]int)
	for i := 0; i < n; i++ {
		var s string
		var num int
		fmt.Scan(&s, &num)
		m[s] += num
	}

	// Copy into a slice and sort: shorter keys first, ties broken lexicographically.
	list := make([]node, 0, len(m))
	for k, v := range m {
		list = append(list, node{Key: k, Num: v})
	}
	sort.Slice(list, func(i, j int) bool {
		if len(list[i].Key) == len(list[j].Key) {
			return list[i].Key < list[j].Key
		}
		return len(list[i].Key) < len(list[j].Key)
	})

	for _, v := range list {
		fmt.Println(v.Key, v.Num)
	}
}
CURRENT DIRECTIONS IN PSYCHOLOGICAL SCIENCE

The Cognitive Neuroscience of Aging and Culture

Research into the cognitive neuroscience of aging has revealed exciting and unexpected changes to the brain over the lifespan. However, studies have mostly been conducted on Western populations, raising doubts about the universality of age-related changes. Cross-cultural investigation of aging provides a window into the stability of changes with age due to neurobiology, as well as into the flexibility of aging due to life experiences that impact cognition. Behavioral findings suggest that different cultures process distinct aspects of information and employ diverse information-processing strategies. The study of aging allows us to identify those age-related neural changes that persist across cultures as well as the changes that are driven by culture-specific life experiences.

KEYWORDS: cognition; aging; culture; cognitive neuroscience
Trove Of Cannabis Plants Found In Ancient Tomb In China

For the first time, archaeologists have unearthed well-preserved cannabis plants, which were placed on a corpse some 2,500 years ago.

Clara Scarlet Rose, Oct 13, 2016

Researchers have unearthed 13 cannabis plants in an ancient tomb in northern China, suggesting that prehistoric central Eurasians had ritualistic or medicinal uses for the mind-altering plant. The archaeology team, led by Hongen Jiang, is calling the discovery of this ancient burial in northwest China's Turpan Basin an "extraordinary cache of cannabis," which adds significantly to understanding how ancient Eurasian cultures used marijuana for ritual and medicinal purposes.

According to the report published in the journal Economic Botany, the cannabis plants were practically intact, except for most of the flowers, which had been clipped. Jiang concluded that, because of their excellent condition, the plants were freshly and locally harvested in late summer.

The scientists say that the "extraordinary cache" of 13 "nearly whole" female cannabis plants was arranged diagonally like a shroud over the body of a dead man. The man was about 35 years old, appeared to be Caucasian and might have been a shaman, they say.

National Geographic quotes the study's lead author, Hongen Jiang: "This is the first time ever that archaeologists have recovered complete cannabis plants, as well as the first incidence of their use as a 'shroud' or covering in a human burial."

This discovery also adds to a growing collection of archaeological evidence showing that cannabis consumption was "very popular" across the Eurasian steppe thousands of years ago. Cannabis seeds were also recently discovered in a Siberian tomb of a woman who likely died of breast cancer and may have used cannabis to "cope with the symptoms of her illnesses," the researchers say.

The flowering heads of the Jiayi plants were covered with glandular trichomes, a sort of tiny plant "hair" that secretes resin containing psychoactive cannabinoids such as THC. The researchers suspect that this marijuana was grown and harvested for its psychoactive resin, which may have been inhaled as a sort of incense or consumed in a beverage for ritual or medicinal purposes.

Cannabis has been helping people with medical issues for a very long time, including making their trip to the afterlife a pleasant experience.
The Attempto Tübingen Robot Soccer Team

This paper describes the Attempto Tübingen Robot Soccer Team 2004. The robot platform, its sensors and actuators, and the software system running on the onboard computer are presented. The main part of the paper concentrates on our current scientific work on modelling and tracking a dynamic environment. Information about dynamic objects moving around in the environment can be useful especially in RoboCup to predict the motion of the ball, to avoid collisions, or to consider objects which cannot be detected over a short period of time. In our robot soccer team we recently implemented an efficient object and landmark detection algorithm based on images of our omnidirectional vision system. To track the detected objects, we use a tracking approach which on the one hand combines the specific advantages of Kalman and particle filters and on the other hand uses an interacting multiple model filtering approach to model object dynamics as accurately as possible. In addition to the general tracking techniques we present our real-time approach to detect and track uncoloured objects, such as a standard soccer ball.
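All of the tracking machinery named in the abstract (Kalman filters, particle filters, IMM) rests on the same predict/update cycle. As a generic illustration only, and explicitly not the team's implementation, a scalar Kalman filter step looks like this (all noise values are assumed):

// Generic 1-D constant-position Kalman filter step (illustrative sketch).
public class Kalman1D {
    double x = 0.0, p = 1.0;          // state estimate and its variance
    final double q = 0.01, r = 0.25;  // process and measurement noise (assumed values)

    void step(double z) {
        p += q;                        // predict: variance grows by process noise
        double k = p / (p + r);        // Kalman gain
        x += k * (z - x);              // update with the measurement residual
        p *= (1.0 - k);                // shrink variance after the update
    }

    public static void main(String[] args) {
        Kalman1D f = new Kalman1D();
        for (double z : new double[]{1.2, 0.9, 1.1, 1.0}) f.step(z);
        System.out.printf("x=%.3f p=%.3f%n", f.x, f.p);
    }
}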
Clinical presentation, diagnosis and therapeutic management of Dipylidium caninum (Cestoda: Dilepididae) infection in a domestic cat (Felis catus): a case report

Abstract

Background: Dipylidium caninum, a zoonotic cyclophyllidean tapeworm, mainly infects dogs, cats, and occasionally humans as well. Here, we present D. caninum infection in a domestic cat of about one year of age with a history of intermittent diarrhea and stool containing whitish, cooked-rice-like soft particles. Methods: The case was identified by thorough clinical, coprological, and parasitological examinations, and treated accordingly. Results: During the physical examination, the cat was found to be infested with fleas, and coprological investigation revealed the presence of gravid segments of cestodes. By preparing a permanent slide, we conducted a microscopic examination, and the cestode was confirmed as D. caninum. The cat was treated with albendazole and levamisole, which were ineffective; additionally, levamisole showed toxicity. Then, we administered niclosamide, which completely cured the animal. On re-examination after a week, feces were found negative for eggs/gravid segments of any cestode. Conclusion: Niclosamide was found effective against dipylidiasis and can be used to treat similar infections in pets.

Introduction

Dipylidium caninum (Cestoda: Dilepididae) is an arthropod-borne zoonotic tapeworm that is commonly known as the dog tapeworm, flea tapeworm, double-pored tapeworm, or cucumber tapeworm, and it has a global distribution. The adult worm is about 46 cm long and mainly infects dogs and cats; however, it can also cause infection in humans. It is primarily transmitted by fleas such as Ctenocephalides canis, C. felis, and Pulex irritans, and the dog biting louse, Trichodectes canis. Animals infected with D. caninum shed proglottids with feces, which rupture in the environment, releasing thousands of eggs. Developmental stages of fleas and lice become infected through the consumption of eggs, in which cysticercoids develop. Cysticercoids become infective when the developmental stage of the flea moults to an adult and starts feeding on the host's blood. Usually, after ~36 h of a blood meal, the cysticercoid becomes infective inside the flea. Definitive hosts get the infection by accidentally ingesting infected fleas or lice. Adult worms live in the small intestine and can cause damage to tissues at the site of attachment, leading to the development of enteritis, diarrhea, and hemorrhages in the mucosal surface of the intestine. The infection is clinically manifested by retarded growth, weakness, loss of appetite, intermittent diarrhea, and shedding of whitish/creamy white segments with feces. Humans, particularly children, become infected with the parasites by accidentally ingesting infected fleas containing viable cysticercoids while playing with dogs and cats. Pruritus develops when gravid segments pass through the anus of the infected host. Here, we describe the clinical presentation, diagnosis, and therapeutic management of Dipylidium caninum infection in a domestic cat.

Case report

A domestic cat (Felis catus) of about one year of age was presented to the Department of Parasitology, Faculty of Veterinary Science, Bangladesh Agricultural University with a history of dullness, inappetence, diarrhea, the presence of creamy white, cooked-rice-like soft materials in feces, and loss of appetite. Additionally, the cat was heavily infested with fleas and showed marked restlessness characterized by self-scratching and biting of the body coat. The owner had been using albendazole to deworm the cat quarterly. There was no history of fever or vomiting. The cat had normal pink conjunctiva and was found otherwise healthy. Gross examination of feces showed the presence of gravid segments of some tapeworm(s). Therefore, segments were collected and processed for parasitological examinations. Gravid segments were separated from feces and kept in normal saline solution. Under the stereoscope, the gravid segments were found to have a cucumber-seed shape. A gravid segment was fixed with a fixative containing alcohol, formalin, and acetic acid (AFA solution), stained with Semichon's carmine stain, and examined under the microscope with a 10x objective by preparing permanent slides. After diagnosis, the cat was first treated with albendazole (@ 10 mg/kg body weight, orally, single dose); after 14 days, albendazole was administered at double the previous dose, and coprological examination was continued. As albendazole was found ineffective, levamisole was administered (@ 10 mg/kg body weight, orally, single dose), and the cat was intensively monitored by close inspection. Levamisole showed signs of toxicity and was not effective. After 14 days the cat was again treated with niclosamide (@ 20 mg/kg body weight, orally, single dose); the cat was monitored in the same way, and coprological examination was conducted on day 0, day 7, and day 28.

Discussion

To identify the parasites, gravid segments were examined by preparing permanent slides. The stained segments were cucumber-seed-shaped with two sets of reproductive organs, which open marginally (Figure 1), conforming to the morphological features of D. caninum. At the same time, the cat was infested with fleas, the intermediate host of the tapeworm. The occurrence of D. caninum infection in cats has been reported previously. As treatment, the cat was medicated with commonly used anthelmintics, namely albendazole and levamisole; however, the worm was refractory to both drugs, even at the higher dose of albendazole. Additionally, levamisole showed toxicity characterized by shivering and drooling of saliva; however, this was successfully managed. When the medication with albendazole and levamisole failed, we administered niclosamide, and the cat stopped shedding segments within a week. The cat resumed regular feeding and defecation. The cat's owner was also advised to treat it with ivermectin (pour-on) to make it free from fleas. Taken together, niclosamide can be used to treat similar infections in animals. An in vitro study can be conducted to further validate the efficacy of niclosamide on the worm.
US President George Bush has said Hamas must renounce its call to destroy Israel after its election triumph, but admitted the stunning result was a "wake-up call" for Palestinian leaders.

Bush gave his first reaction to the stunning Hamas win in a White House news conference on Thursday. He said the United States would not deal with Hamas, which comfortably won a Palestinian election victory, unless it renounced its call to destroy Israel.

"The United States does not support a political party that wants to destroy our ally Israel," Bush said. "People must renounce that part of their platform. A political party that articulates the destruction of Israel as part of its platform is a party with which we will not deal. If your platform is the destruction of Israel, it means you're not a partner in peace. We're interested in peace," he said, addressing fears that the Hamas win would further stall US peace efforts in the Middle East.

Bush portrayed the strong Hamas showing as more a reflection of discontent among Palestinians about how they were being governed than anger at the situation with Israel. "It's a wake-up call to the leadership. Obviously, people were not happy with the status quo. The people are demanding honest government," he said.

Bush also said he would like US-backed moderate Palestinian leader Mahmoud Abbas to remain in office. "We'd like him to stay in power," Bush said, when asked whether Abbas should remain in office in light of the vote results.

US Secretary of State Condoleezza Rice has said there could be no Middle East peace process if Hamas, winner of the Palestinian election, refused to recognise Israel's right to exist. "You can't have a peace process if you're not committed to the right of your partner to exist," she told the World Economic Forum in Switzerland on Thursday, speaking by videolink. "And I think you will hear the international community speak clearly on exactly those principles over the next day. There will be some difficult choices before those in whom the Palestinian people are placing their trust."

She was speaking after unofficial election results on Thursday showed a shock victory for the Islamist group Hamas over the long-dominant Fatah faction. Official results were due around 1700 GMT.

Rice said the election had been peaceful and fair, with very high turnout. But she said the US position on Hamas, which it considers a terrorist organisation, was unchanged. "As we have said, you cannot have one foot in politics and the other in terror," Rice added. "Anyone who wants to govern the Palestinian people and do so with the support of the international community has got to be committed to a two-state solution, must be committed to the right of Israel to exist. But if there is to be a future that can answer the aspirations for peace of the Palestinian people ... then it is going to have to be a future that renounces violence and terrorism."

Rice also voiced support for Palestinian President Mahmoud Abbas. "She told President Abbas the US administration will continue to back him and his policies," presidential spokesman Nabil Abu Rdainah said. "She listened to his point of view and they agreed to continue contacts."
<filename>spass/visor/migrations/0003_indicador_nombre.py<gh_stars>0 # Generated by Django 3.2.5 on 2021-08-07 13:14 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('visor', '0002_auto_20210728_1722'), ] operations = [ migrations.AddField( model_name='indicador', name='nombre', field=models.CharField(blank=True, max_length=150, null=True, verbose_name='Nombre'), ), ]
<gh_stars>10-100 #!/usr/bin/python try: integer_types = (int, long) except NameError: # Python 3 integer_types = (int,) xrange = range class CompleteGraphEdgeColoring: """Find an edge coloring for a complete graph.""" def __init__(self, graph): """The algorithm initialization.""" if graph.is_directed(): raise ValueError("the graph is directed") self.graph = graph self.color = dict() self.m = 0 # graph.e() is slow for edge in self.graph.iteredges(): if edge.source == edge.target: raise ValueError("a loop detected") else: self.color[edge] = None # edge.source < edge.target self.m += 1 if len(self.color) < self.m: raise ValueError("edges are not unique") def run(self): """Executable pseudocode.""" if self.graph.v() % 2 == 1: self.run_odd() else: self.run_even() def run_odd(self): """Edge coloring for n odd (n colors) in O(n^2) time.""" n = self.graph.v() # Node numbering. D = dict((node, i) for (i, node) in enumerate(self.graph.iternodes())) # O(n) # Edge coloring. for edge in self.graph.iteredges(): # O(E)=O(n^2) time c = (D[edge.source] + D[edge.target]) % n self.color[edge] = c def run_even(self): """Edge coloring for n even (n-1 colors).""" n = self.graph.v() removed_node = next(self.graph.iternodes()) removed_edges = list(self.graph.iteroutedges(removed_node)) # O(n) time self.graph.del_node(removed_node) # O(n) time, removed with edges self.run_odd() # Find missing colors. free = dict((node, set(xrange(n-1))) for node in self.graph.iternodes()) for edge in self.graph.iteredges(): # O(E)=O(n^2) time c = self.color[edge] free[edge.source].remove(c) free[edge.target].remove(c) # Restoring nodes and edges. self.graph.add_node(removed_node) for edge in removed_edges: # O(n) time assert edge.source == removed_node c = free[edge.target].pop() assert len(free[edge.target]) == 0 # only one color was free self.graph.add_edge(edge) if edge.source > edge.target: edge = ~edge self.color[edge] = c def _get_color(self, edge): """Get edge color.""" if edge.source > edge.target: edge = ~edge return self.color[edge] def show_colors(self): """Show edge coloring (undirected graphs).""" L = [] for source in self.graph.iternodes(): L.append("{} : ".format(source)) for edge in self.graph.iteroutedges(source): # It should work for multigraphs. c = self._get_color(edge) L.append("{}({}) ".format(edge.target, c)) L.append("\n") print("".join(L)) # EOF
Elections in Cameroon, scheduled for October 7, will be held against a backdrop of uncertainty in the English-speaking regions and amid accusations that the government is planning to rig the polls.

In exactly two months, Cameroonians will be going to the polls to elect their president. Twenty-eight candidates are in the running, including 86-year-old incumbent Paul Biya, who has been in power for 36 years. He is Africa's second longest-serving leader after his neighbour Teodoro Obiang Nguema of Equatorial Guinea.

But there are doubts inside and outside Cameroon that this election will be free and fair, especially in the Anglophone regions in the west, bordering Nigeria, which have been in turmoil for the past two years, when residents started to press their claims for more autonomy or even independence. According to the United Nations, 200 000 people have been internally displaced and 40 000 have sought refuge in neighbouring countries.

The government denies having any role in the atrocities taking place in the region. But both the UN and non-governmental organisations such as Human Rights Watch have filed reports on crimes against humanity by armed rebels and government troops.

Fifty-four-year-old teacher Ngum Julius, who fled the English-speaking town of Njinikom, believes elections cannot be held there. "All businesses are shut down. All government services are shut down, except the military. Many people have gone away. Too many people are dying, too many people are being displaced. We need to sit at the same table as Cameroonians and talk it out and find a solution," Julius said.

Cameroon is also dealing with attacks by Boko Haram militants in the north, and a spillover of violence from the Central African Republic.

Scepticism about the elections is not limited to the Anglophone regions of the country. In Bonamoussadi, a student residential area of the capital Yaounde, young people say that they should be discussing football instead of wasting time with elections, because the process is not working anyway. Tatah Hans is one of them. "I am a Cameroonian. It is normal to vote. I have to vote. But I am facing so many difficulties because very soon they will start voting and I don't have a voter's card. I don't know what to do," he told media.

Voter apathy is evident; only seven million of a total of 13 million voters have actually registered to cast their ballot. But supporters of Paul Biya, like Jacques Fame Ndongo, say elections will go ahead despite the rebels' attempts. "We know that their mad dream is to stop the organization of the presidential election in those two regions, but that mad dream will never come to fruition because the population of the Northwest and the Southwest regions is determined to express its total support to the head of state and his party, the CPDM (Cameroon People's Democratic Movement)," he said.

Meanwhile, the main opposition political party, the Social Democratic Front (SDF), cautions the government against fraud. Denise Nkemlemo, the SDF's communication secretary, says the election management body has already started to break the law, for instance by setting up voting stations where they shouldn't: "We are not supposed to have voting stations inside the presidency; military camps are also out of the question," she said. According to Nkemlemo, at polling stations in those places in previous elections, the CPDM's votes outnumbered those for the SDF by a wide margin. "When our representatives wanted to raise questions, the military came with guns and told them 'we don't make noise here.' The SDF will not tolerate the lack of respect of the electoral law," she added.

These claims are rejected by Enow Abrams Egbe, the electoral body's board chair, who says critics have ulterior motives. "It is normal that detractors play their game, but we are playing our role with serenity and everything is moving on smoothly," he said.
MHD Braking and Joules Heating Effect in a Rotating Confined Cylindrical Cavity Packed with Liquid Metal

The present research work investigates the MHD braking and Joules heating effect in a confined rotating cylindrical cavity packed with liquid metal. All walls of the cylindrical cavity, except the top and bottom portions of the sidewall, are made of electrically as well as thermally conducting material. The cavity is exposed to an axial magnetic field along with an axial temperature gradient and is packed with an incompressible, electrically conducting liquid. The MHD braking effect is experienced within the rotating liquid metal flow due to the presence of a strong axial magnetic field. It is discerned that MHD braking governs the primary as well as the secondary flow and reduces the Joules heating effect. Moreover, the internal heat generation due to Joules heating is governed by the rotating speed, the Hartmann strength, and the temperature gradient.

INTRODUCTION

Applications in which electrically conducting fluid flow is controlled by MHD braking in the presence of the Joules heating effect are encountered in various industries: liquid metal pumping during the casting process, self-cooled liquid metal blankets in fusion reactors, geothermal source studies, plasmas, MHD power generators, and MHD micro-pumps in medical science that circulate blood to maintain blood sugar levels. The presence of an axial temperature gradient in confined rotating flow in a cylindrical cavity has been investigated by a number of researchers. In the case of confined axisymmetric swirling flow, the effect of an axial magnetic field on vortex breakdown as well as on flow stability has also been discussed, flow control in cylindrical cavities of different cross-sections and configurations has been investigated, and the effects of a partially rotating lid in a confined cylindrical annulus have been examined. Rahman et al. have discussed the Joules heating effect on the walls of square lid-driven cavities with a heated semi-circular source object attached to one wall. In the presence of mixed convection, the effects of the magnetic field and the Joule parameter were also investigated by them. They visualized the impact of the direction of the moving lid on the flow patterns, and concluded that a decreasing Joules effect impedes heat flow. A similar study concluded that at a lower Joules heating effect the bulk fluid temperature and heat transfer rate are enhanced. Another study discussed the nature of the boundary layer over a rotating disk in a porous medium with Hall current and thermal radiation, concluding that heat transfer is enhanced under the influence of the magnetic field and diminished by increasing the Hall current. The effect of a radial magnetic field on the Taylor-Couette flow of a viscous, incompressible, electrically conducting fluid has been investigated analytically, and similar studies have followed. The influence of an axial magnetic field on an incompressible, electrically conducting nanofluid over a stretchable rotating flat surface in the presence of thermal radiation has also been investigated. The effect of the magnetic field in a vertical annulus packed with gallium in the presence of natural convection has been studied as well, with the conclusion that in the casting process the magnetic field improves the quality of liquid-metal products.

Other authors have discussed circulation control in MHD rotating flows, investigating the 3-D laminar flow in a revolving cylindrical cavity packed with a viscous conducting fluid with a braking disc. From the above literature reviews, it is evident that MHD braking in the presence of the Joule heating effect has hardly been discussed, and that MHD braking in a rotating flow with Joules heating within an electrically conducting rotating cylindrical container needs to be emphasized. In the present research work, MHD braking in the presence of the Joules heating effect has been analyzed.

NUMERICAL TECHNIQUE AND SOLUTION PROCEDURE

In the present study, we considered a confined cylindrical cavity packed with an incompressible, viscous, metallic fluid having Prandtl number 0.015 under the influence of an axial magnetic field and an axial temperature gradient. The bottom wall, top wall, and rotating portion of the sidewall of the cylindrical cavity are electrically as well as thermally conducting, as shown in Fig 1. Due to the presence of the electrically conducting wall, the Joules heating effect is considered in the present numerical simulation.

Governing Equation

The governing equations comprise the momentum equation in the axial direction, the momentum equation in the azimuthal direction, the energy equation, and the equation for the magnetic flux, where Fl_r, Fl_y, and Fl_θ are the Lorentz forces in the radial, axial, and azimuthal directions, respectively. (The equation bodies and the Lorentz-force expressions did not survive extraction; a generic sketch is given at the end of this article.) Boundary conditions are prescribed on the side insulating and stationary portion of the cylindrical wall and at the side cold rotating cylindrical wall.

The Solution Procedure

The solution procedure for the momentum equation is based on a pressure correction technique. The details of the procedure are discussed in the cited references.

Code Validation

The present code has been validated against available numerical and analytical solutions. A detailed discussion of code validation has been included in the author's previously published article, for Pr = 0.015, AR = 1.0, Re = 100, Ha = 100.

MHD Braking Effect: Strong Hartmann Effect at High Joules Heating

The effect of increasing magnetic field strength on an electrically conducting fluid in an electrically conducting cylindrical cavity is investigated in this section in the presence of the Joules heating effect. The governing parameters are Re = 1000, Pr = 0.015, AR = 2.0, Ri = 1.0, J = 10, and 60 ≤ Ha ≤ 200. The axial profiles of the isotherms at r = 0.8, i.e. near the vertical rotating cylindrical wall, indicate that the maximum internal heat generation occurs at Ha = 60 and decreases with a further increase in the Hartmann effect, as shown in Fig 2 (a). In this axial profile, a maximum temperature T_max = 2.5 can be visualized near the bottom hot wall, i.e. at r = 0.8, z = 0.4. However, internal heat generation is reduced to T_max = 2.2 at r = 0.8, z = 0.8 for Ha = 200. A peculiar observation can be made from Fig 2 (b), Fig 2 (c), and Fig 2 (d): with increasing Hartmann strength, MHD braking develops within the flow. Both the axial profiles of the radial velocity component along the axis and the radial profiles of the azimuthal velocity along the radius reduce to zero. Hence, it is concluded that the primary and radial flows within the core of the cavity are clogged with increasing Hartmann strength. The presence of the axial magnetic field develops Lorentz forces in the radial and azimuthal directions (i.e., Fl_r and Fl_θ, respectively).
Hence, MHD braking develops, which restricts the primary flow in the azimuthal direction and the radial flow in the radial direction in the presence of the electrically conducting rotating cylinder. Since the axial magnetic field cannot develop any Lorentz force in the axial direction, only the axial flow subsists within the liquid metal. Due to this imperative aspect of the condition, internal heat generation is reduced during the MHD braking process. Since the flow is restricted within the core of the cavity, the convective mode of heat transfer is suppressed.

The impact of axial heat transfer in the presence of a strong magnetic field (Ha = 200) on Joules heating and MHD braking is observed in this section. There are no extensive changes in the magnitude of the isotherms with an increase in Richardson number, as shown in Fig 4 (a). Hence, it is clear that under the influence of a strong axial magnetic field, axial heat transfer has very little impact on the Joules heating effect and internal heat generation. It is clear from the axial profile of the radial velocity component that the magnitude of the velocity is close to zero at Ri = 1.0. Nevertheless, the magnitude is intensified with an increase in Richardson number, Fig 4 (b). In this particular condition, the liquid metal has no motion in the azimuthal direction, which can be interpreted from Fig 4 (c) and Fig 4 (d); the azimuthal component of the velocity remains zero within most of the core region of the cavity. Moreover, the MHD braking effect can be sensed in the azimuthal direction (i.e. the azimuthal plane), not in the axial direction (i.e. the meridional plane), in the presence of a strong magnetic field even at a high axial temperature gradient. Due to an increase in Richardson number, the axial-temperature-gradient-driven heat flow is enhanced; hence the buoyancy effect in the axial direction dominates the flow. However, the presence of a strong axial magnetic field slows down the buoyancy effect and the flow velocity, and the convective heat flow rate is dominated by the conduction mode of heat transfer within the core.

The magnitude of the average Nusselt number near the bottom hot wall, at different Joules parameters, with an increase in Richardson number can be seen in Fig 5 (a). At J = 4, the insensitivity of Nu to Ri is evident. At the higher magnitudes of J = 6 and 8, the magnitude of Nu increases with an increase in Ri. This indicates that the axial temperature gradient has a significant effect on convection heat transfer only at a higher Joules parameter, which can be seen in the presence of the strong axial magnetic field. Near the side rotating cold wall, the Richardson number has an insignificant effect on the Nusselt number, as shown in Fig 5 (b). However, near the top cold stationary wall, the Richardson number effect can be interpreted from Fig 5 (c). The MHD braking effect remains stronger at the mid portion of the core of the cavity (i.e. at the mid portion of the electrically conducting rotating cold cylinder) in the liquid metal flow at z = 1.0 even at Re = 3000.

The mode of heat transfer in the vicinity of the bottom hot wall with an increase in the Joules heating effect can be seen in Fig 6 (a). It is interesting to visualize that the magnitude of Nu decreases with an increase in Joules heating. This is due to internal heat generation in the core of the cavity, which generates an adverse temperature gradient from the hot plate to the core of the cavity. However, in the vicinity of the top stationary cold wall and the side rotating cold wall, the magnitude of Nu increases with an increase in the Joules heating effect, as shown in Fig 6 (b) and Fig 6 (c). Hence, it can be concluded that convection dominates over conduction heat transfer at a higher Joules heating effect with an increase in Reynolds number (the momentum diffusivity varies with increasing Re due to internal heat generation) near the cold surfaces.

CONCLUSIONS

The effects of MHD braking in the presence of Joules heating are investigated. The MHD braking effect becomes stronger in the presence of a strong axial magnetic field, which controls both the primary and secondary flows. A strong axial temperature gradient has little effect on MHD braking in the azimuthal direction and only disturbs the MHD braking effect in the axial direction. At higher Reynolds number, the MHD braking effect is reduced and internal heat generation is encouraged within the flow. With an increase in the Joules heating parameter, the conduction mode of heat transfer dominates convection at higher Re near the bottom hot wall. However, in the presence of a strong Joules heating parameter, the convective heat flow dominates the conduction mode of heat transfer in the vicinity of the vertical rotating cold cylinder and the top stationary cold wall with an increase in Reynolds number. Only by increasing the rotating speed of the cylindrical wall does the convective heat flow dominate conduction throughout the cavity. The MHD braking effect analyzed in the present study can be implemented in industrial applications, particularly to control electrically conducting fluid flow and internal heat generation. In the case of the braking systems of machines, MHD braking can be employed.
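Since the bodies of the governing equations in the Numerical Technique section did not survive extraction, the following is a generic quasi-static (low-magnetic-Reynolds-number) non-dimensional system of the kind such a setup usually reduces to, with an axial field, Boussinesq buoyancy, and Joule heating. The authors' exact scaling and notation are assumptions here, not recovered from the source.

% Generic sketch only; the paper's precise non-dimensionalization is unknown.
\begin{aligned}
  \nabla\cdot\mathbf{u} &= 0, \\
  \frac{\partial\mathbf{u}}{\partial t} + (\mathbf{u}\cdot\nabla)\mathbf{u}
    &= -\nabla p + \frac{1}{Re}\,\nabla^{2}\mathbf{u}
       + Ri\,T\,\hat{\mathbf{e}}_{z}
       + \frac{Ha^{2}}{Re}\,\mathbf{j}\times\hat{\mathbf{e}}_{z}, \\
  \frac{\partial T}{\partial t} + (\mathbf{u}\cdot\nabla)T
    &= \frac{1}{Re\,Pr}\,\nabla^{2}T + J\,\lvert\mathbf{j}\rvert^{2}, \\
  \mathbf{j} &= -\nabla\phi + \mathbf{u}\times\hat{\mathbf{e}}_{z},
  \qquad \nabla^{2}\phi = \nabla\cdot(\mathbf{u}\times\hat{\mathbf{e}}_{z}).
\end{aligned}

In such a formulation the Lorentz term j x e_z has only radial and azimuthal components, consistent with the paper's statement that the axial magnetic field exerts no Lorentz force in the axial direction, and the J |j|^2 term is the Joule heating source governed by Ha, the rotation rate, and the temperature gradient.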
1. Field of the Invention The present invention relates to an apparatus for positioning a piece of meat in a given position onto the saddle of a trimming conveyor. In the following description, reference will be made almost exclusively to pork loins as the kind of pieces of meat that may be positioned with the apparatus. Even though the application of the apparatus according to the invention to pork loins is certainly very interesting, the invention is not and should not be understood as being restricted to the processing of such pieces of meat exclusively. 2. Brief Description of the Prior Art In the meat processing industry, it is of common practice to subdivide into various parts the carcasses of the animals that have been slaughtered. Usually, each carcass is divided into two halves, each of which contains a loin portion. Usually also, each loin portion is separated from the outer rib portion of the half and then passed through a fat-trimming machine called a "loin puller". This machine includes a conveyor for moving the half lengthwise past a contoured knife that is shaped and positioned in such a manner as to extract the maximum amount of lean meat, and to leave the fat. An example of such a loin puller is disclosed in Applicant's Canadian patent No. 2,012,356 issued on Dec. 14, 1993. It is also of common practice to further trim each loin manually as it leaves the loin puller, in order to remove a maximum of fat before weighing the loin and packaging the same. Such manual trimming is carried out on a conveyor hereinafter called the "trimming" or "receiving" conveyor, on which are mounted a plurality of saddles at given intervals, each saddle being shaped and sized to receive the loin upside down. A plurality of operators manually trim the fat from the exposed surfaces of the loins positioned on the saddles while the same move with the conveyor. So far, the positioning of the loins into the saddles of the trimming conveyor has been done manually.
package mock import ( "bytes" "compress/gzip" "errors" "io/ioutil" "log" "github.com/aws/aws-sdk-go/service/s3" "github.com/m-mizutani/uguisu/pkg/adaptor" ) var s3Objects map[string]map[string][]byte func init() { s3Objects = make(map[string]map[string][]byte) } type S3Client struct { Region string S3Objects map[string]map[string][]byte } func NewS3Mock() (adaptor.S3ClientFactory, *S3Client) { client := &S3Client{} return func(region string) (adaptor.S3Client, error) { client.Region = region client.S3Objects = s3Objects return client, nil }, client } func NewS3Client(region string) (adaptor.S3Client, error) { return &S3Client{}, nil } func (x *S3Client) GetObject(input *s3.GetObjectInput) (*s3.GetObjectOutput, error) { bucket, ok := s3Objects[*input.Bucket] if !ok { return nil, errors.New(s3.ErrCodeNoSuchBucket) } obj, ok := bucket[*input.Key] if !ok { return nil, errors.New(s3.ErrCodeNoSuchKey) } gz, err := gzip.NewReader(bytes.NewReader(obj)) if err != nil { log.Fatal("gzip error in GetObject: ", err) } return &s3.GetObjectOutput{ Body: gz, }, nil } func (x *S3Client) PutObject(input *s3.PutObjectInput) (*s3.PutObjectOutput, error) { memBucket, ok := s3Objects[*input.Bucket] if !ok { memBucket = make(map[string][]byte) s3Objects[*input.Bucket] = memBucket } data, err := ioutil.ReadAll(input.Body) if err != nil { return &s3.PutObjectOutput{}, err } memBucket[*input.Key] = data return &s3.PutObjectOutput{}, nil }
/*------------------------------------------------------------------------------------------------------------------*/ /* SetFrameResult : Get the result of stbINPUT */ /*------------------------------------------------------------------------------------------------------------------*/ STB_INT32 SetFrameResult ( STBHANDLE handle , const STB_FRAME_RESULT *stbINPUTResult ) { STB_INT32 nRet; nRet = IsValidPointer(handle); if(nRet != STB_NORMAL) { return STB_ERR_NOHANDLE; } nRet = IsValidPointer(stbINPUTResult); if(nRet != STB_NORMAL) { return STB_ERR_INVALIDPARAM; } nRet = STB_IsValidValue ( stbINPUTResult ,handle->execFlg ); if(nRet != STB_TRUE) { return STB_ERR_INVALIDPARAM; } handle->nExecuted = STB_FALSE; if( handle->execFlg->bodyTr == STB_TRUE ) { handle->nDetCntBody = stbINPUTResult->bodys.nCount; SetTrackingObjectBody ( &(stbINPUTResult->bodys) ,handle->trBody ); } if( handle->execFlg->faceTr == STB_TRUE ) { handle->nDetCntFace = stbINPUTResult->faces.nCount; SetTrackingObjectFace ( &(stbINPUTResult->faces) ,handle->trFace ); } if( handle->execFlg->gen == STB_TRUE || handle->execFlg->age == STB_TRUE || handle->execFlg->fr == STB_TRUE || handle->execFlg->exp == STB_TRUE || handle->execFlg->gaz == STB_TRUE || handle->execFlg->dir == STB_TRUE || handle->execFlg->bli == STB_TRUE ) { SetFaceObject ( &(stbINPUTResult->faces) ,handle->infoFace ,handle->execFlg , handle->nTraCntMax ); } handle->nInitialized = STB_TRUE; return STB_NORMAL; }
<gh_stars>1000+ /* * Copyright 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ipc.invalidation.util; import java.lang.reflect.Field; import java.lang.reflect.Modifier; /** * A {@link TextBuilder} is an abstraction that allows classes to efficiently append their string * representations and then use them later for human consumption, e.g., for debugging or logging. It * is currently a wrapper around {@link StringBuilder} and {@link Formatter} to give us format and * append capabilities together. All append methods return this TextBuilder so that the method calls * can be chained. * */ public class TextBuilder { private final StringBuilder builder; private final UtilFormatter formatter; /** * Given an {@code object} that is an instance of {@code clazz}, outputs names and values of all * member fields declared on {@code clazz}. This method should be used carefully: * <ol> * <li>This method is expensive. For frequently logged types, an ad hoc * {@link InternalBase#toCompactString} implementation is preferred.</li> * <li>May overflow the stack if there is a cycle in an object graph.</li> * <li>Custom formatters have been implemented for many protos. They will not be used by this * method.</li> * </ol> */ public static void outputFieldsToBuilder(TextBuilder builder, Object object, Class<?> clazz) { Preconditions.checkArgument(clazz.isAssignableFrom(object.getClass())); // Get all the fields and print them using toCompactString if possible; // otherwise, via toString Field[] fields = clazz.getDeclaredFields(); for (Field field : fields) { try { // Ignore static final fields, as they're uninteresting. int modifiers = field.getModifiers(); if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) { continue; } field.setAccessible(true); builder.append(field.getName() + " = "); Object fieldValue = field.get(object); if (fieldValue instanceof InternalBase) { ((InternalBase) fieldValue).toCompactString(builder); } else { builder.append(fieldValue); } builder.append(", "); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } } } /** * Returns an empty TextBuilder to which various objects' string * representations can be added later. */ public TextBuilder() { builder = new StringBuilder(); formatter = new UtilFormatter(builder); } /** * Appends the string representation of {@code c} to this builder. * * @param c the character being appended */ public TextBuilder append(char c) { builder.append(c); return this; } /** * Appends the string representation of {@code i} to this builder. * * @param i the integer being appended */ public TextBuilder append(int i) { builder.append(i); return this; } /** * Appends the toString representation of {@code object} to this builder. 
*/ public TextBuilder append(Object object) { if (object instanceof InternalBase) { return append((InternalBase) object); } else { builder.append(object); return this; } } /** * Appends the {@code InternalBase#toCompactString} representation of {@code object} to this * builder. */ public TextBuilder append(InternalBase object) { if (object == null) { return append("null"); } object.toCompactString(this); return this; } /** * Appends the comma-separated {@code InternalBase#toCompactString} representations of * {@code objects} to this builder. */ public TextBuilder append(Iterable<? extends InternalBase> objects) { if (objects == null) { return this; } boolean first = true; for (InternalBase object : objects) { if (first) { first = false; } else { builder.append(", "); } append(object); } return this; } /** Appends the {@link Bytes#toString} representation of {@code bytes} to this builder. */ public TextBuilder append(byte[] bytes) { if (bytes == null) { return append("null"); } Bytes.toCompactString(this, bytes); return this; } /** * Appends the string representation of {@code l} to this builder. * * @param l the long being appended */ public TextBuilder append(long l) { builder.append(l); return this; } /** * Appends the string representation of {@code b} to this builder. * * @param b the boolean being appended */ public TextBuilder append(boolean b) { builder.append(b); return this; } /** * Appends {@code s} to this builder. * * @param s the string being appended */ public TextBuilder append(String s) { builder.append(s); return this; } /** * Writes a formatted string to this using the specified format string and * arguments. * * @param format the format as used in {@link java.util.Formatter} * @param args the arguments that are converted to their string form using * {@code format} */ public TextBuilder appendFormat(String format, Object... args) { formatter.format(format, args); return this; } @Override public String toString() { return builder.toString(); } }
<filename>spring-xsuaa/src/test/java/com/sap/cloud/security/xsuaa/DummyXsuaaServiceConfiguration.java package com.sap.cloud.security.xsuaa; public class DummyXsuaaServiceConfiguration implements XsuaaServiceConfiguration { private String clientId; private String uaaDomain; private String appId; public DummyXsuaaServiceConfiguration() { } public DummyXsuaaServiceConfiguration(String clientId, String appId) { this.clientId = clientId; this.appId = appId; } @Override public String getClientId() { return clientId != null ? clientId : "clientId"; } @Override public String getClientSecret() { return "secret"; } @Override public String getUaaUrl() { return "https://subdomain.authentication.eu10.hana.ondemand.com"; } @Override public String getAppId() { return appId; } @Override public String getUaaDomain() { return uaaDomain; } }
/** * This file is generated by the Angular 2 template compiler. * Do not edit. */ /* tslint:disable */ import * as import0 from 'ionic-angular/components/virtual-scroll/virtual-scroll'; import * as import1 from '@angular/core/src/change_detection/change_detection'; import * as import2 from '@angular/core/src/linker/view_utils'; import * as import3 from '@angular/core/src/linker/view'; export class Wrapper_VirtualScroll { context:import0.VirtualScroll; changed:boolean; /*private*/ _virtualScroll:any; /*private*/ _bufferRatio:any; /*private*/ _approxItemWidth:any; /*private*/ _approxItemHeight:any; /*private*/ _approxHeaderWidth:any; /*private*/ _approxHeaderHeight:any; /*private*/ _approxFooterWidth:any; /*private*/ _approxFooterHeight:any; /*private*/ _headerFn:any; /*private*/ _footerFn:any; /*private*/ _virtualTrackBy:any; constructor(p0:any,p1:any,p2:any,p3:any,p4:any,p5:any,p6:any,p7:any,p8:any) { this.changed = false; this._virtualScroll = import1.UNINITIALIZED; this._bufferRatio = import1.UNINITIALIZED; this._approxItemWidth = import1.UNINITIALIZED; this._approxItemHeight = import1.UNINITIALIZED; this._approxHeaderWidth = import1.UNINITIALIZED; this._approxHeaderHeight = import1.UNINITIALIZED; this._approxFooterWidth = import1.UNINITIALIZED; this._approxFooterHeight = import1.UNINITIALIZED; this._headerFn = import1.UNINITIALIZED; this._footerFn = import1.UNINITIALIZED; this._virtualTrackBy = import1.UNINITIALIZED; this.context = new import0.VirtualScroll(p0,p1,p2,p3,p4,p5,p6,p7,p8); } check_virtualScroll(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._virtualScroll,currValue))) { this.changed = true; this.context.virtualScroll = currValue; this._virtualScroll = currValue; } } check_bufferRatio(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._bufferRatio,currValue))) { this.changed = true; this.context.bufferRatio = currValue; this._bufferRatio = currValue; } } check_approxItemWidth(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._approxItemWidth,currValue))) { this.changed = true; this.context.approxItemWidth = currValue; this._approxItemWidth = currValue; } } check_approxItemHeight(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._approxItemHeight,currValue))) { this.changed = true; this.context.approxItemHeight = currValue; this._approxItemHeight = currValue; } } check_approxHeaderWidth(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._approxHeaderWidth,currValue))) { this.changed = true; this.context.approxHeaderWidth = currValue; this._approxHeaderWidth = currValue; } } check_approxHeaderHeight(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._approxHeaderHeight,currValue))) { this.changed = true; this.context.approxHeaderHeight = currValue; this._approxHeaderHeight = currValue; } } check_approxFooterWidth(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._approxFooterWidth,currValue))) { this.changed = true; this.context.approxFooterWidth = currValue; this._approxFooterWidth = currValue; } } 
check_approxFooterHeight(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._approxFooterHeight,currValue))) { this.changed = true; this.context.approxFooterHeight = currValue; this._approxFooterHeight = currValue; } } check_headerFn(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._headerFn,currValue))) { this.changed = true; this.context.headerFn = currValue; this._headerFn = currValue; } } check_footerFn(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._footerFn,currValue))) { this.changed = true; this.context.footerFn = currValue; this._footerFn = currValue; } } check_virtualTrackBy(currValue:any,throwOnChange:boolean,forceUpdate:boolean):void { if ((forceUpdate || import2.checkBinding(throwOnChange,this._virtualTrackBy,currValue))) { this.changed = true; this.context.virtualTrackBy = currValue; this._virtualTrackBy = currValue; } } detectChangesInternal(view:import3.AppView<any>,el:any,throwOnChange:boolean):boolean { var changed:any = this.changed; this.changed = false; if (!throwOnChange) { this.context.ngDoCheck(); } return changed; } }
Aug 7, 2013 - DeeJ Waiting for a game you want to play can be tough. We know that. You see the shores of a world you want to visit, and then we tell you that the beaches have yet to open. There are high tides and low tides for information. In between the big waves, gamers can get thirsty for updates. Some members of the Bungie Community spend that time asking “Are we there yet?” Others among you don’t ask questions – you look for your own answers. This week, we’re focusing on a YouTuber who acts like an investigative reporter when he contemplates Destiny. The brave new world of our game is the site of an unsolved mystery, and he’s a detective on a mission. Tell us about yourself, eager gumshoe. Who are you? What’s your history as a gamer? DU : My name is David. I'm an avid gamer and a huge Bungie fan. The first Halo game I played was Halo 2 (sorry, not a CE veteran). It was the first game I really fell in love with. From there, of course, I played all the Halo games, loving each one. At some point, I started branching out and enjoying a wide variety of games. I really enjoy open world RPGs, other First Person Shooters, Strategy games, some MMOs, Adventure games, and of course, Indies. I definitely love being immersed in Fantasy or SciFi worlds (in games, books and movies), exploring them, learning their history and lore, etc. How would you describe Destiny Updates? What service do you provide to the Destiny Community? DU : Destiny Updates is a YouTube channel dedicated to providing the latest news and information on Destiny. I also analyze trailers and other things to try to find hidden details. I include a lot of my own speculation as well, which my viewers seem to love. Basically, my aim is to make it very easy for people to keep up with Destiny, and make the wait for its release a bit less painful. Instead of scouring the internet and reading through long articles, you can just watch a few quick, and hopefully entertaining, videos. You make some wild guesses about what we’ve shown you so far. Do your viewers weigh in on some of your more random hunches? DU : I try to keep my guesswork grounded in reality, but I suppose I may get excited at times. When things aren't yet fully explained, our imaginations like to fill in the gaps. The answer to your question is yes. Definitely. I'm always reading comments and taking in the opinions of my viewers. Recently, I started a series dedicated to just that. It's called “Dear Traveler,” and it’s all about the community's ideas, hopes, and wishes for Destiny. What are the missing clues about Destiny that tickle your curiosity the most? DU : How can I possibly narrow it down to one thing?! There's always the release date. “Are we there yet?” We were off to such a great start. Don't make me kick your ass. DU : In all seriousness, I suppose a major one would be if players on different platforms could join up together. I think that would be an amazing feature to bring the community together, and possibly even lessen console wars (yeah right). Another one would be a PC release (not necessarily on launch day, but hopefully at some point). Again, I'm all about bringing more players together to enjoy Destiny, from all different platforms. I already know that Destiny will be an amazing game, so those types of question matter more to me than normal gameplay elements. Someday, solving the mysteries will be a hands-on experience. Once rampant speculation gives way to sweet gameplay, what will you talk about on ‘Updates? 
DU : There will always be news to cover: game updates, DLC, and, of course, future titles. I think news and information will remain the "core" of the channel. Besides that, I believe that a game as big as Destiny will have no shortage of great videos to make once the game has been released: exploring the world and showing off what it has to offer, walkthroughs on how to achieve things, hidden secrets and Easter eggs, top ten funny or awesome moments. The list goes on and on. Your first official act of business as a Guardian will be to create a character and choose a class. Which class will you choose? DU : I've honestly not been able to decide! They all seem so awesome, it's just really difficult. I'll probably need to wait for more info before making that decision. Fair enough. A good detective needs facts. Once you've given form to your hero, where will you take him (or her) first? DU : I guess I'd be most excited to find some more beautiful vistas like we saw in the gameplay demo and just sit and stare. Then I'd go send some Fallen souls into the abyss. Also, Super Good Advice always seemed like an awesome gun, I may try to find that. You'll have no shortage of awesome guns to find. A man with your drive for the truth and attention to detail should have a great time seeking out glorious treasures. Final Question: What would you ask a Bungie employee if you were stuck with them in an elevator? Remember, if you get too pushy for information, the wait to be rescued could be Awkward! DU : Well seeing as that employee probably can't say much, I would likely ask something more general, like: Overall, do you think fans will spend more hours in competitive multiplayer? Or in the Story modes? And then I might ask something more cliché, like: What's the most awesome part of working at Bungie? We all have our own reasons. I'm biased, but I think the hyper-curiosity of our vocal community is a great thing about working on this project. Not everyone has such a loud cheering section while they do their work.
Seroprevalence of HIV, Hepatitis B, Hepatitis C and Syphilis at the Abidjan NBTC From January 2018 To December 2020 (Ivory Coast) The objective of this study is to analyze the seroprevalence of HIV, hepatitis B, hepatitis C and syphilis at the National Blood Transfusion Center (NBTC) in Côte d'Ivoire from January 2018 to December 2020, in order to contribute to transfusion safety in Ivory Coast. Materials and Methods: This is a retrospective study conducted at the NBTC in Abidjan, a three-year review covering the period from January 2018 to December 2020. Biological analyses (HBsAg, anti-HCV antibodies, HIV serology, syphilitic serology and blood group testing) were carried out on 469,863 blood donations. The donors were volunteers aged between 18 and 60 years. The data were entered and analyzed in Excel. The statistical test used was the chi-square test; a value of p < 0.05 was considered statistically significant. Results: The prevalence of hepatitis B was the highest at 65.5‰; syphilis stood at 1.6‰, HIV at 8.6‰ and hepatitis C at 19‰. HIV prevalence was higher for new donations than for repeat donations, and the difference was statistically significant. Conclusion: It would be important to make blood donors more aware of the modes of transmission of HIV, hepatitis C and syphilis. Blood donors should also be encouraged to be vaccinated against hepatitis B.
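To illustrate the kind of chi-square comparison described in the methods (e.g., HIV positivity in new versus repeat donations), here is a minimal sketch. The counts below are invented for illustration only; they are not the study's data.

from scipy.stats import chi2_contingency

#             positive  negative
observed = [[120,  99880],    # new donations (hypothetical counts)
            [ 45, 149955]]    # repeat donations (hypothetical counts)

chi2, p, dof, expected = chi2_contingency(observed)
print(f"chi2 = {chi2:.1f}, dof = {dof}, p = {p:.3g}")
# As in the abstract, p < 0.05 would be read as a statistically
# significant difference between the two donor groups.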
[TOKYO] Japan and Indonesia agreed Wednesday to cooperate on maritime security at a time when both countries are embroiled in sea rows with China. Beijing asserts sovereignty over almost all of the resource-rich South China Sea, despite rival claims from South-east Asian neighbours. Indonesia has no dispute with China over ownership of reefs or islets. But China's expansive claims overlap with Indonesia's exclusive economic zone - waters where a state has the right to exploit resources - around the Natunas, a remote scattering of islands with rich fishing grounds. In June, Indonesian President Joko Widodo toured the islands on a warship, in a move seen as sending a strong message to Beijing to respect his country's sovereignty. In Tokyo, Indonesia's coordinating minister for maritime affairs Luhut Panjaitan and Japanese Foreign Minister Fumio Kishida met to exchange documents launching the cooperation framework, which covers maritime security and economic development of remote islets. "Japan highly values cooperation with Indonesia in maritime affairs," Mr Kishida told Mr Panjaitan. The agreement is aimed at helping Indonesia strengthen its capacity in maritime security and promote economic development of remote islands, a foreign ministry official told AFP. Australia earlier said it is considering joint patrols with Indonesia in the South China Sea. Japan, which has a territorial row with China over disputed islands in the East China Sea, has worked to strengthen ties with members of the Association of Southeast Asian Nations, repeatedly stressing that maritime disputes should be addressed according to law. The Philippines took China to the Hague-based Permanent Court of Arbitration, which ruled in July that there was no legal basis to China's claims - a verdict Beijing vehemently dismissed.
Dissolution profile of novel composite pellet cores based on different ratios of microcrystalline cellulose and isomalt. There is growing interest in the application of inert cores as starting materials for pharmaceutical pellet manufacturing; they offer a relatively simple manufacturing technology compared with an extrusion/spheronisation process. The major objective of this study was to investigate the effect of the composition of the core material on the drug release profile. Pure microcrystalline cellulose (MCC), isomalt and different types of novel composite MCC-isomalt cores were layered with a model drug (sodium diclofenac) and coated with an acrylic polymer. The effect of osmolality in the gastrointestinal tract was simulated using glucose as an osmotically active agent during in vitro dissolution tests. The results demonstrated the dependence of the drug dissolution profile on the ratio of MCC to isomalt in the core and the influence of the osmotic properties of the dissolution medium. Isomalt used in the composite core was able to decrease the vulnerability of the dissolution kinetics to changes in the osmotic environment.
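The abstract does not state how the dissolution profiles were compared, but a common convention in the field is the f2 similarity factor; the sketch below shows that calculation on invented dissolution data, purely as an illustration of profile comparison.

import math

# Hypothetical % drug dissolved at matched time points (not the study's data)
reference = [12, 30, 55, 78, 92]   # e.g., a pure-MCC core
test      = [10, 26, 50, 74, 90]   # e.g., an MCC-isomalt composite core

n = len(reference)
mean_sq_diff = sum((r - t) ** 2 for r, t in zip(reference, test)) / n
f2 = 50 * math.log10(100 / math.sqrt(1 + mean_sq_diff))
print(f"f2 = {f2:.1f}")   # f2 >= 50 is conventionally read as "similar profiles"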
Fluorescence-activated cell sorting of senescent fibroblasts Introduction: COPD is associated with cellular senescence, hallmarks of which include increased cell size, autofluorescence, senescence-associated β-galactosidase (SA-β-Gal) staining, mitochondrial dysfunction, reduced proliferation and cell cycle inhibitor expression. Dilution of these markers by the proliferation of non-senescent cells makes studying senescence in proliferative cells, such as fibroblasts, challenging. Aim: To assess whether senescent fibroblasts can be isolated from a mixed COPD fibroblast population. Method: Fibroblasts were isolated from COPD patient lung resections (n=6). Cells were sorted using a FACSAria Fusion into a senescence-enriched population (largest and most autofluorescent) and a non-senescent population (smallest and least autofluorescent). Senescence was investigated in these populations using SA-β-Gal staining for senescent cell identification, iCELLigence technology to assess proliferation, the Seahorse assay to investigate mitochondrial function and RT-qPCR to measure cell cycle inhibitor expression. Results: Senescence-enriched fibroblasts were larger and more autofluorescent and displayed greater SA-β-Gal staining. These cells trended towards reduced proliferation over the first 48 h of growth compared to the non-senescent population. Senescence-enriched fibroblasts showed increased basal mitochondrial function (p < …). Conclusion: These data suggest FACS can isolate senescence-enriched fibroblasts. This technique can be used to study fibroblast senescence mechanisms and provide therapeutic targets for COPD.
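A minimal sketch of the size/autofluorescence gating strategy described above, applied to exported flow cytometry events. The channel names, event distributions and quartile thresholds are assumptions made for illustration; they are not the study's actual gates.

import numpy as np

rng = np.random.default_rng(0)
# Toy events: forward scatter (cell size proxy) and an autofluorescence channel
fsc = rng.normal(50_000, 15_000, size=10_000)
autofl = rng.lognormal(mean=7.0, sigma=0.6, size=10_000)

# "Senescence-enriched": largest, most autofluorescent events (top quartiles)
sen = (fsc > np.percentile(fsc, 75)) & (autofl > np.percentile(autofl, 75))
# "Non-senescent": smallest, least autofluorescent events (bottom quartiles)
non = (fsc < np.percentile(fsc, 25)) & (autofl < np.percentile(autofl, 25))

print(f"senescence-enriched events: {sen.sum()}")
print(f"non-senescent events:       {non.sum()}")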
<filename>speakercount/src/de/fau/cs/jstk/stat/hmm/CState.java /* Copyright (c) 2009-2011 Speech Group at Informatik 5, Univ. Erlangen-Nuremberg, GERMANY <NAME> <NAME> This file is part of the Java Speech Toolkit (JSTK). The JSTK is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The JSTK is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with the JSTK. If not, see <http://www.gnu.org/licenses/>. */ package de.fau.cs.jstk.stat.hmm; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import de.fau.cs.jstk.io.IOUtil; import de.fau.cs.jstk.stat.Density.Flags; import de.fau.cs.jstk.stat.DensityDiagonal; import de.fau.cs.jstk.stat.DensityFull; import de.fau.cs.jstk.stat.Mixture; import de.fau.cs.jstk.stat.MleDensityAccumulator.MleOptions; import de.fau.cs.jstk.stat.MleMixtureAccumulator; /** * The continuous HMM state features an individual mixture density. Though this * results in easier code, the training complexity increases due to the large * number of variables. * * @author sikoried */ public class CState extends State { /** codebook for this state */ Mixture cb = null; MleMixtureAccumulator acc = null; private double ga = 0.; /** cache for mixture posteriors */ private transient double [] p; /** * Generate a new State using a copy of the given codebook. * @param codebook */ public CState(Mixture codebook) { this.cb = codebook.clone(); this.p = new double [codebook.nd]; } /** * Generate a new continuous state by a deep copy of the referenced one. * @param copy */ public CState(CState copy) { this.cb = copy.cb.clone(); this.p = new double [this.cb.nd]; } /** * Create an HMM state by reading from the given InputStream * @param is */ public CState(InputStream is) throws IOException { cb = new Mixture(is); p = new double [cb.nd]; } /** * Write the continuous state to the given OutputStream */ void write(OutputStream os) throws IOException { IOUtil.writeByte(os, getTypeCode()); cb.write(os); } /** * Emission probability of feature vector x */ public double emits(double[] x) { return cb.evaluate2(x); } /** * Initialize a new accumulator. */ public void init() { try { acc = new MleMixtureAccumulator(cb.fd, cb.nd, cb.diagonal() ? DensityDiagonal.class : DensityFull.class); } catch (Exception e) { throw new RuntimeException(e.toString()); } ga = 0.; } /** * Accumulate the given feature vector using the state's posterior. */ public void accumulate(double gamma, double [] x) { // save your breath if (gamma == 0.) return; // evaluate the mixtures and compute posteriors cb.evaluate(x); cb.posteriors(p); // sum up all gammas for later interpolation ga += gamma; // for all densities... for (int j = 0; j < cb.nd; ++j) { // gamma_t(i,k) acc.accumulate(gamma * p[j], x, j); } } public double gamma() { return ga; } /** * Absorb the given state's accumulator and delete if afterwards. */ public void propagate(State source) { CState state = (CState) source; // absorb the statistics acc.propagate(state.acc); } /** * Interpolate the local sufficient statistics with the ones from the * referenced state. 
*/ public void interpolate(State source, double rho) { CState state = (CState) source; acc.interpolate(state.acc, rho / (rho + ga)); } public void pinterpolate(double wt, State source) { cb.pinterpolate(wt, ((CState) source).cb); } /** * Reestimate this state's codebook. */ public void reestimate() { Mixture old = cb.clone(); MleMixtureAccumulator.MleUpdate(old, MleOptions.pDefaultOptions, Flags.fAllParams, acc, cb); } /** * Discard the current accumulator. */ public void discard() { ga = 0.; acc = null; } /** * Generate a String representation of this state. */ public String toString() { StringBuffer buf = new StringBuffer(); buf.append(cb.toString()); return buf.toString(); } /** * Get the type code for continuous states 'c' */ public byte getTypeCode() { return 'c'; } }
The Shark Attack Problem: The Gamma-Poisson Conjugate This chapter introduces the gamma-Poisson conjugate. Many Bayesian analyses consider alternative parameter values as hypotheses. The prior distribution for an unknown parameter can be represented by a continuous probability density function when the number of hypotheses is infinite. There are special cases where a Bayesian prior probability distribution for an unknown parameter of interest can be quickly updated to a posterior distribution of the same form as the prior. In the Shark Attack Problem, a gamma distribution is used as the prior distribution of λ, the mean number of shark attacks in a given year. Poisson data are then collected to determine the number of attacks in a given year. The prior distribution is updated to the posterior distribution in light of this new information. In short, a gamma prior distribution + Poisson data → gamma posterior distribution. The gamma distribution is said to be conjugate to the Poisson distribution.
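As a concrete illustration of the update rule stated above, here is a minimal sketch. The prior hyperparameters and the observed count are invented for illustration; the chapter's actual numbers may differ.

from scipy.stats import gamma

alpha0, beta0 = 2.0, 1.0   # assumed gamma prior on lambda (shape, rate)
attacks, years = 5, 1      # assumed Poisson observation: 5 attacks in 1 year

# Conjugate update: Gamma(alpha, beta) prior + Poisson data
#   -> Gamma(alpha + sum of counts, beta + number of observation periods)
alpha1 = alpha0 + attacks
beta1 = beta0 + years

print(f"posterior: Gamma(shape={alpha1}, rate={beta1}), "
      f"mean = {alpha1 / beta1:.2f} attacks/year")

# SciPy parameterizes the gamma by shape a and scale = 1/rate
lo, hi = gamma.interval(0.95, a=alpha1, scale=1.0 / beta1)
print(f"95% credible interval for lambda: ({lo:.2f}, {hi:.2f})")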
<filename>data_utility.py import os import glob import numpy as np from scipy import misc import torch import torch.nn.functional as F from torch.utils.data import Dataset, DataLoader from torchvision import transforms, utils import random class ToTensor(object): def __call__(self, sample): return torch.tensor(sample, dtype=torch.float32) class dataset_8s(Dataset): def __init__(self, root_dir, dataset_type, img_size, color_invert =True, transform=None,shuffle=False): self.root_dir = root_dir self.shuffle = shuffle self.color_invert = color_invert print(self.root_dir) self.transform = transform self.file_names = [f for f in glob.glob(os.path.join(root_dir, "*.npz")) \ if dataset_type in f] print('number of files loaded:',len(self.file_names)) self.img_size = img_size def __len__(self): return len(self.file_names) def __getitem__(self, idx): data_path = self.file_names[idx] data = np.load(data_path) image = data["image"].reshape(8,16, 80, 80) target = data["target"] meta_target = data["meta_target"] if self.shuffle: for i in range(8): context = image[i,:8, :, :] choices = image[i,8:, :, :] indices = list(range(8)) np.random.shuffle(indices) new_target = indices.index(target[i]) new_choices = choices[indices, :, :] image[i] = np.concatenate((context, new_choices)) target[i] = new_target if meta_target.dtype == np.int8: meta_target = meta_target.astype(np.uint8) del data if self.transform: image = self.transform(image) target = torch.tensor(target, dtype=torch.long) meta_target = self.transform(meta_target) return image, target, meta_target class dataset(Dataset): def __init__(self, root_dir, dataset_type, img_size, color_invert =True, transform=None,shuffle=False): self.root_dir = root_dir self.color_invert = color_invert self.shuffle = shuffle print(self.root_dir) self.transform = transform self.file_names = [f for f in glob.glob(os.path.join(root_dir, "*.npz")) \ if dataset_type in f] print('number of files loaded:',len(self.file_names)) self.img_size = img_size def __len__(self): return len(self.file_names) def get_num_data(self): return self.__len__() def __getitem__(self, idx): data_path = self.file_names[idx] data = np.load(data_path) image = data["image"].reshape(16, 160, 160) resize_image = [] for idx in range(0, 16): resize_image.append(misc.imresize(image[idx,:,:], (self.img_size, self.img_size))) resize_image = np.stack(resize_image) resize_image = resize_image/255.0 if self.color_invert: resize_image = 1-resize_image target = data["target"] meta_target = data["meta_target"] if self.shuffle: context = resize_image[:8, :, :] choices = resize_image[8:, :, :] indices = list(range(8)) np.random.shuffle(indices) new_target = indices.index(target) new_choices = choices[indices, :, :] resize_image = np.concatenate((context, new_choices),axis=0) target = new_target if meta_target.dtype == np.int8: meta_target = meta_target.astype(np.uint8) del data if self.transform: resize_image = self.transform(resize_image) target = torch.tensor(target, dtype=torch.long) meta_target = self.transform(meta_target) return resize_image, target, meta_target class dataset_raven(Dataset): def load_subfolder_files(self,root_dir,dataset_type): all_files = [] for r, d, f in os.walk(root_dir): for filename in f: if dataset_type in filename and 'npz' in filename: all_files.append(os.path.join(r,filename)) return all_files def __init__(self, root_dir, dataset_type, img_size, color_invert =False, transform=None, subfolder = True,shuffle=False): self.root_dir = root_dir self.color_invert = color_invert self.shuffle = 
shuffle print(self.root_dir) #print(os.path.join(root_dir,'*','*.npz')) self.transform = transform if not subfolder: self.file_names = [f for f in glob.glob(os.path.join(root_dir, "*.npz")) \ if dataset_type in f] else: self.file_names = self.load_subfolder_files(root_dir,dataset_type) print('number of files loaded:',len(self.file_names)) self.img_size = img_size def __len__(self): return len(self.file_names) def get_num_data(self): return self.__len__() def __getitem__(self, idx): data_path = self.file_names[idx] data = np.load(data_path) image = data["image"].reshape(16, 160, 160) target = data["target"] meta_target = data["meta_target"] if random.randint(0,1) == 1: context = image[:8, :, :].copy() image[:3,:,:] = context[3:6,:,:] image[3:6,:,:] = context[:3,:,:] if self.shuffle: context = image[:8, :, :] choices = image[8:, :, :] indices = list(range(8)) np.random.shuffle(indices) new_target = indices.index(target) new_choices = choices[indices, :, :] image = np.concatenate((context, new_choices)) target = new_target resize_image = [] for idx in range(0, 16): resize_image.append(misc.imresize(image[idx,:,:], (self.img_size, self.img_size))) resize_image = np.stack(resize_image) resize_image = resize_image / 255.0 if self.color_invert: resize_image = 1-resize_image if meta_target.dtype == np.int8: meta_target = meta_target.astype(np.uint8) del data if self.transform: resize_image = self.transform(resize_image) target = torch.tensor(target, dtype=torch.long) meta_target = self.transform(meta_target) return resize_image, target, meta_target