<filename>src/main/java/com/gmail/jannyboy11/customrecipes/api/crafting/CraftingRecipe.java
package com.gmail.jannyboy11.customrecipes.api.crafting;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.configuration.serialization.ConfigurationSerializable;
import org.bukkit.inventory.CraftingInventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.Recipe;
import org.bukkit.inventory.meta.ItemMeta;

import com.gmail.jannyboy11.customrecipes.api.InventoryUtils;
import com.gmail.jannyboy11.customrecipes.api.Representable;

/**
 * Represents a crafting recipe.
 *
 * @author Jan
 */
public interface CraftingRecipe extends Representable, Recipe, ConfigurationSerializable {

    /**
     * Tests whether the items in the crafting inventory match this crafting recipe.
     *
     * @param craftingInventory the crafting inventory - either a 3x3 workbench inventory, or the 2x2 hand crafting inventory
     * @param world the world in which crafting takes place
     * @return whether the recipe accepts the inventory and world as valid input for the result ItemStack
     */
    public boolean matches(CraftingInventory craftingInventory, World world);

    /**
     * Get the ItemStack that will be put in the result slot of the crafting inventory.
     *
     * @param craftingInventory the crafting inventory - either a 3x3 workbench inventory, or the 2x2 hand crafting inventory
     * @return the crafting result ItemStack
     */
    public ItemStack craftItem(CraftingInventory craftingInventory);

    /**
     * Get the result of this recipe. This is NOT the item that is used by the recipe when the player crafts an item.
     * See {@link com.gmail.jannyboy11.customrecipes.api.crafting.CraftingRecipe#craftItem}
     *
     * @return the result ItemStack
     */
    public ItemStack getResult();

    /**
     * Get the ingredients of this recipe. The order of the ingredients may or may not be important, depending on the recipe type.
     *
     * @return the list of ingredients
     */
    List<? extends CraftingIngredient> getIngredients();

    /**
     * Get the list of ItemStacks that remain in the crafting table after crafting.
     * The size of the list is the same as the size of the ingredient inventory.
     * This method also takes the items out of the crafting inventory.
     *
     * @param craftingInventory the crafting inventory - either a 3x3 workbench inventory, or the 2x2 hand crafting inventory
     * @return the ItemStacks that are left over after crafting completed - can contain null or AIR ItemStacks
     */
    public List<? extends ItemStack> getLeftOverItems(CraftingInventory craftingInventory);

    /**
     * Tests whether the recipe is a special recipe that has multiple ingredient patterns.
     * These special recipes are not shown in the Recipe Book.
     *
     * Vanilla examples include
     * - ArmorDyeRecipe
     * - BannerAddPatternRecipe
     * - BannerDuplicateRecipe
     * - BookCloneRecipe
     * - FireworksRecipe
     * - MapCloneRecipe
     * - MapExtendRecipe
     * - RepairRecipe
     * - ShieldDecorationRecipe
     * - ShulkerBoxDyeRecipe
     * - TippedArrowRecipe
     *
     * @return whether the recipe is special
     */
    public boolean isHidden();

    /**
     * {@inheritDoc}
     */
    @Override
    public default ItemStack getRepresentation() {
        ItemStack result = getResult();
        // Start from a clone of the recipe result, or AIR when there is no result.
        ItemStack representation = (result == null || result.getType() == Material.AIR)
                ? new ItemStack(Material.AIR) : result.clone();
        if (representation.getType() == Material.AIR) {
            // Fall back to a structure block icon when the result is empty or unknown.
            representation = new ItemStack(Material.STRUCTURE_BLOCK);
            ItemMeta meta = representation.getItemMeta();
            meta.setDisplayName(InventoryUtils.getItemName(getResult()));
            meta.setLore(Arrays.asList("Result: UNKNOWN"));
            representation.setItemMeta(meta);
            return representation;
        }
        // Decorate the cloned result with a name and a lore line showing the hidden flag.
        ItemMeta meta = representation.getItemMeta();
        List<String> lore = new ArrayList<>();
        lore.add(ChatColor.DARK_GRAY + "Hidden: " + isHidden());
        meta.setDisplayName(ChatColor.GRAY + InventoryUtils.getItemName(getResult()));
        meta.setLore(lore);
        representation.setItemMeta(meta);
        return representation;
    }

    /**
     * Get the group of the recipe. Groups are used for grouping recipes together in the Recipe Book.
     * Examples of vanilla recipes with a group are recipes that take different kinds of wood, or different colors of wool.
     * The recipe book display mechanic is client side only, <a href="https://twitter.com/dinnerbone/status/856505341479145472">but is subject to change in Minecraft 1.13</a>.
     *
     * @return the group identifier, or the empty string if the recipe has no group
     */
    public default String getGroup() {
        return "";
    }

    /**
     * Check whether this recipe has a group.
     *
     * @return true if the recipe has a group, otherwise false
     */
    public default boolean hasGroup() {
        String group = getGroup();
        return !(group == null || group.isEmpty());
    }
}

declare class SDK {
  constructor(opts?: Options);

  base: string;
  token: string | (() => string);
  auth: string;

  repository: RepositoryAPI;
  issue: IssueAPI;
  pr: PrAPI;
  label: LabelAPI;
  ticket: TicketAPI;
  project: ProjectAPI;
  summary: SummaryAPI;
  staff: StaffAPI;
  wallet: WalletAPI;
  trade: TradeAPI;
  invitation: InvitationAPI;
}

export interface Options {
  base?: string;
  token?: string | (() => string);
}

export interface RepositoryAPI {
  /** List all repositories */
  listRepositories(req: ListRepositoriesRequest): Promise<ListRepositoriesResponse>;
  /** Find repository by id */
  getRepository(req: GetRepositoryRequest): Promise<GetRepositoryResponse>;
  /** Update repository by id */
  updateRepository(req: UpdateRepositoryRequest): Promise<UpdateRepositoryResponse>;
  /** Create a release */
  createRelease(req: CreateReleaseRequest): Promise<CreateReleaseResponse>;
  /** Delete all invitatings */
  deleteInvitatings(req: DeleteInvitatingsRequest): Promise<DeleteInvitatingsResponse>;
  /** Add repository collaborator */
  addCollaborator(req: AddCollaboratorRequest): Promise<AddCollaboratorResponse>;
  /** Delete collaborator */
  deleteCollaborator(req: DeleteCollaboratorRequest): Promise<void>;
}

export interface IssueAPI {
  /** List all issues */
  listIssues(req: ListIssuesRequest): Promise<ListIssuesResponse>;
  /** Find issue by id and driver */
  getIssue(req: GetIssueRequest): Promise<GetIssueResponse>;
  /** Find issue comments by issue's id and driver */
  getComments(req: GetCommentsRequest): Promise<GetCommentsResponse>;
}

export interface PrAPI {
  /** List all prs */
  listPrs(req: ListPrsRequest): Promise<ListPrsResponse>;
}

export interface LabelAPI {
  /** List all labels */
  listLabels(req: ListLabelsRequest): Promise<ListLabelsResponse>;
}

export interface TicketAPI {
  /** Create a ticket */
  createTicket(req: CreateTicketRequest): Promise<CreateTicketResponse>;
  /** List all tickets */
  listTickets(req: ListTicketsRequest): Promise<ListTicketsResponse>;
  /** Find ticket by id */
  getTicket(req: GetTicketRequest): Promise<GetTicketResponse>;
  /** Delete ticket */
  deleteTicket(req: DeleteTicketRequest): Promise<void>;
  /** Create ticket event */
  createTicketEvent(req: CreateTicketEventRequest): Promise<CreateTicketEventResponse>;
}

export interface ProjectAPI {
  /** List all interations of project */
  listInterations(req: ListInterationsRequest): Promise<ListInterationsResponse>;
  /** Create an interation */
  createInteraction(req: CreateInteractionRequest): Promise<CreateInteractionResponse>;
  /** Find interation by id */
  getInteration(req: GetInterationRequest): Promise<GetInterationResponse>;
  /** Update interation */
  updateInteration(req: UpdateInterationRequest): Promise<UpdateInterationResponse>;
  /** Delete interation */
  deleteInteration(req: DeleteInterationRequest): Promise<void>;
  /** List all projects */
  listProjects(req: ListProjectsRequest): Promise<ListProjectsResponse>;
  /** Create a project */
  createProject(req: CreateProjectRequest): Promise<CreateProjectResponse>;
  /** Find project by id */
  getProject(req: GetProjectRequest): Promise<GetProjectResponse>;
  /** Update project */
  updateProject(req: UpdateProjectRequest): Promise<UpdateProjectResponse>;
  /** Delete project */
  deleteProject(req: DeleteProjectRequest): Promise<void>;
  /** Create a project document */
  createProjectDoc(req: CreateProjectDocRequest): Promise<CreateProjectDocResponse>;
  /** Get a project summary */
  getProjectSummary(req: GetProjectSummaryRequest):
Promise<GetProjectSummaryResponse>; /** * Update a project document */ updateProjectDoc(req: UpdateProjectDocRequest): Promise<UpdateProjectDocResponse>; /** * Create a project event */ createProjectEvent(req: CreateProjectEventRequest): Promise<CreateProjectEventResponse>; } export interface SummaryAPI { /** * Get interations summary */ getInteractionsSummary( req: GetInteractionsSummaryRequest ): Promise<GetInteractionsSummaryResponse>; /** * Get tickets summary */ getTicketsSummary(req: GetTicketsSummaryRequest): Promise<GetTicketsSummaryResponse>; /** * Get trades summary */ getTradeSummary(req: GetTradeSummaryRequest): Promise<GetTradeSummaryResponse>; /** * Get trades summary by month */ getTradeSummaryByMonth( req: GetTradeSummaryByMonthRequest ): Promise<GetTradeSummaryByMonthResponse>; /** * Get ticket done summary */ getTicketDoneSummary(req: GetTicketDoneSummaryRequest): Promise<GetTicketDoneSummaryResponse>; /** * Get ticket coverage summary */ getTicketCoverageSummary( req: GetTicketCoverageSummaryRequest ): Promise<GetTicketCoverageSummaryResponse>; } export interface StaffAPI { /** * List staffs */ listStaffs(req: ListStaffsRequest): Promise<ListStaffsResponse>; /** * Create a staff */ createStaff(req: CreateStaffRequest): Promise<CreateStaffResponse>; /** * upsert staff(only for development) */ upsertStaff(req: UpsertStaffRequest): Promise<UpsertStaffResponse>; /** * update a staff */ updateStaff(req: UpdateStaffRequest): Promise<UpdateStaffResponse>; /** * Find staff by id */ getStaff(req: GetStaffRequest): Promise<GetStaffResponse>; /** * Delete staff */ deleteStaff(req: DeleteStaffRequest): Promise<void>; } export interface WalletAPI { /** * Find staff wallet by id */ getStaffWallet(req: GetStaffWalletRequest): Promise<GetStaffWalletResponse>; /** * List wallets */ listWallets(req: ListWalletsRequest): Promise<ListWalletsResponse>; } export interface TradeAPI { /** * List trades */ listTrades(req: ListTradesRequest): Promise<ListTradesResponse>; /** * Create a trade */ createTrade(req: CreateTradeRequest): Promise<CreateTradeResponse>; } export interface InvitationAPI { /** * Create a invitation, 用于发送邀请码 */ createInvitation(req: CreateInvitationRequest): Promise<CreateInvitationResponse>; } export interface ListRepositoriesRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; id?: string; }; } export interface ListRepositoriesResponse { body: { /** * 在第三方服务中的id */ id?: string; /** * 仓库名称 */ name?: string; /** * 仓库全称 */ fullName?: string; /** * url for clone */ gitUrl?: string; /** * url */ htmlUrl?: string; /** * github user username */ owner?: string; /** * 是否私有 */ private?: boolean; /** * 最近push时间 */ pushedAt?: string; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; /** * readme 内容 */ readme?: string; /** * 主题 */ topics?: string[]; collaborators?: string[]; technologies?: string[]; types?: string[]; }[]; headers: { "x-total-count"?: number; }; } export interface GetRepositoryRequest { repositoryId: string; } export interface GetRepositoryResponse { /** * Repo Doc */ body: { /** * 在第三方服务中的id */ id?: string; /** * 仓库名称 */ name?: string; /** * 仓库全称 */ fullName?: string; /** * url for clone */ gitUrl?: string; /** * url */ htmlUrl?: string; /** * github user username */ owner?: string; /** * 是否私有 */ private?: boolean; /** * 最近push时间 */ pushedAt?: string; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; /** * readme 内容 */ readme?: string; /** * 主题 */ topics?: string[]; collaborators?: string[]; 
technologies?: string[]; types?: string[]; }; } export interface UpdateRepositoryRequest { repositoryId: string; /** * Repo Doc */ body: { /** * 在第三方服务中的id */ id?: string; /** * 仓库名称 */ name?: string; /** * 仓库全称 */ fullName?: string; /** * url for clone */ gitUrl?: string; /** * url */ htmlUrl?: string; /** * github user username */ owner?: string; /** * 是否私有 */ private?: boolean; /** * 最近push时间 */ pushedAt?: string; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; /** * readme 内容 */ readme?: string; /** * 主题 */ topics?: string[]; collaborators?: string[]; technologies?: string[]; types?: string[]; }; } export interface UpdateRepositoryResponse { /** * Repo Doc */ body: { /** * 在第三方服务中的id */ id?: string; /** * 仓库名称 */ name?: string; /** * 仓库全称 */ fullName?: string; /** * url for clone */ gitUrl?: string; /** * url */ htmlUrl?: string; /** * github user username */ owner?: string; /** * 是否私有 */ private?: boolean; /** * 最近push时间 */ pushedAt?: string; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; /** * readme 内容 */ readme?: string; /** * 主题 */ topics?: string[]; collaborators?: string[]; technologies?: string[]; types?: string[]; }; } export interface CreateReleaseRequest { repositoryId: string; /** * release of repository */ body: { /** * 暂时不提供 */ version?: string; }; } export interface CreateReleaseResponse { /** * release of repository */ body: { /** * 暂时不提供 */ version?: string; }; } export interface DeleteInvitatingsRequest { repositoryId: string; } export interface DeleteInvitatingsResponse { /** * Repo Doc */ body: { /** * 在第三方服务中的id */ id?: string; /** * 仓库名称 */ name?: string; /** * 仓库全称 */ fullName?: string; /** * url for clone */ gitUrl?: string; /** * url */ htmlUrl?: string; /** * github user username */ owner?: string; /** * 是否私有 */ private?: boolean; /** * 最近push时间 */ pushedAt?: string; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; /** * readme 内容 */ readme?: string; /** * 主题 */ topics?: string[]; collaborators?: string[]; technologies?: string[]; types?: string[]; }; } export interface AddCollaboratorRequest { repositoryId: string; github: string; } export interface AddCollaboratorResponse { /** * Repo Doc */ body: { /** * 在第三方服务中的id */ id?: string; /** * 仓库名称 */ name?: string; /** * 仓库全称 */ fullName?: string; /** * url for clone */ gitUrl?: string; /** * url */ htmlUrl?: string; /** * github user username */ owner?: string; /** * 是否私有 */ private?: boolean; /** * 最近push时间 */ pushedAt?: string; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; /** * readme 内容 */ readme?: string; /** * 主题 */ topics?: string[]; collaborators?: string[]; technologies?: string[]; types?: string[]; }; } export interface DeleteCollaboratorRequest { repositoryId: string; github: string; } export interface ListIssuesRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; id?: string; repository?: string; state?: "OPEN" | "CLOSED"; }; } export interface ListIssuesResponse { body: { /** * 在第三方服务中的id */ id?: string; /** * 标题 */ title?: string; /** * 所属repo (repositoryId) */ repository?: string; /** * issue 号 */ number?: number; /** * 标签 */ labels?: string[]; /** * 创建者 */ state?: "OPEN" | "CLOSED"; /** * 关闭时间 */ closeAt?: string; /** * url */ htmlUrl?: string; /** * github user username */ user?: string; assignees?: string[]; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; }[]; headers: { "x-total-count"?: number; }; } export interface GetIssueRequest { issueId: string; } 
export interface GetIssueResponse { /** * Issue Doc */ body: { /** * 在第三方服务中的id */ id?: string; /** * 标题 */ title?: string; /** * 所属repo (repositoryId) */ repository?: string; /** * issue 号 */ number?: number; /** * 标签 */ labels?: string[]; /** * 创建者 */ state?: "OPEN" | "CLOSED"; /** * 关闭时间 */ closeAt?: string; /** * url */ htmlUrl?: string; /** * github user username */ user?: string; assignees?: string[]; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; }; } export interface GetCommentsRequest { issueId: string; } export interface GetCommentsResponse { body: { /** * content of comment */ body?: string; /** * writer of comment */ user?: string; /** * 更新时间 */ updatedAt?: string; /** * github url of comment */ htmlUrl?: string; /** * role of user */ authorAssociation?: string; }[]; } export interface ListPrsRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; id?: string; repository?: string; state?: "OPEN" | "CLOSED"; }; } export interface ListPrsResponse { body: { /** * 在第三方服务中的id */ id?: string; /** * 标题 */ title?: string; /** * 所属repo (repositoryId) */ repository?: string; /** * pr 号 */ number?: number; /** * 标签 */ labels?: string[]; /** * pr状态 */ state?: "OPEN" | "CLOSED"; /** * 关闭时间 */ closeAt?: string; /** * url */ htmlUrl?: string; /** * github user username */ user?: string; assignees?: string[]; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; }[]; headers: { "x-total-count"?: number; }; } export interface ListLabelsRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; id?: string; name?: string; }; } export interface ListLabelsResponse { body: { /** * label id */ id?: string; /** * label name */ name?: string; /** * label color */ color?: string; }[]; headers: { "x-total-count"?: number; }; } export interface CreateTicketRequest { /** * Ticket Doc */ body: { /** * 关联的issue (第三方服务中的issueid) */ issue: string; /** * ticket titile */ title?: string; /** * 所属repo (第三方服务中的 repositoryId) */ repository?: string; /** * 级别 */ level?: number; /** * 状态 */ state?: "PLANNING" | "TODO" | "DOING" | "DONE"; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 所属项目 (projectId) */ project?: string; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取时间 */ takenAt?: string; /** * 完成时间 */ doneAt?: string; /** * 发布时间 */ publishedAt?: string; /** * 备注 */ remark?: string; /** * 额外暴击奖励 */ bonus?: number; labels?: string[]; events?: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; }[]; /** * 是否 reopened 过 */ reopened?: boolean; reopenedAt?: string; relatedRepos?: string[]; }; } export interface CreateTicketResponse { body: { /** * 关联的issue (第三方服务中的issueid) */ issue: string; /** * ticket titile */ title?: string; /** * 所属repo (第三方服务中的 repositoryId) */ repository?: string; /** * 级别 */ level?: 
number; /** * 状态 */ state?: "PLANNING" | "TODO" | "DOING" | "DONE"; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 所属项目 (projectId) */ project?: string; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取时间 */ takenAt?: string; /** * 完成时间 */ doneAt?: string; /** * 发布时间 */ publishedAt?: string; /** * 备注 */ remark?: string; /** * 额外暴击奖励 */ bonus?: number; labels?: string[]; events?: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; }[]; /** * 是否 reopened 过 */ reopened?: boolean; reopenedAt?: string; relatedRepos?: string[]; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface ListTicketsRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; _group?: string; id?: string; repository?: string; driver?: string; interation?: string; project?: string; state?: "PLANNING" | "TODO" | "DOING" | "DONE"; priority?: 0 | 1 | 2; deadline_gt?: string; deadline_lt?: string; takenAt_gt?: string; takenAt_lt?: string; title_like?: string; takenBy?: string; }; } export interface ListTicketsResponse { body: ({ /** * 关联的issue (第三方服务中的issueid) */ issue: string; /** * ticket titile */ title?: string; /** * 所属repo (第三方服务中的 repositoryId) */ repository?: string; /** * 级别 */ level?: number; /** * 状态 */ state?: "PLANNING" | "TODO" | "DOING" | "DONE"; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 所属项目 (projectId) */ project?: string; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取时间 */ takenAt?: string; /** * 完成时间 */ doneAt?: string; /** * 发布时间 */ publishedAt?: string; /** * 备注 */ remark?: string; /** * 额外暴击奖励 */ bonus?: number; labels?: string[]; events?: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; }[]; /** * 是否 reopened 过 */ reopened?: boolean; reopenedAt?: string; relatedRepos?: string[]; } & { id: string; updatedAt?: string; createdAt?: string; })[]; headers: { "x-total-count"?: number; }; } export interface GetTicketRequest { ticketId: string; } export interface GetTicketResponse { body: { /** * 关联的issue (第三方服务中的issueid) */ issue: string; /** * ticket titile */ title?: string; /** * 所属repo (第三方服务中的 repositoryId) */ 
repository?: string; /** * 级别 */ level?: number; /** * 状态 */ state?: "PLANNING" | "TODO" | "DOING" | "DONE"; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 所属项目 (projectId) */ project?: string; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取时间 */ takenAt?: string; /** * 完成时间 */ doneAt?: string; /** * 发布时间 */ publishedAt?: string; /** * 备注 */ remark?: string; /** * 额外暴击奖励 */ bonus?: number; labels?: string[]; events?: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; }[]; /** * 是否 reopened 过 */ reopened?: boolean; reopenedAt?: string; relatedRepos?: string[]; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface DeleteTicketRequest { ticketId: string; } export interface CreateTicketEventRequest { ticketId: string; /** * Ticket evnet */ body: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; }; } export interface CreateTicketEventResponse { body: { /** * 关联的issue (第三方服务中的issueid) */ issue: string; /** * ticket titile */ title?: string; /** * 所属repo (第三方服务中的 repositoryId) */ repository?: string; /** * 级别 */ level?: number; /** * 状态 */ state?: "PLANNING" | "TODO" | "DOING" | "DONE"; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 所属项目 (projectId) */ project?: string; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取时间 */ takenAt?: string; /** * 完成时间 */ doneAt?: string; /** * 发布时间 */ publishedAt?: string; /** * 备注 */ remark?: string; /** * 额外暴击奖励 */ bonus?: number; labels?: string[]; events?: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; 
}[]; /** * 是否 reopened 过 */ reopened?: boolean; reopenedAt?: string; relatedRepos?: string[]; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface ListInterationsRequest { projectId: string; query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; planStart_gt?: string; planStart_lt?: string; planEnd_gt?: string; planEnd_lt?: string; id?: string; }; } export interface ListInterationsResponse { body: ({ /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; } & { id: string; updatedAt?: string; createdAt?: string; })[]; headers: { "x-total-count"?: number; }; } export interface CreateInteractionRequest { projectId: string; /** * 迭代 Doc */ body: { /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; }; } export interface CreateInteractionResponse { body: { /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface GetInterationRequest { projectId: string; interationId: string; } export interface GetInterationResponse { body: { /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface UpdateInterationRequest { projectId: string; interationId: string; /** * 迭代 Doc */ body: { /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; }; } export interface UpdateInterationResponse { body: { /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface DeleteInterationRequest { projectId: string; interationId: string; } export interface ListProjectsRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; name?: string; po?: string; cm?: string; state?: "DOING" | "ARCHIVED"; planStart_gt?: string; planStart_lt?: string; planEnd_gt?: string; planEnd_lt?: string; id?: string; }; } export interface ListProjectsResponse { body: ({ /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ 
manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } & { id: string; updatedAt?: string; createdAt?: string; })[]; headers: { "x-total-count"?: number; }; } export interface CreateProjectRequest { body: { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; }; } export interface CreateProjectResponse { body: { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface GetProjectRequest { projectId: string; } export interface GetProjectResponse { body: { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface UpdateProjectRequest { projectId: string; body: { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; }; } export interface UpdateProjectResponse { body: { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface DeleteProjectRequest { projectId: string; } export interface CreateProjectDocRequest { projectId: string; body: { /** * 文档标题 */ title: string; /** * 文档内容 */ content: string; /** * 更新人(userId) */ updatedBy?: string; /** * 创建人(userId) */ createdBy?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface CreateProjectDocResponse { body: { /** * 文档标题 */ title: string; /** * 文档内容 */ content: string; /** * 
更新人(userId) */ updatedBy?: string; /** * 创建人(userId) */ createdBy?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface GetProjectSummaryRequest { projectId: string; } export interface GetProjectSummaryResponse { /** * 项目统计 */ body: { /** * 统计类型 */ type?: string; /** * 统计数据 */ data?: { /** * 名称 */ name?: string; /** * 值 */ value?: string; }[]; /** * 总数 */ total?: number; }[]; } export interface UpdateProjectDocRequest { projectId: string; docId: string; body: { /** * 文档标题 */ title: string; /** * 文档内容 */ content: string; /** * 更新人(userId) */ updatedBy?: string; /** * 创建人(userId) */ createdBy?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface UpdateProjectDocResponse { body: { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface CreateProjectEventRequest { projectId: string; /** * 项目事件 */ body: { /** * 事件名称 */ name?: "CHANGE_TOTAL" | "SHARED"; /** * 项目金额总数 */ total?: number; /** * 分成细则 */ shared?: { /** * 分成总金额 */ total?: number; /** * 分成事项 */ remark?: string; detail?: { /** * 员工id */ staff?: string; /** * 在项目中的职位 */ position?: "PO" | "CM"; /** * 分成中的比例,百分比 */ percent?: number; }[]; }; }; } export interface CreateProjectEventResponse { body: { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface GetInteractionsSummaryRequest { query?: { _group: string[]; project?: string; planStart_gt?: string; planStart_lt?: string; planEnd_gt?: string; planEnd_lt?: string; }; } export interface GetInteractionsSummaryResponse { body: {}[]; } export interface GetTicketsSummaryRequest { query?: { _group: string[]; _exist?: string[]; state?: "PLANNING" | "TODO" | "DOING" | "DONE"; takenBy?: string; project?: string; interation?: string; }; } export interface GetTicketsSummaryResponse { body: {}[]; } export interface GetTradeSummaryRequest { query?: { _group: string[]; project?: string; staff?: string; }; } export interface GetTradeSummaryResponse { body: { /** * 对应项目 */ project?: string; /** * 对应员工 */ staff?: string; /** * 对应操作者 */ handler?: string; /** * 总支出 */ totalExp?: string; }[]; } export interface GetTradeSummaryByMonthRequest { query?: { staff?: string; }; } export interface GetTradeSummaryByMonthResponse { body: { /** * 对应项目 */ project?: string; /** * 对应员工 */ staff?: string; /** * 对应操作者 */ handler?: string; /** * 总支出 */ totalExp?: string; }[]; } export interface GetTicketDoneSummaryRequest { query?: { start?: string; end?: string; }; } export interface GetTicketDoneSummaryResponse { body: {}[]; } export interface 
GetTicketCoverageSummaryRequest { query?: { start?: string; end?: string; }; } export interface GetTicketCoverageSummaryResponse { body: {}[]; } export interface ListStaffsRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; name?: string; type?: string; position?: string; }; } export interface ListStaffsResponse { body: ({ /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { id: string; updatedAt?: string; createdAt?: string; })[]; headers: { "x-total-count"?: number; }; } export interface CreateStaffRequest { body: { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { /** * 邀请码 */ code?: string; }; } export interface CreateStaffResponse { body: { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface UpsertStaffRequest { body: { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { /** * 邀请码 */ code?: string; }; } export interface UpsertStaffResponse { body: { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ 
companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface UpdateStaffRequest { staffId: string; body: { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; }; } export interface UpdateStaffResponse { body: { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface GetStaffRequest { staffId: string; } export interface GetStaffResponse { body: { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface DeleteStaffRequest { staffId: string; } export interface GetStaffWalletRequest { staffId: string; } export interface GetStaffWalletResponse { body: { /** * 钱包总余额 */ balance?: number; /** * 对应员工 */ staff?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface ListWalletsRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; staff?: string; }; } export interface ListWalletsResponse { body: ({ /** * 钱包总余额 */ balance?: number; /** * 对应员工 */ staff?: string; } & { id: string; updatedAt?: string; createdAt?: string; })[]; headers: { "x-total-count"?: number; }; } export interface ListTradesRequest { query?: { _limit?: number; _offset?: number; _sort?: string; _select?: string; staff?: string; type?: string; staffName?: string; staffGithub?: string; createdAt_gt?: string; createdAt_lt?: string; }; } export interface ListTradesResponse { body: ({ /** * 对应的项目 */ project?: string; /** * 对应的员工 */ staff: string; /** * 交易类型, 分成, 任务, 结算 */ type: "SHARED" | "TICKET" | "SETTLE"; /** * 本次交易总金额 */ amount: number; /** * 操作人 */ handler: string; /** * staff name */ staffName?: string; /** * staff github */ staffGithub?: string; } & { id: string; updatedAt?: 
string; createdAt?: string; })[]; headers: { "x-total-count"?: number; }; } export interface CreateTradeRequest { /** * 项目交易记录 */ body: { /** * 对应的项目 */ project?: string; /** * 对应的员工 */ staff: string; /** * 交易类型, 分成, 任务, 结算 */ type: "SHARED" | "TICKET" | "SETTLE"; /** * 本次交易总金额 */ amount: number; /** * 操作人 */ handler: string; /** * staff name */ staffName?: string; /** * staff github */ staffGithub?: string; }; } export interface CreateTradeResponse { body: { /** * 对应的项目 */ project?: string; /** * 对应的员工 */ staff: string; /** * 交易类型, 分成, 任务, 结算 */ type: "SHARED" | "TICKET" | "SETTLE"; /** * 本次交易总金额 */ amount: number; /** * 操作人 */ handler: string; /** * staff name */ staffName?: string; /** * staff github */ staffGithub?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface CreateInvitationRequest { /** * 邀请函,包含邀请码 */ body: { code?: string; email?: string; /** * 邮件是否已经发送 */ sent?: boolean; /** * 邀请码过期时间 */ expiredAt?: Date; /** * 邀请码是否已经使用 */ used?: boolean; /** * 邀请码使用时间 */ usedAt?: Date; /** * 备注 */ remark?: string; }; } export interface CreateInvitationResponse { body: { code?: string; email?: string; /** * 邮件是否已经发送 */ sent?: boolean; /** * 邀请码过期时间 */ expiredAt?: Date; /** * 邀请码是否已经使用 */ used?: boolean; /** * 邀请码使用时间 */ usedAt?: Date; /** * 备注 */ remark?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; } export interface ProjectDoc { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } export type Project = { /** * 项目名称 */ name: string; /** * 项目描述 */ description?: string; /** * 产品负责人 (userId) */ po?: string[]; /** * 技术负责人 (userId) */ cm?: string[]; /** * 包含的工程 */ repositories?: string[]; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 项目状态 */ state?: "DOING" | "ARCHIVED"; /** * 项目logo */ logo?: string; /** * 项目负责人 */ owner?: string; /** * 行业 */ industry?: string; /** * 项目管理人员 */ manager?: { /** * 人员 */ staff?: string; /** * 职位 */ position?: string; }; } & { id: string; updatedAt?: string; createdAt?: string; }; /** * 项目事件 */ export interface ProjectEvent { /** * 事件名称 */ name?: "CHANGE_TOTAL" | "SHARED"; /** * 项目金额总数 */ total?: number; /** * 分成细则 */ shared?: { /** * 分成总金额 */ total?: number; /** * 分成事项 */ remark?: string; detail?: { /** * 员工id */ staff?: string; /** * 在项目中的职位 */ position?: "PO" | "CM"; /** * 分成中的比例,百分比 */ percent?: number; }[]; }; } /** * 迭代 Doc */ export interface InterationDoc { /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; } /** * 项目统计 */ export type ProjectSummary = { /** * 统计类型 */ type?: string; /** * 统计数据 */ data?: { /** * 名称 */ name?: string; /** * 值 */ value?: string; }[]; /** * 总数 */ total?: number; }[]; /** * 项目文档 */ export interface ProjectDocumentDoc { /** * 文档标题 */ title: string; /** * 文档内容 */ content: string; /** * 更新人(userId) */ updatedBy?: string; /** * 创建人(userId) */ createdBy?: string; } export type ProjectDocument = { /** * 文档标题 */ title: string; 
/** * 文档内容 */ content: string; /** * 更新人(userId) */ updatedBy?: string; /** * 创建人(userId) */ createdBy?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; export type Interation = { /** * 迭代名称 */ name: string; /** * 计划开始时间 */ planStartAt: string; /** * 计划结束时间 */ planEndAt: string; /** * 实际开始时间 */ startAt?: string; /** * 实际结束时间 */ endAt?: string; /** * 所属的 project (projectId) */ project?: string; /** * 迭代描述 */ description?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; /** * 任务统计 */ export interface TicketsSummary {} /** * 迭代统计 */ export interface InterationSummary {} /** * Repo Doc */ export interface Repository { /** * 在第三方服务中的id */ id?: string; /** * 仓库名称 */ name?: string; /** * 仓库全称 */ fullName?: string; /** * url for clone */ gitUrl?: string; /** * url */ htmlUrl?: string; /** * github user username */ owner?: string; /** * 是否私有 */ private?: boolean; /** * 最近push时间 */ pushedAt?: string; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; /** * readme 内容 */ readme?: string; /** * 主题 */ topics?: string[]; collaborators?: string[]; technologies?: string[]; types?: string[]; } /** * Issue Doc */ export interface Issue { /** * 在第三方服务中的id */ id?: string; /** * 标题 */ title?: string; /** * 所属repo (repositoryId) */ repository?: string; /** * issue 号 */ number?: number; /** * 标签 */ labels?: string[]; /** * 创建者 */ state?: "OPEN" | "CLOSED"; /** * 关闭时间 */ closeAt?: string; /** * url */ htmlUrl?: string; /** * github user username */ user?: string; assignees?: string[]; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; } /** * Issue Comment Doc */ export interface IssueComment { /** * content of comment */ body?: string; /** * writer of comment */ user?: string; /** * 更新时间 */ updatedAt?: string; /** * github url of comment */ htmlUrl?: string; /** * role of user */ authorAssociation?: string; } /** * Pr Doc */ export interface Pr { /** * 在第三方服务中的id */ id?: string; /** * 标题 */ title?: string; /** * 所属repo (repositoryId) */ repository?: string; /** * pr 号 */ number?: number; /** * 标签 */ labels?: string[]; /** * pr状态 */ state?: "OPEN" | "CLOSED"; /** * 关闭时间 */ closeAt?: string; /** * url */ htmlUrl?: string; /** * github user username */ user?: string; assignees?: string[]; /** * 创建时间 */ createdAt?: string; /** * 更新时间 */ updatedAt?: string; } /** * Ticket Doc */ export interface TicketDoc { /** * 关联的issue (第三方服务中的issueid) */ issue: string; /** * ticket titile */ title?: string; /** * 所属repo (第三方服务中的 repositoryId) */ repository?: string; /** * 级别 */ level?: number; /** * 状态 */ state?: "PLANNING" | "TODO" | "DOING" | "DONE"; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 所属项目 (projectId) */ project?: string; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取时间 */ takenAt?: string; /** * 完成时间 */ doneAt?: string; /** * 发布时间 */ publishedAt?: string; /** * 备注 */ remark?: string; /** * 额外暴击奖励 */ bonus?: number; labels?: string[]; events?: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ 
publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; }[]; /** * 是否 reopened 过 */ reopened?: boolean; reopenedAt?: string; relatedRepos?: string[]; } /** * Ticket Label */ export interface Label { /** * label id */ id?: string; /** * label name */ name?: string; /** * label color */ color?: string; } /** * Ticket evnet */ export interface TicketEvent { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; } export type Ticket = { /** * 关联的issue (第三方服务中的issueid) */ issue: string; /** * ticket titile */ title?: string; /** * 所属repo (第三方服务中的 repositoryId) */ repository?: string; /** * 级别 */ level?: number; /** * 状态 */ state?: "PLANNING" | "TODO" | "DOING" | "DONE"; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 所属项目 (projectId) */ project?: string; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取时间 */ takenAt?: string; /** * 完成时间 */ doneAt?: string; /** * 发布时间 */ publishedAt?: string; /** * 备注 */ remark?: string; /** * 额外暴击奖励 */ bonus?: number; labels?: string[]; events?: { name: | "PUBLISH" | "UNPUBLISH" | "ASSIGN" | "UNASSIGN" | "LEVEL" | "PRIORITY" | "BONUS" | "DEADLINE" | "DONE" | "REOPEN" | "LABEL" | "REMARK" | "INTERATION"; /** * 操作人 (userId) */ user: string; /** * 级别 */ level?: 0 | 1 | 2 | 3 | 4 | 5 | 6; /** * 创建时间 */ createdAt?: string; /** * 优先级 */ priority?: 0 | 1 | 2; /** * 截止时间 */ deadline?: string; /** * 所属迭代 (interationId) */ interation?: string; /** * 额外暴击奖励 */ bonus?: number; /** * 领取人 (userId) */ takenBy?: string; /** * 发布人 (userId) */ publishBy?: string; /** * 领取人 外部id */ foreignTakenBy?: string; /** * ticket labels */ labels?: string[]; /** * ticket remark */ remark?: string; relatedRepos?: string[]; }[]; /** * 是否 reopened 过 */ reopened?: boolean; reopenedAt?: string; relatedRepos?: string[]; } & { id: string; updatedAt?: string; createdAt?: string; }; export interface StaffDoc { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } export type Staff = { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ 
idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; export type CreateStaffDoc = { /** * 员工id, 同stargate 中的id */ id: string; /** * 工号 */ number?: number; /** * 姓名 */ name?: string; /** * 用户类型 */ type?: "36NODE" | "ADVENTURE"; /** * 职位 */ position?: "PM" | "DEVELOPER" | "DESIGNER"; /** * 等级 */ level?: number; /** * 银行卡号 */ bankCard?: string; /** * 身份证号 */ idNumber?: string; /** * 城市 */ city?: string; /** * 微信 */ weixin?: string; /** * 电话 */ phone?: string; /** * 个人邮箱 */ email?: string; /** * 公司邮箱 */ companyEmail?: string; /** * google账号 */ google?: string; /** * icloud账号 */ icloud?: string; /** * github账号 */ github?: string; /** * 用户头像url */ avatar?: string; } & { /** * 邀请码 */ code?: string; }; /** * 项目钱包 */ export interface WalletDoc { /** * 钱包总余额 */ balance?: number; /** * 对应员工 */ staff?: string; } export type Wallet = { /** * 钱包总余额 */ balance?: number; /** * 对应员工 */ staff?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; /** * 项目交易记录 */ export interface TradeDoc { /** * 对应的项目 */ project?: string; /** * 对应的员工 */ staff: string; /** * 交易类型, 分成, 任务, 结算 */ type: "SHARED" | "TICKET" | "SETTLE"; /** * 本次交易总金额 */ amount: number; /** * 操作人 */ handler: string; /** * staff name */ staffName?: string; /** * staff github */ staffGithub?: string; } export type Trade = { /** * 对应的项目 */ project?: string; /** * 对应的员工 */ staff: string; /** * 交易类型, 分成, 任务, 结算 */ type: "SHARED" | "TICKET" | "SETTLE"; /** * 本次交易总金额 */ amount: number; /** * 操作人 */ handler: string; /** * staff name */ staffName?: string; /** * staff github */ staffGithub?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; export interface TradeSummary { /** * 对应项目 */ project?: string; /** * 对应员工 */ staff?: string; /** * 对应操作者 */ handler?: string; /** * 总支出 */ totalExp?: string; } export interface TicketDoneSummary {} export interface TicketCoverageSummary {} /** * 创建邀请函所需参数 */ export interface CreateInvitationBody { /** * 邀请码过期时间,默认7天 */ expiredAt?: Date; email: string; /** * 备注 */ remark?: string; } /** * 邀请函,包含邀请码 */ export interface InvitationDoc { code?: string; email?: string; /** * 邮件是否已经发送 */ sent?: boolean; /** * 邀请码过期时间 */ expiredAt?: Date; /** * 邀请码是否已经使用 */ used?: boolean; /** * 邀请码使用时间 */ usedAt?: Date; /** * 备注 */ remark?: string; } export type Invitation = { code?: string; email?: string; /** * 邮件是否已经发送 */ sent?: boolean; /** * 邀请码过期时间 */ expiredAt?: Date; /** * 邀请码是否已经使用 */ used?: boolean; /** * 邀请码使用时间 */ usedAt?: Date; /** * 备注 */ remark?: string; } & { id: string; updatedAt?: string; createdAt?: string; }; /** * release of repository */ export interface Release { /** * 暂时不提供 */ version?: string; } export interface MongoDefault { id: string; updatedAt?: string; createdAt?: string; } export interface Err { code: string; message: string; } export = SDK;
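
A minimal usage sketch of the SDK declared above, based only on the types in this file. The module path, base URL, token, ids, and the use of `main()` are placeholders, and error handling is omitted; treat it as an illustration of the declared surface, not as the library's documented usage.

import SDK = require("./sdk"); // hypothetical path to the declarations above (they use `export = SDK`)

const sdk = new SDK({
  base: "https://api.example.com", // placeholder base URL
  token: () => "my-token",         // token may be a string or a () => string
});

async function main() {
  // List the first ten repositories; ListRepositoriesRequest.query is optional.
  const repos = await sdk.repository.listRepositories({ query: { _limit: 10, _offset: 0 } });
  console.log("total repositories:", repos.headers["x-total-count"]);

  // Create a ticket; `issue` is the only required field on the ticket body.
  const created = await sdk.ticket.createTicket({
    body: { issue: "issue-id", title: "Example ticket", state: "TODO" },
  });
  console.log("created ticket id:", created.body.id);
}

main().catch(console.error);
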
<filename>x/ibc/03-connection/types/connection_test.go
package types

import (
	"testing"

	"github.com/stretchr/testify/require"

	commitmenttypes "github.com/cosmos/cosmos-sdk/x/ibc/23-commitment/types"
	ibctypes "github.com/cosmos/cosmos-sdk/x/ibc/types"
)

var (
	connectionID  = "connectionidone"
	clientID      = "clientidone"
	connectionID2 = "connectionidtwo"
	clientID2     = "clientidtwo"
)

func TestConnectionValidateBasic(t *testing.T) {
	testCases := []struct {
		name       string
		connection ConnectionEnd
		expPass    bool
	}{
		{
			"valid connection",
			ConnectionEnd{connectionID, clientID, []string{"1.0.0"}, ibctypes.INIT, Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}},
			true,
		},
		{
			"invalid connection id",
			ConnectionEnd{"(connectionIDONE)", clientID, []string{"1.0.0"}, ibctypes.INIT, Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}},
			false,
		},
		{
			"invalid client id",
			ConnectionEnd{connectionID, "(clientID1)", []string{"1.0.0"}, ibctypes.INIT, Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}},
			false,
		},
		{
			"empty versions",
			ConnectionEnd{connectionID, clientID, nil, ibctypes.INIT, Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}},
			false,
		},
		{
			"invalid version",
			ConnectionEnd{connectionID, clientID, []string{""}, ibctypes.INIT, Counterparty{clientID2, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}},
			false,
		},
		{
			"invalid counterparty",
			ConnectionEnd{connectionID, clientID, []string{"1.0.0"}, ibctypes.INIT, Counterparty{clientID2, connectionID2, emptyPrefix}},
			false,
		},
	}

	for i, tc := range testCases {
		tc := tc

		err := tc.connection.ValidateBasic()
		if tc.expPass {
			require.NoError(t, err, "valid test case %d failed: %s", i, tc.name)
		} else {
			require.Error(t, err, "invalid test case %d passed: %s", i, tc.name)
		}
	}
}

func TestCounterpartyValidateBasic(t *testing.T) {
	testCases := []struct {
		name         string
		counterparty Counterparty
		expPass      bool
	}{
		{"valid counterparty", Counterparty{clientID, connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, true},
		{"invalid client id", Counterparty{"(InvalidClient)", connectionID2, commitmenttypes.NewMerklePrefix([]byte("prefix"))}, false},
		{"invalid connection id", Counterparty{clientID, "(InvalidConnection)", commitmenttypes.NewMerklePrefix([]byte("prefix"))}, false},
		{"invalid prefix", Counterparty{clientID, connectionID2, emptyPrefix}, false},
	}

	for i, tc := range testCases {
		tc := tc

		err := tc.counterparty.ValidateBasic()
		if tc.expPass {
			require.NoError(t, err, "valid test case %d failed: %s", i, tc.name)
		} else {
			require.Error(t, err, "invalid test case %d passed: %s", i, tc.name)
		}
	}
}
<gh_stars>0
import {Component, OnInit} from '@angular/core';
import {Desafio, DesafioTentativaResposta} from '../shared/model/matematica-play.model';
import {MatematicaPlayService} from '../shared/service/matematica-play.service';

@Component({
  selector: 'app-matematica-play',
  templateUrl: './matematica-play.component.html',
  styleUrls: ['./matematica-play.component.scss']
})
export class MatematicaPlayComponent implements OnInit {

  public desafio!: Desafio;
  public desafioTentativas: DesafioTentativaResposta[] = [];
  public classAcertoErro: string = '';
  public menssagemAcertoErro: string = '';

  constructor(private readonly matematicaPlayService: MatematicaPlayService) {
  }

  ngOnInit(): void {
    this.novoDesafio();
  }

  jogar(resposta: number) {
    const tentativa = {
      fatorA: this.desafio.fatorA,
      fatorB: this.desafio.fatorB,
      operacao: this.desafio.operacao,
      resposta: resposta
    };
    this.matematicaPlayService
      .verificarResposta(tentativa)
      .subscribe((resposta: DesafioTentativaResposta) => {
        if (resposta && resposta.correta) {
          this.acertouResposta();
        } else {
          this.errouResposta();
        }
        setTimeout(() => {
          this.resetar();
          this.novoDesafio();
        }, 1000);
      });
  }

  private novoDesafio() {
    this.matematicaPlayService
      .desafioAleatorio()
      .subscribe((desafio: Desafio) => {
        if (desafio) {
          this.desafio = desafio;
        }
      });
  }

  private acertouResposta() {
    this.classAcertoErro = 'acertou';
    this.menssagemAcertoErro = 'Acertou !!';
  }

  private errouResposta() {
    this.classAcertoErro = 'errou';
    this.menssagemAcertoErro = 'Errou !!';
  }

  private resetar() {
    this.classAcertoErro = '';
    this.menssagemAcertoErro = '';
  }
}
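// --- Editor's sketch (not part of this repository) --------------------------
// The component above injects a MatematicaPlayService with two Observable-based
// methods. The real service lives in ../shared/service/matematica-play.service
// and is not shown here; this is only a sketch of the contract the component
// relies on, and the HTTP endpoints below are assumptions.
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';
import { Desafio, DesafioTentativaResposta } from '../shared/model/matematica-play.model';

@Injectable({ providedIn: 'root' })
export class MatematicaPlayServiceSketch {
  constructor(private readonly http: HttpClient) {}

  // Fetch a new random challenge for the player.
  desafioAleatorio(): Observable<Desafio> {
    return this.http.get<Desafio>('/api/desafios/aleatorio'); // hypothetical endpoint
  }

  // Submit the player's attempt and learn whether it was correct.
  verificarResposta(
    tentativa: Pick<Desafio, 'fatorA' | 'fatorB' | 'operacao'> & { resposta: number }
  ): Observable<DesafioTentativaResposta> {
    return this.http.post<DesafioTentativaResposta>('/api/desafios/verificar', tentativa); // hypothetical endpoint
  }
}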
/*
 * Recursively count every path from src to dst, accumulating the total in
 * the path_count out-parameter. The visited vector is deliberately passed
 * by value: each recursive branch works on its own copy, so nodes never
 * need to be un-marked when the recursion unwinds.
 */
void Graph::path_counter(int src, int dst, int& path_count, vector<bool> visited)
{
    visited[src] = true;
    if (src == dst) {
        path_count++;
    } else {
        for (auto neighbour : m_neighbours[src]) {
            if (!visited[neighbour])
                path_counter(neighbour, dst, path_count, visited);
        }
    }
}
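// --- Editor's sketch (not part of the original source) ----------------------
// The Graph class and its m_neighbours member are defined elsewhere, so the
// C++ above is not runnable on its own. To illustrate the same counting
// recursion in a self-contained form, here is a TypeScript sketch over a plain
// adjacency list (names and types here are illustrative, not the author's API).
function countPaths(neighbours: number[][], src: number, dst: number): number {
  let total = 0;
  const walk = (v: number, visited: boolean[]): void => {
    visited[v] = true;
    if (v === dst) {
      total++;
      return;
    }
    for (const n of neighbours[v]) {
      // Pass a copy of `visited`, mirroring the pass-by-value vector in the
      // C++ version: sibling branches never see each other's markings.
      if (!visited[n]) walk(n, [...visited]);
    }
  };
  walk(src, new Array<boolean>(neighbours.length).fill(false));
  return total;
}

// Example: edges 0->1, 0->2, 1->3, 2->3 give two distinct paths from 0 to 3.
console.log(countPaths([[1, 2], [3], [3], []], 0, 3)); // 2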
def collect_results(input_path, output_file=None, aws_key=None, aws_secret=None):
    """Read result scores from input_path, drop duplicate rows, and write them
    to output_file (defaults to '<input_path>/results.csv')."""
    print(f'Reading results from {input_path}')
    scores = read_csv_from_path(input_path, aws_key, aws_secret)
    scores = scores.drop_duplicates()

    if output_file:
        output = output_file
    else:
        output = f'{input_path}/results.csv'

    print(f'Storing results at {output}')
    write_csv(scores, output, aws_key, aws_secret)
// Generated by Haxe 4.2.0-rc.1+354c24d30 #include <hxcpp.h> #ifndef INCLUDED_95f339a1d026d52c #define INCLUDED_95f339a1d026d52c #include "hxMath.h" #endif #ifndef INCLUDED_Std #include <Std.h> #endif #ifndef INCLUDED_dsHelper_flatInterleave__FloatColorTriangles_FloatColorTriangles_Impl_ #include <dsHelper/flatInterleave/_FloatColorTriangles/FloatColorTriangles_Impl_.h> #endif #ifndef INCLUDED_dsHelper_flatInterleave_core__Flat32_Flat32_Impl_ #include <dsHelper/flatInterleave/core/_Flat32/Flat32_Impl_.h> #endif #ifndef INCLUDED_haxe_Exception #include <haxe/Exception.h> #endif #ifndef INCLUDED_haxe_io_ArrayBufferViewImpl #include <haxe/io/ArrayBufferViewImpl.h> #endif #ifndef INCLUDED_haxe_io_Bytes #include <haxe/io/Bytes.h> #endif #ifndef INCLUDED_haxe_io_Error #include <haxe/io/Error.h> #endif #ifndef INCLUDED_trilateral3_Trilateral #include <trilateral3/Trilateral.h> #endif #ifndef INCLUDED_trilateral3_drawing_Pen #include <trilateral3/drawing/Pen.h> #endif #ifndef INCLUDED_trilateral3_geom__FlatColorTriangles_FlatColorTriangles_Impl_ #include <trilateral3/geom/_FlatColorTriangles/FlatColorTriangles_Impl_.h> #endif #ifndef INCLUDED_trilateral3_matrix_MatrixDozen #include <trilateral3/matrix/MatrixDozen.h> #endif #ifndef INCLUDED_trilateral3_matrix_Vertex #include <trilateral3/matrix/Vertex.h> #endif #ifndef INCLUDED_trilateral3_nodule_PenNodule #include <trilateral3/nodule/PenNodule.h> #endif #ifndef INCLUDED_trilateral3_structure_StartEnd #include <trilateral3/structure/StartEnd.h> #endif #ifndef INCLUDED_trilateral3_structure_TriInt #include <trilateral3/structure/TriInt.h> #endif #ifndef INCLUDED_trilateral3_structure_Triangle3D #include <trilateral3/structure/Triangle3D.h> #endif namespace trilateral3{ namespace nodule{ void PenNodule_obj::__construct(){ HX_JUST_GC_STACKFRAME int size = ((::trilateral3::nodule::PenNodule_obj::largeEnough + 2) * 4); ::haxe::io::ArrayBufferViewImpl this1 = ::haxe::io::ArrayBufferViewImpl_obj::__alloc( HX_CTX ,::haxe::io::Bytes_obj::alloc(size),0,size); ::haxe::io::ArrayBufferViewImpl this2 = this1; if ((0 < (this2->byteLength >> 2))) { this2->bytes->setFloat(this2->byteOffset,((Float)0.)); } if ((1 < (this2->byteLength >> 2))) { this2->bytes->setFloat((4 + this2->byteOffset),((Float)0.)); } ::haxe::io::ArrayBufferViewImpl this3 = this2; ::haxe::io::ArrayBufferViewImpl this4 = this3; ::haxe::io::ArrayBufferViewImpl this5 = this4; this->colorTriangles = this5; ::trilateral3::matrix::MatrixDozen transform1000 = ::trilateral3::matrix::MatrixDozen_obj::__alloc( HX_CTX ,((Float)0.001),( (Float)(0) ),( (Float)(0) ),( (Float)(-1) ),( (Float)(0) ),((Float)-0.001),( (Float)(0) ),( (Float)(1) ),( (Float)(0) ),( (Float)(0) ),((Float)0.001),( (Float)(0) )); ::trilateral3::Trilateral_obj::transformMatrix = transform1000; this->createPen(); } Dynamic PenNodule_obj::__CreateEmpty() { return new PenNodule_obj; } void *PenNodule_obj::_hx_vtable = 0; Dynamic PenNodule_obj::__Create(::hx::DynamicArray inArgs) { ::hx::ObjectPtr< PenNodule_obj > _hx_result = new PenNodule_obj(); _hx_result->__construct(); return _hx_result; } bool PenNodule_obj::_hx_isInstanceOf(int inClassId) { return inClassId==(int)0x00000001 || inClassId==(int)0x052c2712; } void PenNodule_obj::createPen(){ HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_0, ::haxe::io::ArrayBufferViewImpl,_e) HXARGC(3) void _hx_run(Float x,Float y,Float theta){ Float cos = ::Math_obj::cos(theta); Float sin = ::Math_obj::sin(theta); { { ::haxe::io::ArrayBufferViewImpl _g = _e; 
::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ax(_g,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_g) - x)); } { ::haxe::io::ArrayBufferViewImpl _g1 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ay(_g1,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_g1) - y)); } { ::haxe::io::ArrayBufferViewImpl _g2 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bx(_g2,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_g2) - x)); } { ::haxe::io::ArrayBufferViewImpl _g3 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_by(_g3,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_g3) - y)); } { ::haxe::io::ArrayBufferViewImpl _g4 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cx(_g4,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_g4) - x)); } { ::haxe::io::ArrayBufferViewImpl _g5 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cy(_g5,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_g5) - y)); } Float dx = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e); Float dy = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ax(_e,((dx * cos) - (dy * sin))); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ay(_e,((dx * sin) + (dy * cos))); dx = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e); dy = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bx(_e,((dx * cos) - (dy * sin))); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_by(_e,((dx * sin) + (dy * cos))); dx = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e); dy = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cx(_e,((dx * cos) - (dy * sin))); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cy(_e,((dx * sin) + (dy * cos))); { ::haxe::io::ArrayBufferViewImpl _g6 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ax(_g6,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_g6) + x)); } { ::haxe::io::ArrayBufferViewImpl _g7 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ay(_g7,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_g7) + y)); } { ::haxe::io::ArrayBufferViewImpl _g8 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bx(_g8,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_g8) + x)); } { ::haxe::io::ArrayBufferViewImpl _g9 = _e; 
::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_by(_g9,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_g9) + y)); } { ::haxe::io::ArrayBufferViewImpl _g10 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cx(_g10,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_g10) + x)); } { ::haxe::io::ArrayBufferViewImpl _g11 = _e; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cy(_g11,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_g11) + y)); } } } HX_END_LOCAL_FUNC3((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_1, ::haxe::io::ArrayBufferViewImpl,_e1) HXARGC(2) void _hx_run(Float dx,Float dy){ ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::moveDelta(_e1,dx,dy); } HX_END_LOCAL_FUNC2((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_2, ::haxe::io::ArrayBufferViewImpl,_e2) HXARGC(4) void _hx_run(Float x,Float y,Float cos,Float sin){ { ::haxe::io::ArrayBufferViewImpl _g = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ax(_g,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_g) - x)); } { ::haxe::io::ArrayBufferViewImpl _g1 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ay(_g1,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_g1) - y)); } { ::haxe::io::ArrayBufferViewImpl _g2 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bx(_g2,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_g2) - x)); } { ::haxe::io::ArrayBufferViewImpl _g3 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_by(_g3,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_g3) - y)); } { ::haxe::io::ArrayBufferViewImpl _g4 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cx(_g4,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_g4) - x)); } { ::haxe::io::ArrayBufferViewImpl _g5 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cy(_g5,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_g5) - y)); } Float dx = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e2); Float dy = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e2); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ax(_e2,((dx * cos) - (dy * sin))); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ay(_e2,((dx * sin) + (dy * cos))); dx = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e2); dy = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e2); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bx(_e2,((dx * cos) - (dy * sin))); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_by(_e2,((dx * sin) + (dy * cos))); dx = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e2); dy 
= ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e2); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cx(_e2,((dx * cos) - (dy * sin))); ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cy(_e2,((dx * sin) + (dy * cos))); { ::haxe::io::ArrayBufferViewImpl _g6 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ax(_g6,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_g6) + x)); } { ::haxe::io::ArrayBufferViewImpl _g7 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ay(_g7,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_g7) + y)); } { ::haxe::io::ArrayBufferViewImpl _g8 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bx(_g8,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_g8) + x)); } { ::haxe::io::ArrayBufferViewImpl _g9 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_by(_g9,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_g9) + y)); } { ::haxe::io::ArrayBufferViewImpl _g10 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cx(_g10,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_g10) + x)); } { ::haxe::io::ArrayBufferViewImpl _g11 = _e2; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cy(_g11,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_g11) + y)); } } HX_END_LOCAL_FUNC4((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_3, ::haxe::io::ArrayBufferViewImpl,_e3) HXARGC(2) bool _hx_run(Float px,Float py){ return ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::fullHit(_e3,px,py); } HX_END_LOCAL_FUNC2(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_4, ::haxe::io::ArrayBufferViewImpl,_e4) HXARGC(2) bool _hx_run(Float px,Float py){ Float planeAB = (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e4) - px); Float planeAB1 = (planeAB * (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e4) - py)); Float planeAB2 = (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e4) - px); Float planeAB3 = (planeAB1 - (planeAB2 * (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e4) - py))); Float planeBC = (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e4) - px); Float planeBC1 = (planeBC * (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e4) - py)); Float planeBC2 = (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e4) - px); Float planeBC3 = (planeBC1 - (planeBC2 * (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e4) - py))); Float planeCA = (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e4) - px); Float planeCA1 = (planeCA * (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e4) - py)); Float planeCA2 = 
(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e4) - px); Float planeCA3 = (planeCA1 - (planeCA2 * (::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e4) - py))); int triangleAbstract = ::Std_obj::_hx_int((::Math_obj::abs(planeAB3) / planeAB3)); if ((triangleAbstract == ::Std_obj::_hx_int((::Math_obj::abs(planeBC3) / planeBC3)))) { int triangleAbstract = ::Std_obj::_hx_int((::Math_obj::abs(planeBC3) / planeBC3)); return (triangleAbstract == ::Std_obj::_hx_int((::Math_obj::abs(planeCA3) / planeCA3))); } else { return false; } return false; } HX_END_LOCAL_FUNC2(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_5, ::haxe::io::ArrayBufferViewImpl,_e5) HXARGC(0) Float _hx_run(){ Float triangleAbstract = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e5); Float triangleAbstract1 = ::Math_obj::max(triangleAbstract,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e5)); return ::Math_obj::max(triangleAbstract1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e5)); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_6, ::haxe::io::ArrayBufferViewImpl,_e6) HXARGC(0) Float _hx_run(){ Float triangleAbstract = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_az(_e6); Float triangleAbstract1 = ::Math_obj::max(triangleAbstract,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bz(_e6)); return ::Math_obj::max(triangleAbstract1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cz(_e6)); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_7, ::haxe::io::ArrayBufferViewImpl,_e7) HXARGC(0) Float _hx_run(){ Float triangleAbstract = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e7); Float triangleAbstract1 = ::Math_obj::max(triangleAbstract,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e7)); return ::Math_obj::max(triangleAbstract1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e7)); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_8, ::haxe::io::ArrayBufferViewImpl,_e8) HXARGC(0) Float _hx_run(){ Float triangleAbstract = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e8); Float triangleAbstract1 = ::Math_obj::min(triangleAbstract,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e8)); return ::Math_obj::min(triangleAbstract1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e8)); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_9, ::haxe::io::ArrayBufferViewImpl,_e9) HXARGC(1) Float _hx_run(Float x){ Float dx = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e9); Float dx1 = ::Math_obj::min(dx,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e9)); Float dx2 = (x - ::Math_obj::min(dx1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e9))); ::haxe::io::ArrayBufferViewImpl _e = _e9; 
::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ax(_e,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e9) + dx2)); ::haxe::io::ArrayBufferViewImpl _e1 = _e9; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bx(_e1,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e9) + dx2)); ::haxe::io::ArrayBufferViewImpl _e2 = _e9; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cx(_e2,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e9) + dx2)); return x; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_10, ::haxe::io::ArrayBufferViewImpl,_e10) HXARGC(0) Float _hx_run(){ Float triangleAbstract = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e10); Float triangleAbstract1 = ::Math_obj::min(triangleAbstract,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e10)); return ::Math_obj::min(triangleAbstract1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e10)); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_11, ::haxe::io::ArrayBufferViewImpl,_e11) HXARGC(1) Float _hx_run(Float y){ Float dy = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e11); Float dy1 = ::Math_obj::min(dy,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e11)); Float dy2 = (y - ::Math_obj::min(dy1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e11))); ::haxe::io::ArrayBufferViewImpl _e = _e11; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_ay(_e,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e11) + dy2)); ::haxe::io::ArrayBufferViewImpl _e1 = _e11; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_by(_e1,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e11) + dy2)); ::haxe::io::ArrayBufferViewImpl _e2 = _e11; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cy(_e2,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e11) + dy2)); return y; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_12, ::haxe::io::ArrayBufferViewImpl,_e12) HXARGC(0) Float _hx_run(){ Float triangleAbstract = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_az(_e12); Float triangleAbstract1 = ::Math_obj::min(triangleAbstract,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bz(_e12)); return ::Math_obj::min(triangleAbstract1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cz(_e12)); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_13, ::haxe::io::ArrayBufferViewImpl,_e13) HXARGC(1) Float _hx_run(Float z){ Float dz = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_az(_e13); Float dz1 = ::Math_obj::min(dz,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bz(_e13)); Float dz2 = (z - 
::Math_obj::min(dz1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cz(_e13))); ::haxe::io::ArrayBufferViewImpl _e = _e13; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_az(_e,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_az(_e13) + dz2)); ::haxe::io::ArrayBufferViewImpl _e1 = _e13; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_bz(_e1,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bz(_e13) + dz2)); ::haxe::io::ArrayBufferViewImpl _e2 = _e13; ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_cz(_e2,(::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cz(_e13) + dz2)); return z; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_14, ::haxe::io::ArrayBufferViewImpl,_e14) HXARGC(9) bool _hx_run(Float ax_,Float ay_,Float az_,Float bx_,Float by_,Float bz_,Float cx_,Float cy_,Float cz_){ return ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::triangle(_e14,ax_,ay_,az_,bx_,by_,bz_,cx_,cy_,cz_); } HX_END_LOCAL_FUNC9(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_15, ::haxe::io::ArrayBufferViewImpl,_e15) HXARGC(0) ::trilateral3::structure::Triangle3D _hx_run(){ HX_JUST_GC_STACKFRAME Float _g = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e15); Float _g1 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e15); ::trilateral3::matrix::Vertex pa = ::trilateral3::matrix::Vertex_obj::__alloc( HX_CTX ,_g,_g1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_az(_e15),((Float)1.)); Float _g2 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e15); Float _g3 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e15); ::trilateral3::matrix::Vertex pb = ::trilateral3::matrix::Vertex_obj::__alloc( HX_CTX ,_g2,_g3,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bz(_e15),((Float)1.)); Float _g4 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e15); Float _g5 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e15); ::trilateral3::matrix::Vertex pc = ::trilateral3::matrix::Vertex_obj::__alloc( HX_CTX ,_g4,_g5,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cz(_e15),((Float)1.)); return ::trilateral3::structure::Triangle3D_obj::__alloc( HX_CTX ,pa,pb,pc); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_16, ::haxe::io::ArrayBufferViewImpl,_e16) HXARGC(1) void _hx_run( ::trilateral3::matrix::MatrixDozen m){ ::trilateral3::geom::_FlatColorTriangles::FlatColorTriangles_Impl__obj::transform(_e16,m); } HX_END_LOCAL_FUNC1((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_17, ::haxe::io::ArrayBufferViewImpl,_e17) HXARGC(9) bool _hx_run(Float ax_,Float ay_,Float az_,Float bx_,Float by_,Float bz_,Float cx_,Float cy_,Float cz_){ return ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::triangle(_e17,ax_,ay_,az_,bx_,by_,bz_,cx_,cy_,cz_); } HX_END_LOCAL_FUNC9(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_18, ::haxe::io::ArrayBufferViewImpl,_e18) HXARGC(1) void _hx_run( 
::trilateral3::matrix::MatrixDozen m){ ::trilateral3::geom::_FlatColorTriangles::FlatColorTriangles_Impl__obj::transform(_e18,m); } HX_END_LOCAL_FUNC1((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_19, ::haxe::io::ArrayBufferViewImpl,_e19) HXARGC(2) void _hx_run( ::trilateral3::matrix::MatrixDozen m, ::trilateral3::structure::StartEnd startEnd){ ::trilateral3::geom::_FlatColorTriangles::FlatColorTriangles_Impl__obj::transformRange(_e19,m,startEnd); } HX_END_LOCAL_FUNC2((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_20, ::haxe::io::ArrayBufferViewImpl,_e20) HXARGC(0) ::trilateral3::structure::Triangle3D _hx_run(){ HX_JUST_GC_STACKFRAME Float _g = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ax(_e20); Float _g1 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_ay(_e20); ::trilateral3::matrix::Vertex pa = ::trilateral3::matrix::Vertex_obj::__alloc( HX_CTX ,_g,_g1,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_az(_e20),((Float)1.)); Float _g2 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bx(_e20); Float _g3 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_by(_e20); ::trilateral3::matrix::Vertex pb = ::trilateral3::matrix::Vertex_obj::__alloc( HX_CTX ,_g2,_g3,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_bz(_e20),((Float)1.)); Float _g4 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cx(_e20); Float _g5 = ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cy(_e20); ::trilateral3::matrix::Vertex pc = ::trilateral3::matrix::Vertex_obj::__alloc( HX_CTX ,_g4,_g5,::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_cz(_e20),((Float)1.)); return ::trilateral3::structure::Triangle3D_obj::__alloc( HX_CTX ,pa,pb,pc); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_21, ::haxe::io::ArrayBufferViewImpl,_e21) HXARGC(0) Float _hx_run(){ { ::haxe::io::Bytes _this = _e21->bytes; int pos = _e21->byteOffset; bool pos_; if ((pos >= 0)) { pos_ = ((pos + 4) > _this->length); } else { pos_ = true; } if (pos_) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float pos_1 = ( ::__hxcpp_memory_get_float(_this->b,pos) + ((Float)1.)); if ((0 < (_e21->byteLength >> 2))) { _e21->bytes->setFloat(_e21->byteOffset,pos_1); } ::haxe::io::Bytes _this1 = _e21->bytes; int pos1 = _e21->byteOffset; bool drawAbstract; if ((pos1 >= 0)) { drawAbstract = ((pos1 + 4) > _this1->length); } else { drawAbstract = true; } if (drawAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float drawAbstract1 = ::__hxcpp_memory_get_float(_this1->b,pos1); ::haxe::io::Bytes _this2 = _e21->bytes; int pos2 = (4 + _e21->byteOffset); bool drawAbstract2; if ((pos2 >= 0)) { drawAbstract2 = ((pos2 + 4) > _this2->length); } else { drawAbstract2 = true; } if (drawAbstract2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } if ((drawAbstract1 > ( ::__hxcpp_memory_get_float(_this2->b,pos2) - ( (Float)(1) )))) { ::haxe::io::Bytes _this = _e21->bytes; int pos = _e21->byteOffset; bool drawAbstract; if ((pos >= 0)) { drawAbstract = ((pos + 4) > _this->length); } else { drawAbstract = true; } if (drawAbstract) { 
HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float value = ::__hxcpp_memory_get_float(_this->b,pos); if ((1 < (_e21->byteLength >> 2))) { _e21->bytes->setFloat((4 + _e21->byteOffset),value); } } } ::haxe::io::Bytes _this3 = _e21->bytes; int pos3 = _e21->byteOffset; bool drawAbstract3; if ((pos3 >= 0)) { drawAbstract3 = ((pos3 + 4) > _this3->length); } else { drawAbstract3 = true; } if (drawAbstract3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } return ::__hxcpp_memory_get_float(_this3->b,pos3); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_22, ::haxe::io::ArrayBufferViewImpl,_e22) HXARGC(0) bool _hx_run(){ ::haxe::io::Bytes _this = _e22->bytes; int pos = _e22->byteOffset; bool drawAbstract; if ((pos >= 0)) { drawAbstract = ((pos + 4) > _this->length); } else { drawAbstract = true; } if (drawAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float drawAbstract1 = ::__hxcpp_memory_get_float(_this->b,pos); return (drawAbstract1 < ::dsHelper::flatInterleave::core::_Flat32::Flat32_Impl__obj::get_size(_e22)); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_23, ::haxe::io::ArrayBufferViewImpl,_e23) HXARGC(0) Float _hx_run(){ ::haxe::io::Bytes _this = _e23->bytes; int pos = _e23->byteOffset; bool drawAbstract; if ((pos >= 0)) { drawAbstract = ((pos + 4) > _this->length); } else { drawAbstract = true; } if (drawAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } return ::__hxcpp_memory_get_float(_this->b,pos); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_24, ::haxe::io::ArrayBufferViewImpl,_e24) HXARGC(1) Float _hx_run(Float pos_){ if ((0 < (_e24->byteLength >> 2))) { _e24->bytes->setFloat(_e24->byteOffset,pos_); } ::haxe::io::Bytes _this = _e24->bytes; int pos = _e24->byteOffset; bool drawAbstract; if ((pos >= 0)) { drawAbstract = ((pos + 4) > _this->length); } else { drawAbstract = true; } if (drawAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float drawAbstract1 = ::__hxcpp_memory_get_float(_this->b,pos); ::haxe::io::Bytes _this1 = _e24->bytes; int pos1 = (4 + _e24->byteOffset); bool drawAbstract2; if ((pos1 >= 0)) { drawAbstract2 = ((pos1 + 4) > _this1->length); } else { drawAbstract2 = true; } if (drawAbstract2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } if ((drawAbstract1 > ( ::__hxcpp_memory_get_float(_this1->b,pos1) - ( (Float)(1) )))) { ::haxe::io::Bytes _this = _e24->bytes; int pos = _e24->byteOffset; bool drawAbstract; if ((pos >= 0)) { drawAbstract = ((pos + 4) > _this->length); } else { drawAbstract = true; } if (drawAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float value = ::__hxcpp_memory_get_float(_this->b,pos); if ((1 < (_e24->byteLength >> 2))) { _e24->bytes->setFloat((4 + _e24->byteOffset),value); } } return pos_; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_25, ::haxe::io::ArrayBufferViewImpl,_e25) HXARGC(0) int _hx_run(){ return ::dsHelper::flatInterleave::core::_Flat32::Flat32_Impl__obj::get_size(_e25); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_26, ::haxe::io::ArrayBufferViewImpl,_e26) HXARGC(1) int _hx_run(int id){ 
return ::dsHelper::flatInterleave::core::_Flat32::Flat32_Impl__obj::set_size(_e26,id); } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_27, ::haxe::io::ArrayBufferViewImpl,_e27) HXARGC(1) int _hx_run(int col){ { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redA(_e27,(( (Float)(((col >> 16) & 255)) ) / ( (Float)(255) ))); { Float v = (( (Float)((col & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this = _e27->bytes; int pos = _e27->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 5) + 2); bool color3Abstract; if ((index >= 0)) { color3Abstract = (index < (_e27->byteLength >> 2)); } else { color3Abstract = false; } if (color3Abstract) { _e27->bytes->setFloat(((index << 2) + _e27->byteOffset),v); } } } } { Float v1 = (( (Float)(((col >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this1 = _e27->bytes; int pos1 = _e27->byteOffset; bool k1; if ((pos1 >= 0)) { k1 = ((pos1 + 4) > _this1->length); } else { k1 = true; } if (k1) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index1 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this1->b,pos1)) * 21) + 4) + 2); bool color3Abstract1; if ((index1 >= 0)) { color3Abstract1 = (index1 < (_e27->byteLength >> 2)); } else { color3Abstract1 = false; } if (color3Abstract1) { _e27->bytes->setFloat(((index1 << 2) + _e27->byteOffset),v1); } } } } { Float v2 = (( (Float)(((col >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this2 = _e27->bytes; int pos2 = _e27->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index2 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 6) + 2); bool color3Abstract2; if ((index2 >= 0)) { color3Abstract2 = (index2 < (_e27->byteLength >> 2)); } else { color3Abstract2 = false; } if (color3Abstract2) { _e27->bytes->setFloat(((index2 << 2) + _e27->byteOffset),v2); } } } } } { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redB(_e27,(( (Float)(((col >> 16) & 255)) ) / ( (Float)(255) ))); { Float v3 = (( (Float)((col & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this3 = _e27->bytes; int pos3 = _e27->byteOffset; bool k3; if ((pos3 >= 0)) { k3 = ((pos3 + 4) > _this3->length); } else { k3 = true; } if (k3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index3 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this3->b,pos3)) * 21) + 12) + 2); bool color3Abstract3; if ((index3 >= 0)) { color3Abstract3 = (index3 < (_e27->byteLength >> 2)); } else { color3Abstract3 = false; } if (color3Abstract3) { _e27->bytes->setFloat(((index3 << 2) + _e27->byteOffset),v3); } } } } { Float v4 = (( (Float)(((col >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this4 = _e27->bytes; int pos4 = _e27->byteOffset; bool k4; if ((pos4 >= 0)) { k4 = ((pos4 + 4) > _this4->length); } else { k4 = true; } if (k4) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index4 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this4->b,pos4)) * 21) 
+ 11) + 2); bool color3Abstract4; if ((index4 >= 0)) { color3Abstract4 = (index4 < (_e27->byteLength >> 2)); } else { color3Abstract4 = false; } if (color3Abstract4) { _e27->bytes->setFloat(((index4 << 2) + _e27->byteOffset),v4); } } } } { Float v5 = (( (Float)(((col >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this5 = _e27->bytes; int pos5 = _e27->byteOffset; bool k5; if ((pos5 >= 0)) { k5 = ((pos5 + 4) > _this5->length); } else { k5 = true; } if (k5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index5 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this5->b,pos5)) * 21) + 13) + 2); bool color3Abstract5; if ((index5 >= 0)) { color3Abstract5 = (index5 < (_e27->byteLength >> 2)); } else { color3Abstract5 = false; } if (color3Abstract5) { _e27->bytes->setFloat(((index5 << 2) + _e27->byteOffset),v5); } } } } } { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redC(_e27,(( (Float)(((col >> 16) & 255)) ) / ( (Float)(255) ))); { Float v6 = (( (Float)((col & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this6 = _e27->bytes; int pos6 = _e27->byteOffset; bool k6; if ((pos6 >= 0)) { k6 = ((pos6 + 4) > _this6->length); } else { k6 = true; } if (k6) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index6 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this6->b,pos6)) * 21) + 19) + 2); bool color3Abstract6; if ((index6 >= 0)) { color3Abstract6 = (index6 < (_e27->byteLength >> 2)); } else { color3Abstract6 = false; } if (color3Abstract6) { _e27->bytes->setFloat(((index6 << 2) + _e27->byteOffset),v6); } } } } { Float v7 = (( (Float)(((col >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this7 = _e27->bytes; int pos7 = _e27->byteOffset; bool k7; if ((pos7 >= 0)) { k7 = ((pos7 + 4) > _this7->length); } else { k7 = true; } if (k7) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index7 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this7->b,pos7)) * 21) + 18) + 2); bool color3Abstract7; if ((index7 >= 0)) { color3Abstract7 = (index7 < (_e27->byteLength >> 2)); } else { color3Abstract7 = false; } if (color3Abstract7) { _e27->bytes->setFloat(((index7 << 2) + _e27->byteOffset),v7); } } } } { Float v8 = (( (Float)(((col >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this8 = _e27->bytes; int pos8 = _e27->byteOffset; bool k8; if ((pos8 >= 0)) { k8 = ((pos8 + 4) > _this8->length); } else { k8 = true; } if (k8) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index8 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this8->b,pos8)) * 21) + 20) + 2); bool color3Abstract8; if ((index8 >= 0)) { color3Abstract8 = (index8 < (_e27->byteLength >> 2)); } else { color3Abstract8 = false; } if (color3Abstract8) { _e27->bytes->setFloat(((index8 << 2) + _e27->byteOffset),v8); } } } } } return col; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_28, ::haxe::io::ArrayBufferViewImpl,_e28) HXARGC(1) int _hx_run(int col){ ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redA(_e28,(( (Float)(((col >> 16) & 255)) ) / ( (Float)(255) ))); { Float v = (( (Float)((col & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this = _e28->bytes; int pos = _e28->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { 
HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 5) + 2); bool color3Abstract; if ((index >= 0)) { color3Abstract = (index < (_e28->byteLength >> 2)); } else { color3Abstract = false; } if (color3Abstract) { _e28->bytes->setFloat(((index << 2) + _e28->byteOffset),v); } } } } { Float v1 = (( (Float)(((col >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this1 = _e28->bytes; int pos1 = _e28->byteOffset; bool k1; if ((pos1 >= 0)) { k1 = ((pos1 + 4) > _this1->length); } else { k1 = true; } if (k1) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index1 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this1->b,pos1)) * 21) + 4) + 2); bool color3Abstract1; if ((index1 >= 0)) { color3Abstract1 = (index1 < (_e28->byteLength >> 2)); } else { color3Abstract1 = false; } if (color3Abstract1) { _e28->bytes->setFloat(((index1 << 2) + _e28->byteOffset),v1); } } } } { Float v2 = (( (Float)(((col >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this2 = _e28->bytes; int pos2 = _e28->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index2 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 6) + 2); bool color3Abstract2; if ((index2 >= 0)) { color3Abstract2 = (index2 < (_e28->byteLength >> 2)); } else { color3Abstract2 = false; } if (color3Abstract2) { _e28->bytes->setFloat(((index2 << 2) + _e28->byteOffset),v2); } } } } return col; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_29, ::haxe::io::ArrayBufferViewImpl,_e29) HXARGC(0) int _hx_run(){ ::haxe::io::Bytes _this = _e29->bytes; int pos = _e29->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k1 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 6); ::haxe::io::Bytes _this1 = _e29->bytes; int pos1 = (((k1 + 2) << 2) + _e29->byteOffset); bool color3Abstract; if ((pos1 >= 0)) { color3Abstract = ((pos1 + 4) > _this1->length); } else { color3Abstract = true; } if (color3Abstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int color3Abstract1 = (::Math_obj::round(( ::__hxcpp_memory_get_float(_this1->b,pos1) * ( (Float)(255) ))) << 24); int color3Abstract2 = (color3Abstract1 | (::Math_obj::round((::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_redA(_e29) * ( (Float)(255) ))) << 16)); ::haxe::io::Bytes _this2 = _e29->bytes; int pos2 = _e29->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k3 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 4); ::haxe::io::Bytes _this3 = _e29->bytes; int pos3 = (((k3 + 2) << 2) + _e29->byteOffset); bool color3Abstract3; if ((pos3 >= 0)) { color3Abstract3 = ((pos3 + 4) > _this3->length); } else { color3Abstract3 = true; } if (color3Abstract3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int color3Abstract4 = 
(color3Abstract2 | (::Math_obj::round(( ::__hxcpp_memory_get_float(_this3->b,pos3) * ( (Float)(255) ))) << 8)); ::haxe::io::Bytes _this4 = _e29->bytes; int pos4 = _e29->byteOffset; bool k4; if ((pos4 >= 0)) { k4 = ((pos4 + 4) > _this4->length); } else { k4 = true; } if (k4) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k5 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this4->b,pos4)) * 21) + 5); ::haxe::io::Bytes _this5 = _e29->bytes; int pos5 = (((k5 + 2) << 2) + _e29->byteOffset); bool color3Abstract5; if ((pos5 >= 0)) { color3Abstract5 = ((pos5 + 4) > _this5->length); } else { color3Abstract5 = true; } if (color3Abstract5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } return (color3Abstract4 | ::Math_obj::round(( ::__hxcpp_memory_get_float(_this5->b,pos5) * ( (Float)(255) )))); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_30, ::haxe::io::ArrayBufferViewImpl,_e30) HXARGC(1) int _hx_run(int col){ ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redB(_e30,(( (Float)(((col >> 16) & 255)) ) / ( (Float)(255) ))); { Float v = (( (Float)((col & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this = _e30->bytes; int pos = _e30->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 12) + 2); bool color3Abstract; if ((index >= 0)) { color3Abstract = (index < (_e30->byteLength >> 2)); } else { color3Abstract = false; } if (color3Abstract) { _e30->bytes->setFloat(((index << 2) + _e30->byteOffset),v); } } } } { Float v1 = (( (Float)(((col >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this1 = _e30->bytes; int pos1 = _e30->byteOffset; bool k1; if ((pos1 >= 0)) { k1 = ((pos1 + 4) > _this1->length); } else { k1 = true; } if (k1) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index1 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this1->b,pos1)) * 21) + 11) + 2); bool color3Abstract1; if ((index1 >= 0)) { color3Abstract1 = (index1 < (_e30->byteLength >> 2)); } else { color3Abstract1 = false; } if (color3Abstract1) { _e30->bytes->setFloat(((index1 << 2) + _e30->byteOffset),v1); } } } } { Float v2 = (( (Float)(((col >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this2 = _e30->bytes; int pos2 = _e30->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index2 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 13) + 2); bool color3Abstract2; if ((index2 >= 0)) { color3Abstract2 = (index2 < (_e30->byteLength >> 2)); } else { color3Abstract2 = false; } if (color3Abstract2) { _e30->bytes->setFloat(((index2 << 2) + _e30->byteOffset),v2); } } } } return col; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_31, ::haxe::io::ArrayBufferViewImpl,_e31) HXARGC(0) int _hx_run(){ ::haxe::io::Bytes _this = _e31->bytes; int pos = _e31->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { 
HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k1 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 6); ::haxe::io::Bytes _this1 = _e31->bytes; int pos1 = (((k1 + 2) << 2) + _e31->byteOffset); bool color3Abstract; if ((pos1 >= 0)) { color3Abstract = ((pos1 + 4) > _this1->length); } else { color3Abstract = true; } if (color3Abstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int color3Abstract1 = (::Math_obj::round(( ::__hxcpp_memory_get_float(_this1->b,pos1) * ( (Float)(255) ))) << 24); int color3Abstract2 = (color3Abstract1 | (::Math_obj::round((::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_redA(_e31) * ( (Float)(255) ))) << 16)); ::haxe::io::Bytes _this2 = _e31->bytes; int pos2 = _e31->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k3 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 4); ::haxe::io::Bytes _this3 = _e31->bytes; int pos3 = (((k3 + 2) << 2) + _e31->byteOffset); bool color3Abstract3; if ((pos3 >= 0)) { color3Abstract3 = ((pos3 + 4) > _this3->length); } else { color3Abstract3 = true; } if (color3Abstract3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int color3Abstract4 = (color3Abstract2 | (::Math_obj::round(( ::__hxcpp_memory_get_float(_this3->b,pos3) * ( (Float)(255) ))) << 8)); ::haxe::io::Bytes _this4 = _e31->bytes; int pos4 = _e31->byteOffset; bool k4; if ((pos4 >= 0)) { k4 = ((pos4 + 4) > _this4->length); } else { k4 = true; } if (k4) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k5 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this4->b,pos4)) * 21) + 5); ::haxe::io::Bytes _this5 = _e31->bytes; int pos5 = (((k5 + 2) << 2) + _e31->byteOffset); bool color3Abstract5; if ((pos5 >= 0)) { color3Abstract5 = ((pos5 + 4) > _this5->length); } else { color3Abstract5 = true; } if (color3Abstract5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } return (color3Abstract4 | ::Math_obj::round(( ::__hxcpp_memory_get_float(_this5->b,pos5) * ( (Float)(255) )))); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_32, ::haxe::io::ArrayBufferViewImpl,_e32) HXARGC(1) int _hx_run(int col){ ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redC(_e32,(( (Float)(((col >> 16) & 255)) ) / ( (Float)(255) ))); { Float v = (( (Float)((col & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this = _e32->bytes; int pos = _e32->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 19) + 2); bool color3Abstract; if ((index >= 0)) { color3Abstract = (index < (_e32->byteLength >> 2)); } else { color3Abstract = false; } if (color3Abstract) { _e32->bytes->setFloat(((index << 2) + _e32->byteOffset),v); } } } } { Float v1 = (( (Float)(((col >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this1 = _e32->bytes; int pos1 = _e32->byteOffset; bool k1; if ((pos1 >= 0)) { k1 = ((pos1 + 4) > _this1->length); 
} else { k1 = true; } if (k1) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index1 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this1->b,pos1)) * 21) + 18) + 2); bool color3Abstract1; if ((index1 >= 0)) { color3Abstract1 = (index1 < (_e32->byteLength >> 2)); } else { color3Abstract1 = false; } if (color3Abstract1) { _e32->bytes->setFloat(((index1 << 2) + _e32->byteOffset),v1); } } } } { Float v2 = (( (Float)(((col >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this2 = _e32->bytes; int pos2 = _e32->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index2 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 20) + 2); bool color3Abstract2; if ((index2 >= 0)) { color3Abstract2 = (index2 < (_e32->byteLength >> 2)); } else { color3Abstract2 = false; } if (color3Abstract2) { _e32->bytes->setFloat(((index2 << 2) + _e32->byteOffset),v2); } } } } return col; } HX_END_LOCAL_FUNC1(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_33, ::haxe::io::ArrayBufferViewImpl,_e33) HXARGC(0) int _hx_run(){ ::haxe::io::Bytes _this = _e33->bytes; int pos = _e33->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k1 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 6); ::haxe::io::Bytes _this1 = _e33->bytes; int pos1 = (((k1 + 2) << 2) + _e33->byteOffset); bool color3Abstract; if ((pos1 >= 0)) { color3Abstract = ((pos1 + 4) > _this1->length); } else { color3Abstract = true; } if (color3Abstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int color3Abstract1 = (::Math_obj::round(( ::__hxcpp_memory_get_float(_this1->b,pos1) * ( (Float)(255) ))) << 24); int color3Abstract2 = (color3Abstract1 | (::Math_obj::round((::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_redA(_e33) * ( (Float)(255) ))) << 16)); ::haxe::io::Bytes _this2 = _e33->bytes; int pos2 = _e33->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k3 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 4); ::haxe::io::Bytes _this3 = _e33->bytes; int pos3 = (((k3 + 2) << 2) + _e33->byteOffset); bool color3Abstract3; if ((pos3 >= 0)) { color3Abstract3 = ((pos3 + 4) > _this3->length); } else { color3Abstract3 = true; } if (color3Abstract3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int color3Abstract4 = (color3Abstract2 | (::Math_obj::round(( ::__hxcpp_memory_get_float(_this3->b,pos3) * ( (Float)(255) ))) << 8)); ::haxe::io::Bytes _this4 = _e33->bytes; int pos4 = _e33->byteOffset; bool k4; if ((pos4 >= 0)) { k4 = ((pos4 + 4) > _this4->length); } else { k4 = true; } if (k4) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k5 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this4->b,pos4)) * 21) + 5); ::haxe::io::Bytes _this5 = _e33->bytes; int pos5 = (((k5 + 2) << 2) + _e33->byteOffset); bool color3Abstract5; if ((pos5 >= 0)) { color3Abstract5 = 
((pos5 + 4) > _this5->length); } else { color3Abstract5 = true; } if (color3Abstract5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } return (color3Abstract4 | ::Math_obj::round(( ::__hxcpp_memory_get_float(_this5->b,pos5) * ( (Float)(255) )))); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_34, ::haxe::io::ArrayBufferViewImpl,_e34) HXARGC(3) void _hx_run(int colorA,int colorB,int colorC){ { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redA(_e34,(( (Float)(((colorA >> 16) & 255)) ) / ( (Float)(255) ))); { Float v = (( (Float)((colorA & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this = _e34->bytes; int pos = _e34->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 5) + 2); bool colorAbstract; if ((index >= 0)) { colorAbstract = (index < (_e34->byteLength >> 2)); } else { colorAbstract = false; } if (colorAbstract) { _e34->bytes->setFloat(((index << 2) + _e34->byteOffset),v); } } } } { Float v1 = (( (Float)(((colorA >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this1 = _e34->bytes; int pos1 = _e34->byteOffset; bool k1; if ((pos1 >= 0)) { k1 = ((pos1 + 4) > _this1->length); } else { k1 = true; } if (k1) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index1 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this1->b,pos1)) * 21) + 4) + 2); bool colorAbstract1; if ((index1 >= 0)) { colorAbstract1 = (index1 < (_e34->byteLength >> 2)); } else { colorAbstract1 = false; } if (colorAbstract1) { _e34->bytes->setFloat(((index1 << 2) + _e34->byteOffset),v1); } } } } { Float v2 = (( (Float)(((colorA >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this2 = _e34->bytes; int pos2 = _e34->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index2 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 6) + 2); bool colorAbstract2; if ((index2 >= 0)) { colorAbstract2 = (index2 < (_e34->byteLength >> 2)); } else { colorAbstract2 = false; } if (colorAbstract2) { _e34->bytes->setFloat(((index2 << 2) + _e34->byteOffset),v2); } } } } } { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redB(_e34,(( (Float)(((colorB >> 16) & 255)) ) / ( (Float)(255) ))); { Float v3 = (( (Float)((colorB & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this3 = _e34->bytes; int pos3 = _e34->byteOffset; bool k3; if ((pos3 >= 0)) { k3 = ((pos3 + 4) > _this3->length); } else { k3 = true; } if (k3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index3 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this3->b,pos3)) * 21) + 12) + 2); bool colorAbstract3; if ((index3 >= 0)) { colorAbstract3 = (index3 < (_e34->byteLength >> 2)); } else { colorAbstract3 = false; } if (colorAbstract3) { _e34->bytes->setFloat(((index3 << 2) + _e34->byteOffset),v3); } } } } { Float v4 = (( (Float)(((colorB >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this4 = _e34->bytes; int pos4 = _e34->byteOffset; bool k4; if ((pos4 >= 0)) { k4 = ((pos4 
+ 4) > _this4->length); } else { k4 = true; } if (k4) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index4 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this4->b,pos4)) * 21) + 11) + 2); bool colorAbstract4; if ((index4 >= 0)) { colorAbstract4 = (index4 < (_e34->byteLength >> 2)); } else { colorAbstract4 = false; } if (colorAbstract4) { _e34->bytes->setFloat(((index4 << 2) + _e34->byteOffset),v4); } } } } { Float v5 = (( (Float)(((colorB >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this5 = _e34->bytes; int pos5 = _e34->byteOffset; bool k5; if ((pos5 >= 0)) { k5 = ((pos5 + 4) > _this5->length); } else { k5 = true; } if (k5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index5 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this5->b,pos5)) * 21) + 13) + 2); bool colorAbstract5; if ((index5 >= 0)) { colorAbstract5 = (index5 < (_e34->byteLength >> 2)); } else { colorAbstract5 = false; } if (colorAbstract5) { _e34->bytes->setFloat(((index5 << 2) + _e34->byteOffset),v5); } } } } } { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redC(_e34,(( (Float)(((colorC >> 16) & 255)) ) / ( (Float)(255) ))); { Float v6 = (( (Float)((colorC & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this6 = _e34->bytes; int pos6 = _e34->byteOffset; bool k6; if ((pos6 >= 0)) { k6 = ((pos6 + 4) > _this6->length); } else { k6 = true; } if (k6) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index6 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this6->b,pos6)) * 21) + 19) + 2); bool colorAbstract6; if ((index6 >= 0)) { colorAbstract6 = (index6 < (_e34->byteLength >> 2)); } else { colorAbstract6 = false; } if (colorAbstract6) { _e34->bytes->setFloat(((index6 << 2) + _e34->byteOffset),v6); } } } } { Float v7 = (( (Float)(((colorC >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this7 = _e34->bytes; int pos7 = _e34->byteOffset; bool k7; if ((pos7 >= 0)) { k7 = ((pos7 + 4) > _this7->length); } else { k7 = true; } if (k7) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index7 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this7->b,pos7)) * 21) + 18) + 2); bool colorAbstract7; if ((index7 >= 0)) { colorAbstract7 = (index7 < (_e34->byteLength >> 2)); } else { colorAbstract7 = false; } if (colorAbstract7) { _e34->bytes->setFloat(((index7 << 2) + _e34->byteOffset),v7); } } } } { Float v8 = (( (Float)(((colorC >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this8 = _e34->bytes; int pos8 = _e34->byteOffset; bool k8; if ((pos8 >= 0)) { k8 = ((pos8 + 4) > _this8->length); } else { k8 = true; } if (k8) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index8 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this8->b,pos8)) * 21) + 20) + 2); bool colorAbstract8; if ((index8 >= 0)) { colorAbstract8 = (index8 < (_e34->byteLength >> 2)); } else { colorAbstract8 = false; } if (colorAbstract8) { _e34->bytes->setFloat(((index8 << 2) + _e34->byteOffset),v8); } } } } } } HX_END_LOCAL_FUNC3((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_35, ::haxe::io::ArrayBufferViewImpl,_e35) HXARGC(2) void _hx_run(int color,int times){ int _g = 0; int _g1 = times; while((_g < _g1)){ _g = (_g + 1); int i = (_g - 1); { { 
::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redA(_e35,(( (Float)(((color >> 16) & 255)) ) / ( (Float)(255) ))); { Float v = (( (Float)((color & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this = _e35->bytes; int pos = _e35->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 5) + 2); bool colorAbstract; if ((index >= 0)) { colorAbstract = (index < (_e35->byteLength >> 2)); } else { colorAbstract = false; } if (colorAbstract) { _e35->bytes->setFloat(((index << 2) + _e35->byteOffset),v); } } } } { Float v1 = (( (Float)(((color >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this1 = _e35->bytes; int pos1 = _e35->byteOffset; bool k1; if ((pos1 >= 0)) { k1 = ((pos1 + 4) > _this1->length); } else { k1 = true; } if (k1) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index1 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this1->b,pos1)) * 21) + 4) + 2); bool colorAbstract1; if ((index1 >= 0)) { colorAbstract1 = (index1 < (_e35->byteLength >> 2)); } else { colorAbstract1 = false; } if (colorAbstract1) { _e35->bytes->setFloat(((index1 << 2) + _e35->byteOffset),v1); } } } } { Float v2 = (( (Float)(((color >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this2 = _e35->bytes; int pos2 = _e35->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index2 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 6) + 2); bool colorAbstract2; if ((index2 >= 0)) { colorAbstract2 = (index2 < (_e35->byteLength >> 2)); } else { colorAbstract2 = false; } if (colorAbstract2) { _e35->bytes->setFloat(((index2 << 2) + _e35->byteOffset),v2); } } } } } { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redB(_e35,(( (Float)(((color >> 16) & 255)) ) / ( (Float)(255) ))); { Float v3 = (( (Float)((color & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this3 = _e35->bytes; int pos3 = _e35->byteOffset; bool k3; if ((pos3 >= 0)) { k3 = ((pos3 + 4) > _this3->length); } else { k3 = true; } if (k3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index3 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this3->b,pos3)) * 21) + 12) + 2); bool colorAbstract3; if ((index3 >= 0)) { colorAbstract3 = (index3 < (_e35->byteLength >> 2)); } else { colorAbstract3 = false; } if (colorAbstract3) { _e35->bytes->setFloat(((index3 << 2) + _e35->byteOffset),v3); } } } } { Float v4 = (( (Float)(((color >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this4 = _e35->bytes; int pos4 = _e35->byteOffset; bool k4; if ((pos4 >= 0)) { k4 = ((pos4 + 4) > _this4->length); } else { k4 = true; } if (k4) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index4 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this4->b,pos4)) * 21) + 11) + 2); bool colorAbstract4; if ((index4 >= 0)) { colorAbstract4 = (index4 < (_e35->byteLength >> 2)); } else { colorAbstract4 = false; } if (colorAbstract4) { _e35->bytes->setFloat(((index4 << 2) + _e35->byteOffset),v4); } } } } { Float v5 
= (( (Float)(((color >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this5 = _e35->bytes; int pos5 = _e35->byteOffset; bool k5; if ((pos5 >= 0)) { k5 = ((pos5 + 4) > _this5->length); } else { k5 = true; } if (k5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index5 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this5->b,pos5)) * 21) + 13) + 2); bool colorAbstract5; if ((index5 >= 0)) { colorAbstract5 = (index5 < (_e35->byteLength >> 2)); } else { colorAbstract5 = false; } if (colorAbstract5) { _e35->bytes->setFloat(((index5 << 2) + _e35->byteOffset),v5); } } } } } { ::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::set_redC(_e35,(( (Float)(((color >> 16) & 255)) ) / ( (Float)(255) ))); { Float v6 = (( (Float)((color & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this6 = _e35->bytes; int pos6 = _e35->byteOffset; bool k6; if ((pos6 >= 0)) { k6 = ((pos6 + 4) > _this6->length); } else { k6 = true; } if (k6) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index6 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this6->b,pos6)) * 21) + 19) + 2); bool colorAbstract6; if ((index6 >= 0)) { colorAbstract6 = (index6 < (_e35->byteLength >> 2)); } else { colorAbstract6 = false; } if (colorAbstract6) { _e35->bytes->setFloat(((index6 << 2) + _e35->byteOffset),v6); } } } } { Float v7 = (( (Float)(((color >> 8) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this7 = _e35->bytes; int pos7 = _e35->byteOffset; bool k7; if ((pos7 >= 0)) { k7 = ((pos7 + 4) > _this7->length); } else { k7 = true; } if (k7) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index7 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this7->b,pos7)) * 21) + 18) + 2); bool colorAbstract7; if ((index7 >= 0)) { colorAbstract7 = (index7 < (_e35->byteLength >> 2)); } else { colorAbstract7 = false; } if (colorAbstract7) { _e35->bytes->setFloat(((index7 << 2) + _e35->byteOffset),v7); } } } } { Float v8 = (( (Float)(((color >> 24) & 255)) ) / ( (Float)(255) )); { ::haxe::io::Bytes _this8 = _e35->bytes; int pos8 = _e35->byteOffset; bool k8; if ((pos8 >= 0)) { k8 = ((pos8 + 4) > _this8->length); } else { k8 = true; } if (k8) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { int index8 = (((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this8->b,pos8)) * 21) + 20) + 2); bool colorAbstract8; if ((index8 >= 0)) { colorAbstract8 = (index8 < (_e35->byteLength >> 2)); } else { colorAbstract8 = false; } if (colorAbstract8) { _e35->bytes->setFloat(((index8 << 2) + _e35->byteOffset),v8); } } } } } } { ::haxe::io::Bytes _this9 = _e35->bytes; int pos9 = _e35->byteOffset; bool colorAbstract9; if ((pos9 >= 0)) { colorAbstract9 = ((pos9 + 4) > _this9->length); } else { colorAbstract9 = true; } if (colorAbstract9) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } { Float pos_ = ( ::__hxcpp_memory_get_float(_this9->b,pos9) + 1); if ((0 < (_e35->byteLength >> 2))) { _e35->bytes->setFloat(_e35->byteOffset,pos_); } ::haxe::io::Bytes _this10 = _e35->bytes; int pos10 = _e35->byteOffset; bool colorAbstract10; if ((pos10 >= 0)) { colorAbstract10 = ((pos10 + 4) > _this10->length); } else { colorAbstract10 = true; } if (colorAbstract10) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float 
colorAbstract11 = ::__hxcpp_memory_get_float(_this10->b,pos10); ::haxe::io::Bytes _this11 = _e35->bytes; int pos11 = (4 + _e35->byteOffset); bool colorAbstract12; if ((pos11 >= 0)) { colorAbstract12 = ((pos11 + 4) > _this11->length); } else { colorAbstract12 = true; } if (colorAbstract12) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } if ((colorAbstract11 > ( ::__hxcpp_memory_get_float(_this11->b,pos11) - ( (Float)(1) )))) { ::haxe::io::Bytes _this = _e35->bytes; int pos = _e35->byteOffset; bool colorAbstract; if ((pos >= 0)) { colorAbstract = ((pos + 4) > _this->length); } else { colorAbstract = true; } if (colorAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float value = ::__hxcpp_memory_get_float(_this->b,pos); if ((1 < (_e35->byteLength >> 2))) { _e35->bytes->setFloat((4 + _e35->byteOffset),value); } } } } } } HX_END_LOCAL_FUNC2((void)) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_36, ::haxe::io::ArrayBufferViewImpl,_e36) HXARGC(0) ::trilateral3::structure::TriInt _hx_run(){ HX_JUST_GC_STACKFRAME ::haxe::io::Bytes _this = _e36->bytes; int pos = _e36->byteOffset; bool k; if ((pos >= 0)) { k = ((pos + 4) > _this->length); } else { k = true; } if (k) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k1 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this->b,pos)) * 21) + 6); ::haxe::io::Bytes _this1 = _e36->bytes; int pos1 = (((k1 + 2) << 2) + _e36->byteOffset); bool _g; if ((pos1 >= 0)) { _g = ((pos1 + 4) > _this1->length); } else { _g = true; } if (_g) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int _g1 = (::Math_obj::round(( ::__hxcpp_memory_get_float(_this1->b,pos1) * ( (Float)(255) ))) << 24); int _g2 = (_g1 | (::Math_obj::round((::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_redA(_e36) * ( (Float)(255) ))) << 16)); ::haxe::io::Bytes _this2 = _e36->bytes; int pos2 = _e36->byteOffset; bool k2; if ((pos2 >= 0)) { k2 = ((pos2 + 4) > _this2->length); } else { k2 = true; } if (k2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k3 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this2->b,pos2)) * 21) + 4); ::haxe::io::Bytes _this3 = _e36->bytes; int pos3 = (((k3 + 2) << 2) + _e36->byteOffset); bool _g3; if ((pos3 >= 0)) { _g3 = ((pos3 + 4) > _this3->length); } else { _g3 = true; } if (_g3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int _g4 = (_g2 | (::Math_obj::round(( ::__hxcpp_memory_get_float(_this3->b,pos3) * ( (Float)(255) ))) << 8)); ::haxe::io::Bytes _this4 = _e36->bytes; int pos4 = _e36->byteOffset; bool k4; if ((pos4 >= 0)) { k4 = ((pos4 + 4) > _this4->length); } else { k4 = true; } if (k4) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k5 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this4->b,pos4)) * 21) + 5); ::haxe::io::Bytes _this5 = _e36->bytes; int pos5 = (((k5 + 2) << 2) + _e36->byteOffset); bool _g5; if ((pos5 >= 0)) { _g5 = ((pos5 + 4) > _this5->length); } else { _g5 = true; } if (_g5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int _g6 = (_g4 | ::Math_obj::round(( ::__hxcpp_memory_get_float(_this5->b,pos5) * ( (Float)(255) )))); ::haxe::io::Bytes _this6 = _e36->bytes; int pos6 = 
_e36->byteOffset; bool k6; if ((pos6 >= 0)) { k6 = ((pos6 + 4) > _this6->length); } else { k6 = true; } if (k6) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k7 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this6->b,pos6)) * 21) + 6); ::haxe::io::Bytes _this7 = _e36->bytes; int pos7 = (((k7 + 2) << 2) + _e36->byteOffset); bool _g7; if ((pos7 >= 0)) { _g7 = ((pos7 + 4) > _this7->length); } else { _g7 = true; } if (_g7) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int _g8 = (::Math_obj::round(( ::__hxcpp_memory_get_float(_this7->b,pos7) * ( (Float)(255) ))) << 24); int _g9 = (_g8 | (::Math_obj::round((::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_redA(_e36) * ( (Float)(255) ))) << 16)); ::haxe::io::Bytes _this8 = _e36->bytes; int pos8 = _e36->byteOffset; bool k8; if ((pos8 >= 0)) { k8 = ((pos8 + 4) > _this8->length); } else { k8 = true; } if (k8) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k9 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this8->b,pos8)) * 21) + 4); ::haxe::io::Bytes _this9 = _e36->bytes; int pos9 = (((k9 + 2) << 2) + _e36->byteOffset); bool _g10; if ((pos9 >= 0)) { _g10 = ((pos9 + 4) > _this9->length); } else { _g10 = true; } if (_g10) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int _g11 = (_g9 | (::Math_obj::round(( ::__hxcpp_memory_get_float(_this9->b,pos9) * ( (Float)(255) ))) << 8)); ::haxe::io::Bytes _this10 = _e36->bytes; int pos10 = _e36->byteOffset; bool k10; if ((pos10 >= 0)) { k10 = ((pos10 + 4) > _this10->length); } else { k10 = true; } if (k10) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k11 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this10->b,pos10)) * 21) + 5); ::haxe::io::Bytes _this11 = _e36->bytes; int pos11 = (((k11 + 2) << 2) + _e36->byteOffset); bool _g12; if ((pos11 >= 0)) { _g12 = ((pos11 + 4) > _this11->length); } else { _g12 = true; } if (_g12) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int _g13 = (_g11 | ::Math_obj::round(( ::__hxcpp_memory_get_float(_this11->b,pos11) * ( (Float)(255) )))); ::haxe::io::Bytes _this12 = _e36->bytes; int pos12 = _e36->byteOffset; bool k12; if ((pos12 >= 0)) { k12 = ((pos12 + 4) > _this12->length); } else { k12 = true; } if (k12) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k13 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this12->b,pos12)) * 21) + 6); ::haxe::io::Bytes _this13 = _e36->bytes; int pos13 = (((k13 + 2) << 2) + _e36->byteOffset); bool colorAbstract; if ((pos13 >= 0)) { colorAbstract = ((pos13 + 4) > _this13->length); } else { colorAbstract = true; } if (colorAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int colorAbstract1 = (::Math_obj::round(( ::__hxcpp_memory_get_float(_this13->b,pos13) * ( (Float)(255) ))) << 24); int colorAbstract2 = (colorAbstract1 | (::Math_obj::round((::dsHelper::flatInterleave::_FloatColorTriangles::FloatColorTriangles_Impl__obj::get_redA(_e36) * ( (Float)(255) ))) << 16)); ::haxe::io::Bytes _this14 = _e36->bytes; int pos14 = _e36->byteOffset; bool k14; if ((pos14 >= 0)) { k14 = ((pos14 + 4) > _this14->length); } else { k14 = true; } if (k14) { 
HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k15 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this14->b,pos14)) * 21) + 4); ::haxe::io::Bytes _this15 = _e36->bytes; int pos15 = (((k15 + 2) << 2) + _e36->byteOffset); bool colorAbstract3; if ((pos15 >= 0)) { colorAbstract3 = ((pos15 + 4) > _this15->length); } else { colorAbstract3 = true; } if (colorAbstract3) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int colorAbstract4 = (colorAbstract2 | (::Math_obj::round(( ::__hxcpp_memory_get_float(_this15->b,pos15) * ( (Float)(255) ))) << 8)); ::haxe::io::Bytes _this16 = _e36->bytes; int pos16 = _e36->byteOffset; bool k16; if ((pos16 >= 0)) { k16 = ((pos16 + 4) > _this16->length); } else { k16 = true; } if (k16) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } int k17 = ((::Std_obj::_hx_int( ::__hxcpp_memory_get_float(_this16->b,pos16)) * 21) + 5); ::haxe::io::Bytes _this17 = _e36->bytes; int pos17 = (((k17 + 2) << 2) + _e36->byteOffset); bool colorAbstract5; if ((pos17 >= 0)) { colorAbstract5 = ((pos17 + 4) > _this17->length); } else { colorAbstract5 = true; } if (colorAbstract5) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } return ::trilateral3::structure::TriInt_obj::__alloc( HX_CTX ,_g6,_g13,(colorAbstract4 | ::Math_obj::round(( ::__hxcpp_memory_get_float(_this17->b,pos17) * ( (Float)(255) ))))); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_37, ::haxe::io::ArrayBufferViewImpl,_e37) HXARGC(0) Float _hx_run(){ ::haxe::io::Bytes _this = _e37->bytes; int pos = _e37->byteOffset; bool colorAbstract; if ((pos >= 0)) { colorAbstract = ((pos + 4) > _this->length); } else { colorAbstract = true; } if (colorAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } return ::__hxcpp_memory_get_float(_this->b,pos); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_38, ::haxe::io::ArrayBufferViewImpl,_e38) HXARGC(1) Float _hx_run(Float pos_){ if ((0 < (_e38->byteLength >> 2))) { _e38->bytes->setFloat(_e38->byteOffset,pos_); } ::haxe::io::Bytes _this = _e38->bytes; int pos = _e38->byteOffset; bool colorAbstract; if ((pos >= 0)) { colorAbstract = ((pos + 4) > _this->length); } else { colorAbstract = true; } if (colorAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float colorAbstract1 = ::__hxcpp_memory_get_float(_this->b,pos); ::haxe::io::Bytes _this1 = _e38->bytes; int pos1 = (4 + _e38->byteOffset); bool colorAbstract2; if ((pos1 >= 0)) { colorAbstract2 = ((pos1 + 4) > _this1->length); } else { colorAbstract2 = true; } if (colorAbstract2) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } if ((colorAbstract1 > ( ::__hxcpp_memory_get_float(_this1->b,pos1) - ( (Float)(1) )))) { ::haxe::io::Bytes _this = _e38->bytes; int pos = _e38->byteOffset; bool colorAbstract; if ((pos >= 0)) { colorAbstract = ((pos + 4) > _this->length); } else { colorAbstract = true; } if (colorAbstract) { HX_STACK_DO_THROW(::haxe::Exception_obj::thrown(::haxe::io::Error_obj::OutsideBounds_dyn())); } Float value = ::__hxcpp_memory_get_float(_this->b,pos); if ((1 < (_e38->byteLength >> 2))) { _e38->bytes->setFloat((4 + _e38->byteOffset),value); } } return pos_; } HX_END_LOCAL_FUNC1(return) 
HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_39, ::haxe::io::ArrayBufferViewImpl,_e39) HXARGC(0) int _hx_run(){ return ::dsHelper::flatInterleave::core::_Flat32::Flat32_Impl__obj::get_size(_e39); } HX_END_LOCAL_FUNC0(return) HX_BEGIN_LOCAL_FUNC_S1(::hx::LocalFunc,_hx_Closure_40, ::haxe::io::ArrayBufferViewImpl,_e40) HXARGC(1) int _hx_run(int id){ return ::dsHelper::flatInterleave::core::_Flat32::Flat32_Impl__obj::set_size(_e40,id); } HX_END_LOCAL_FUNC1(return) HX_JUST_GC_STACKFRAME ::haxe::io::ArrayBufferViewImpl t = this->colorTriangles; ::haxe::io::ArrayBufferViewImpl _e = t; ::haxe::io::ArrayBufferViewImpl _e1 = t; ::haxe::io::ArrayBufferViewImpl _e2 = t; ::haxe::io::ArrayBufferViewImpl _e3 = t; ::haxe::io::ArrayBufferViewImpl _e4 = t; ::haxe::io::ArrayBufferViewImpl _e5 = t; ::haxe::io::ArrayBufferViewImpl _e6 = t; ::haxe::io::ArrayBufferViewImpl _e7 = t; ::haxe::io::ArrayBufferViewImpl _e8 = t; ::haxe::io::ArrayBufferViewImpl _e9 = t; ::haxe::io::ArrayBufferViewImpl _e10 = t; ::haxe::io::ArrayBufferViewImpl _e11 = t; ::haxe::io::ArrayBufferViewImpl _e12 = t; ::haxe::io::ArrayBufferViewImpl _e13 = t; ::haxe::io::ArrayBufferViewImpl _e14 = t; ::haxe::io::ArrayBufferViewImpl _e15 = t; ::haxe::io::ArrayBufferViewImpl _e16 = t; ::Dynamic triangleAbstract = ::Dynamic(::hx::Anon_obj::Create(17) ->setFixed(0,HX_("rotateTrig",d7,d8,0e,85), ::Dynamic(new _hx_Closure_2(_e2))) ->setFixed(1,HX_("triangle",c8,be,c5,8d), ::Dynamic(new _hx_Closure_14(_e14))) ->setFixed(2,HX_("get_x",4f,a5,60,91), ::Dynamic(new _hx_Closure_8(_e8))) ->setFixed(3,HX_("get_y",50,a5,60,91), ::Dynamic(new _hx_Closure_10(_e10))) ->setFixed(4,HX_("get_z",51,a5,60,91), ::Dynamic(new _hx_Closure_12(_e12))) ->setFixed(5,HX_("liteHit",c5,d1,fb,93), ::Dynamic(new _hx_Closure_4(_e4))) ->setFixed(6,HX_("moveDelta",27,b7,ed,94), ::Dynamic(new _hx_Closure_1(_e1))) ->setFixed(7,HX_("get_back",b0,95,cb,c0), ::Dynamic(new _hx_Closure_6(_e6))) ->setFixed(8,HX_("rotate",5b,46,20,cb), ::Dynamic(new _hx_Closure_0(_e))) ->setFixed(9,HX_("fullHit",04,38,2f,d0), ::Dynamic(new _hx_Closure_3(_e3))) ->setFixed(10,HX_("get_right",33,68,0d,2d), ::Dynamic(new _hx_Closure_7(_e7))) ->setFixed(11,HX_("getTriangle3D",af,89,3b,36), ::Dynamic(new _hx_Closure_15(_e15))) ->setFixed(12,HX_("transform",6c,2d,93,45), ::Dynamic(new _hx_Closure_16(_e16))) ->setFixed(13,HX_("get_bottom",b4,56,00,56), ::Dynamic(new _hx_Closure_5(_e5))) ->setFixed(14,HX_("set_x",5b,9b,2f,7a), ::Dynamic(new _hx_Closure_9(_e9))) ->setFixed(15,HX_("set_y",5c,9b,2f,7a), ::Dynamic(new _hx_Closure_11(_e11))) ->setFixed(16,HX_("set_z",5d,9b,2f,7a), ::Dynamic(new _hx_Closure_13(_e13)))); ::haxe::io::ArrayBufferViewImpl _e17 = t; ::haxe::io::ArrayBufferViewImpl _e18 = t; ::haxe::io::ArrayBufferViewImpl _e19 = t; ::haxe::io::ArrayBufferViewImpl _e20 = t; ::haxe::io::ArrayBufferViewImpl _e21 = t; ::haxe::io::ArrayBufferViewImpl _e22 = t; ::haxe::io::ArrayBufferViewImpl _e23 = t; ::haxe::io::ArrayBufferViewImpl _e24 = t; ::haxe::io::ArrayBufferViewImpl _e25 = t; ::haxe::io::ArrayBufferViewImpl _e26 = t; ::Dynamic drawAbstract = ::Dynamic(::hx::Anon_obj::Create(11) ->setFixed(0,HX_("triangle",c8,be,c5,8d), ::Dynamic(new _hx_Closure_17(_e17))) ->setFixed(1,HX_("triangleCurrent",51,ee,2b,9e),triangleAbstract) ->setFixed(2,HX_("get_size",4a,5c,0e,cc), ::Dynamic(new _hx_Closure_25(_e25))) ->setFixed(3,HX_("hasNext",6d,a5,46,18), ::Dynamic(new _hx_Closure_22(_e22))) ->setFixed(4,HX_("set_pos",37,b7,cb,19), ::Dynamic(new _hx_Closure_24(_e24))) ->setFixed(5,HX_("get_pos",2b,26,ca,26), 
::Dynamic(new _hx_Closure_23(_e23))) ->setFixed(6,HX_("getTriangle3D",af,89,3b,36), ::Dynamic(new _hx_Closure_20(_e20))) ->setFixed(7,HX_("transform",6c,2d,93,45), ::Dynamic(new _hx_Closure_18(_e18))) ->setFixed(8,HX_("next",f3,84,02,49), ::Dynamic(new _hx_Closure_21(_e21))) ->setFixed(9,HX_("transformRange",b1,c0,a1,5a), ::Dynamic(new _hx_Closure_19(_e19))) ->setFixed(10,HX_("set_size",be,b5,6b,7a), ::Dynamic(new _hx_Closure_26(_e26)))); ::haxe::io::ArrayBufferViewImpl _e27 = t; ::haxe::io::ArrayBufferViewImpl _e28 = t; ::haxe::io::ArrayBufferViewImpl _e29 = t; ::haxe::io::ArrayBufferViewImpl _e30 = t; ::haxe::io::ArrayBufferViewImpl _e31 = t; ::haxe::io::ArrayBufferViewImpl _e32 = t; ::haxe::io::ArrayBufferViewImpl _e33 = t; ::Dynamic color3Abstract = ::Dynamic(::hx::Anon_obj::Create(7) ->setFixed(0,HX_("set_argbA",d8,5d,81,4c), ::Dynamic(new _hx_Closure_28(_e28))) ->setFixed(1,HX_("set_argbB",d9,5d,81,4c), ::Dynamic(new _hx_Closure_30(_e30))) ->setFixed(2,HX_("set_argbC",da,5d,81,4c), ::Dynamic(new _hx_Closure_32(_e32))) ->setFixed(3,HX_("get_argbA",cc,71,30,69), ::Dynamic(new _hx_Closure_29(_e29))) ->setFixed(4,HX_("get_argbB",cd,71,30,69), ::Dynamic(new _hx_Closure_31(_e31))) ->setFixed(5,HX_("get_argbC",ce,71,30,69), ::Dynamic(new _hx_Closure_33(_e33))) ->setFixed(6,HX_("set_argb",49,a2,8c,6e), ::Dynamic(new _hx_Closure_27(_e27)))); ::haxe::io::ArrayBufferViewImpl _e34 = t; ::haxe::io::ArrayBufferViewImpl _e35 = t; ::haxe::io::ArrayBufferViewImpl _e36 = t; ::haxe::io::ArrayBufferViewImpl _e37 = t; ::haxe::io::ArrayBufferViewImpl _e38 = t; ::haxe::io::ArrayBufferViewImpl _e39 = t; ::haxe::io::ArrayBufferViewImpl _e40 = t; ::Dynamic colorAbstract = ::Dynamic(::hx::Anon_obj::Create(8) ->setFixed(0,HX_("cornerColors",65,87,ed,a2), ::Dynamic(new _hx_Closure_34(_e34))) ->setFixed(1,HX_("getTriInt",ba,b1,09,c7), ::Dynamic(new _hx_Closure_36(_e36))) ->setFixed(2,HX_("get_size",4a,5c,0e,cc), ::Dynamic(new _hx_Closure_39(_e39))) ->setFixed(3,HX_("colorTriangles",c8,da,ed,e6), ::Dynamic(new _hx_Closure_35(_e35))) ->setFixed(4,HX_("set_pos",37,b7,cb,19), ::Dynamic(new _hx_Closure_38(_e38))) ->setFixed(5,HX_("get_pos",2b,26,ca,26), ::Dynamic(new _hx_Closure_37(_e37))) ->setFixed(6,HX_("color3current",c9,64,01,5a),color3Abstract) ->setFixed(7,HX_("set_size",be,b5,6b,7a), ::Dynamic(new _hx_Closure_40(_e40)))); this->pen = ::trilateral3::drawing::Pen_obj::__alloc( HX_CTX ,drawAbstract,colorAbstract); } HX_DEFINE_DYNAMIC_FUNC0(PenNodule_obj,createPen,(void)) int PenNodule_obj::largeEnough; ::hx::ObjectPtr< PenNodule_obj > PenNodule_obj::__new() { ::hx::ObjectPtr< PenNodule_obj > __this = new PenNodule_obj(); __this->__construct(); return __this; } ::hx::ObjectPtr< PenNodule_obj > PenNodule_obj::__alloc(::hx::Ctx *_hx_ctx) { PenNodule_obj *__this = (PenNodule_obj*)(::hx::Ctx::alloc(_hx_ctx, sizeof(PenNodule_obj), true, "trilateral3.nodule.PenNodule")); *(void **)__this = PenNodule_obj::_hx_vtable; __this->__construct(); return __this; } PenNodule_obj::PenNodule_obj() { } void PenNodule_obj::__Mark(HX_MARK_PARAMS) { HX_MARK_BEGIN_CLASS(PenNodule); HX_MARK_MEMBER_NAME(colorTriangles,"colorTriangles"); HX_MARK_MEMBER_NAME(pen,"pen"); HX_MARK_END_CLASS(); } void PenNodule_obj::__Visit(HX_VISIT_PARAMS) { HX_VISIT_MEMBER_NAME(colorTriangles,"colorTriangles"); HX_VISIT_MEMBER_NAME(pen,"pen"); } ::hx::Val PenNodule_obj::__Field(const ::String &inName,::hx::PropertyAccess inCallProp) { switch(inName.length) { case 3: if (HX_FIELD_EQ(inName,"pen") ) { return ::hx::Val( pen ); } break; case 9: if 
(HX_FIELD_EQ(inName,"createPen") ) { return ::hx::Val( createPen_dyn() ); } break; case 14: if (HX_FIELD_EQ(inName,"colorTriangles") ) { return ::hx::Val( colorTriangles ); } } return super::__Field(inName,inCallProp); } bool PenNodule_obj::__GetStatic(const ::String &inName, Dynamic &outValue, ::hx::PropertyAccess inCallProp) { switch(inName.length) { case 11: if (HX_FIELD_EQ(inName,"largeEnough") ) { outValue = ( largeEnough ); return true; } } return false; } ::hx::Val PenNodule_obj::__SetField(const ::String &inName,const ::hx::Val &inValue,::hx::PropertyAccess inCallProp) { switch(inName.length) { case 3: if (HX_FIELD_EQ(inName,"pen") ) { pen=inValue.Cast< ::trilateral3::drawing::Pen >(); return inValue; } break; case 14: if (HX_FIELD_EQ(inName,"colorTriangles") ) { colorTriangles=inValue.Cast< ::haxe::io::ArrayBufferViewImpl >(); return inValue; } } return super::__SetField(inName,inValue,inCallProp); } bool PenNodule_obj::__SetStatic(const ::String &inName,Dynamic &ioValue,::hx::PropertyAccess inCallProp) { switch(inName.length) { case 11: if (HX_FIELD_EQ(inName,"largeEnough") ) { largeEnough=ioValue.Cast< int >(); return true; } } return false; } void PenNodule_obj::__GetFields(Array< ::String> &outFields) { outFields->push(HX_("colorTriangles",c8,da,ed,e6)); outFields->push(HX_("pen",d9,54,55,00)); super::__GetFields(outFields); }; #ifdef HXCPP_SCRIPTABLE static ::hx::StorageInfo PenNodule_obj_sMemberStorageInfo[] = { {::hx::fsObject /* ::haxe::io::ArrayBufferViewImpl */ ,(int)offsetof(PenNodule_obj,colorTriangles),HX_("colorTriangles",c8,da,ed,e6)}, {::hx::fsObject /* ::trilateral3::drawing::Pen */ ,(int)offsetof(PenNodule_obj,pen),HX_("pen",d9,54,55,00)}, { ::hx::fsUnknown, 0, null()} }; static ::hx::StaticInfo PenNodule_obj_sStaticStorageInfo[] = { {::hx::fsInt,(void *) &PenNodule_obj::largeEnough,HX_("largeEnough",cb,b7,3c,cb)}, { ::hx::fsUnknown, 0, null()} }; #endif static ::String PenNodule_obj_sMemberFields[] = { HX_("colorTriangles",c8,da,ed,e6), HX_("pen",d9,54,55,00), HX_("createPen",3d,2b,e3,49), ::String(null()) }; static void PenNodule_obj_sMarkStatics(HX_MARK_PARAMS) { HX_MARK_MEMBER_NAME(PenNodule_obj::largeEnough,"largeEnough"); }; #ifdef HXCPP_VISIT_ALLOCS static void PenNodule_obj_sVisitStatics(HX_VISIT_PARAMS) { HX_VISIT_MEMBER_NAME(PenNodule_obj::largeEnough,"largeEnough"); }; #endif ::hx::Class PenNodule_obj::__mClass; static ::String PenNodule_obj_sStaticFields[] = { HX_("largeEnough",cb,b7,3c,cb), ::String(null()) }; void PenNodule_obj::__register() { PenNodule_obj _hx_dummy; PenNodule_obj::_hx_vtable = *(void **)&_hx_dummy; ::hx::Static(__mClass) = new ::hx::Class_obj(); __mClass->mName = HX_("trilateral3.nodule.PenNodule",c8,6f,ae,d4); __mClass->mSuper = &super::__SGetClass(); __mClass->mConstructEmpty = &__CreateEmpty; __mClass->mConstructArgs = &__Create; __mClass->mGetStaticField = &PenNodule_obj::__GetStatic; __mClass->mSetStaticField = &PenNodule_obj::__SetStatic; __mClass->mMarkFunc = PenNodule_obj_sMarkStatics; __mClass->mStatics = ::hx::Class_obj::dupFunctions(PenNodule_obj_sStaticFields); __mClass->mMembers = ::hx::Class_obj::dupFunctions(PenNodule_obj_sMemberFields); __mClass->mCanCast = ::hx::TCanCast< PenNodule_obj >; #ifdef HXCPP_VISIT_ALLOCS __mClass->mVisitFunc = PenNodule_obj_sVisitStatics; #endif #ifdef HXCPP_SCRIPTABLE __mClass->mMemberStorageInfo = PenNodule_obj_sMemberStorageInfo; #endif #ifdef HXCPP_SCRIPTABLE __mClass->mStaticStorageInfo = PenNodule_obj_sStaticStorageInfo; #endif ::hx::_hx_RegisterClass(__mClass->mName, __mClass); } 
void PenNodule_obj::__boot() { { largeEnough = 20000000; } } } // end namespace trilateral3 } // end namespace nodule
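The generated closures above repeat one idiom: each colour channel is stored as a float in the 0–1 range inside the interleaved triangle buffer, and is packed into or unpacked from a 32-bit ARGB integer on demand. As a rough illustration of that arithmetic only (a plain-C sketch, not part of the Haxe/hxcpp output; the function names are hypothetical), the conversion amounts to:

#include <math.h>
#include <stdint.h>

/* Pack four 0..1 float channels into a 32-bit ARGB value, mirroring what the
   generated get_argbA/B/C closures do: round each channel to 0..255 and shift
   it into place (alpha << 24, red << 16, green << 8, blue). */
static uint32_t pack_argb(float a, float r, float g, float b) {
    return ((uint32_t)lroundf(a * 255.0f) << 24)
         | ((uint32_t)lroundf(r * 255.0f) << 16)
         | ((uint32_t)lroundf(g * 255.0f) << 8)
         |  (uint32_t)lroundf(b * 255.0f);
}

/* Unpack one channel back to a 0..1 float, as the set_argbA/B/C closures do
   before writing it into the float buffer. */
static float channel_to_float(uint32_t argb, int shift) {
    return (float)((argb >> shift) & 255u) / 255.0f;
}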
INTRODUCTION In Chile, hemophilia was incorporated into the System of Explicit Health Guarantees (GES), which ensures access to treatment and financial protection for these patients. To support patients and their families, educational programs have been proposed that focus on managing possible complications of the pathology, first aid, and prophylaxis; however, there are no educational initiatives focused on the needs expressed by the patients themselves. OBJECTIVE To identify the educational needs of parents of children and adolescents with hemophilia regarding contents, people, place, methodology, and stage of the illness. SUBJECTS AND METHOD Descriptive qualitative study of 15 parents of children with hemophilia in outpatient care. Data were collected through a semi-structured interview with five open questions aimed at identifying educational needs: what (contents), how (methodology), when (moment), who (person), and where (place) education is needed. Data were analyzed using Berelson's content analysis technique, and the methodological rigor criteria of Guba and Lincoln were applied to guarantee the scientific validity of the qualitative results. RESULTS The most frequent educational needs reported by parents include contents such as venipuncture training, injury prevention, and pathophysiological aspects of the disease, among others; a methodology based on group workshops guided by a peer; a comfortable and familiar place; three stages of the disease's course (diagnosis, bleeding events, and development of autonomous activities); and delivery by professionals and peers. CONCLUSION Knowledge of these educational needs is the basis for creating an educational program that guides the comprehensive care of children with hemophilia and their parents.
#ifndef LIBIS_SOURCE_H
#define LIBIS_SOURCE_H

#include <libis.h>
#include <stdbool.h>

struct LibisSource_ {
    // Read a single byte from the source into *c.
    // If the end of the source has been reached, *eof is set to true and *c is set to '\0'.
    // Otherwise *eof is set to false and *c is set to the next byte in the source.
    LibisError (*read)(Libis *libis, LibisSource *source, bool *eof, char *c);

    // Free the resources held by a source.
    LibisError (*free)(Libis *libis, LibisSource *source);
};

#endif
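For illustration only (this is not part of libis itself), a concrete source might fill in the two function pointers along the lines of the sketch below. It assumes that libis.h typedefs LibisSource to struct LibisSource_ and that LibisError has a success value; StringSource and LIBIS_OK are hypothetical placeholders for whatever the library actually defines.

#include <stdlib.h>

// Hypothetical in-memory source; LIBIS_OK stands in for the library's "no error" value.
typedef struct {
    LibisSource base;   // assumption: LibisSource is a typedef of struct LibisSource_
    const char *data;   // backing buffer
    size_t pos, len;    // read cursor and buffer length
} StringSource;

static LibisError string_source_read(Libis *libis, LibisSource *source, bool *eof, char *c) {
    (void)libis;
    StringSource *s = (StringSource *)source;
    if (s->pos >= s->len) {       // end of the buffer: report EOF and yield '\0'
        *eof = true;
        *c = '\0';
    } else {                      // otherwise hand out the next byte
        *eof = false;
        *c = s->data[s->pos++];
    }
    return LIBIS_OK;
}

static LibisError string_source_free(Libis *libis, LibisSource *source) {
    (void)libis;
    free(source);                 // the wrapper owns only its own allocation here
    return LIBIS_OK;
}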
/*
 * Minimum number of trailing zeros of the product along a top-left to
 * bottom-right path (moves: Right/Down). Trailing zeros of a product equal
 * min(total factors of 2, total factors of 5), so it suffices to minimise
 * the factors of 2 and the factors of 5 with two independent DPs and take
 * the smaller result. A cell containing 0 makes the whole product 0, which
 * has exactly one trailing zero, so it is treated like a single factor of 10.
 */
#include <stdio.h>
#define fromleft 1
#define fromup 0

int out[2002];
int twos[1001][1001] = {0}, fives[1001][1001] = {0};
int a[1001][1001];
int dp[1001][1001] = {0}, dp2[1001][1001] = {0};
int path[1001][1001], path2[1001][1001];

int min(int a, int b) { return a < b ? a : b; }

/* Count the factors of 2 and 5 of e into twos[q][w] and fives[q][w]. */
void solve(int q, int w, int e)
{
    if (e == 0) {              /* a zero behaves like one factor of 10 */
        twos[q][w] = 1;
        fives[q][w] = 1;
        return;
    }
    while (e % 2 == 0) {
        twos[q][w]++;
        e /= 2;
    }
    while (e % 5 == 0) {
        fives[q][w]++;
        e /= 5;
    }
}

int main() {
    int n, i, j, zx, zy, mintwos, minfives, k = 0;
    scanf("%d", &n);
    for (i = 0; i < n; i++) {
        for (j = 0; j < n; j++) {
            scanf("%d", &a[i][j]);
            if (a[i][j] == 0) k = 1, zx = j, zy = i;   /* remember one zero cell */
            solve(i, j, a[i][j]);
        }
    }

    /* DP 1: minimise the number of factors of 2 along the path. */
    dp[0][0] = twos[0][0];
    for (i = 1; i < n; i++) dp[0][i] = dp[0][i - 1] + twos[0][i], path[0][i] = fromleft;
    for (i = 1; i < n; i++) dp[i][0] = dp[i - 1][0] + twos[i][0], path[i][0] = fromup;
    for (i = 1; i < n; i++) {
        for (j = 1; j < n; j++) {
            if (dp[i - 1][j] < dp[i][j - 1]) {
                dp[i][j] = dp[i - 1][j] + twos[i][j];
                path[i][j] = fromup;
            } else {
                dp[i][j] = dp[i][j - 1] + twos[i][j];
                path[i][j] = fromleft;
            }
        }
    }
    mintwos = dp[n - 1][n - 1];

    /* DP 2: minimise the number of factors of 5 along the path. */
    dp2[0][0] = fives[0][0];
    for (i = 1; i < n; i++) dp2[0][i] = dp2[0][i - 1] + fives[0][i], path2[0][i] = fromleft;
    for (i = 1; i < n; i++) dp2[i][0] = dp2[i - 1][0] + fives[i][0], path2[i][0] = fromup;
    for (i = 1; i < n; i++) {
        for (j = 1; j < n; j++) {
            if (dp2[i - 1][j] < dp2[i][j - 1]) {
                dp2[i][j] = dp2[i - 1][j] + fives[i][j];
                path2[i][j] = fromup;
            } else {
                dp2[i][j] = dp2[i][j - 1] + fives[i][j];
                path2[i][j] = fromleft;
            }
        }
    }
    minfives = dp2[n - 1][n - 1];

    int pout = 0;
    i = n - 1, j = n - 1;

    /* If both DP answers exceed 1 and the grid contains a zero, routing the
       path through that zero gives exactly one trailing zero, which is better. */
    if ((min(mintwos, minfives) > 1) && (k == 1)) {
        printf("1\n");
        int tx = zx, ty = zy;
        while (tx) printf("R"), tx--;
        while (ty) printf("D"), ty--;
        while (zx + 1 < n) printf("R"), zx++;
        while (zy + 1 < n) printf("D"), zy++;
        return 0;
    } else {
        /* Walk the better of the two DP tables backwards to recover the path. */
        if (mintwos < minfives) {
            while ((i != 0) || (j != 0)) {
                out[pout] = path[i][j];
                pout++;
                if (path[i][j] == fromleft) j--;
                else if (path[i][j] == fromup) i--;
            }
        } else {
            while ((i != 0) || (j != 0)) {
                out[pout] = path2[i][j];
                pout++;
                if (path2[i][j] == fromleft) j--;
                else if (path2[i][j] == fromup) i--;
            }
        }
    }
    printf("%d\n", min(minfives, mintwos));
    for (i = n * 2 - 3; i >= 0; i--) {    /* moves were recorded backwards */
        if (out[i] == fromleft) printf("R");
        else printf("D");
    }
    return 0;
}
export type ApiMethod = | "delete" | "get" | "GET" | "DELETE" | "head" | "HEAD" | "options" | "OPTIONS" | "post" | "POST" | "put" | "PUT" | "patch" | "PATCH" | "link" | "LINK" | "unlink" | "UNLINK"; export interface ApiResultObject<TPayload, TData, TError> { endpoint: string; method: ApiMethod; payload: TPayload | undefined; data?: TData; success: boolean; error?: string | string[]; errorDetails?: TError; } export type OutgoingOperationType = "insert" | "update" | "skip"; export type OutgoingOperationObjectType = "user" | "event" | "account"; export interface OutgoingOperationEnvelope<TMessage, TServiceObject> { message: TMessage; serviceObject?: TServiceObject; operation: OutgoingOperationType; objectType: OutgoingOperationObjectType; notes?: string[]; } export interface OutgoingOperationEnvelopesFiltered<TMessage, TServiceObject> { inserts: OutgoingOperationEnvelope<TMessage, TServiceObject>[]; updates: OutgoingOperationEnvelope<TMessage, TServiceObject>[]; skips: OutgoingOperationEnvelope<TMessage, TServiceObject>[]; } export interface SellsyOAuthCredentials { consumerToken: string; consumerSecret: string; userToken: string; userSecret: string; } export interface SellsyExecuteRequest { auth: SellsyOAuthCredentials; method: string; params: any; } export interface SellsyResponse<TResponseData> { response: TResponseData; error: string; status: string; } export interface SellsyResponseListData<TData> { infos: { nbperpage: number; nbpages: number; pagenum: number; nbtotal: string; }; result: { [key: string]: TData; }; } export interface SellsyPaginationRequest { nbperpage: number; pagenum: number; } export interface SellsyCustomFieldGroup { id: string; corpid: string; ownerid: string; status: string; name: string; code: string; openbydefault: string; } export interface SellsyCustomField { id: string; corpid: string; ownerid: string; status: string; type: string; name: string; code: string; showIn_list: string; showIn_filter: string; showIn_ecommerce: string; showOn_pdf: string; showOn_desc: string; useOn_document: string; useOn_people: string; useOn_client: string; useOn_prospect: string; useOn_supplier: string; useOn_item: string; useOn_service: string; useOn_ticket: string; useOn_task: string; useOn_purchase: string; useOn_opportunity: string; useOn_staff: string; useOn_project: string; useOn_book: string; moreInfoOnPdf_address: string; moreInfoOnPdf_mail: string; moreInfoOnPdf_tel: string; moreInfoOnPdf_mobile: string; cfid: string; isRequired: string; description: string; min: string; max: string; defaultValue: string; listType: string; listId: string; groupid: string; groupname: string; openbydefault: string; rank: string; formatted_type: string; prefsList: any[]; } export interface SellsyClient { thirdid: string; capital: string; logo: string; joindate: string; // SQL date auxCode: string; accountingCode: string; stickyNote: string; ident: string; rateCategory: string; massmailingUnsubscribed: string; massmailingUnsubscribedSMS: string; phoningUnsubscribed: string; massmailingUnsubscribedMail: string; massmailingUnsubscribedCustom: string; lastactivity: string; // timestamp ownerid: string; type: string; maincontactid: string; relationType: string; actif: string; pic: string; people_forename: string; people_name: string; people_civil: string; dateTransformProspect: string; // SQL date score: any | null; mainContactName: string; name: string; tel: string; fax: string; email: string; mobile: string; apenaf: string; rcs: string; siret: string; siren: string; vat: string; mainaddressid: 
string; maindelivaddressid: string; web: string; corpType: string; addr_name: string; addr_part1: string; addr_part2: string; addr_zip: string; addr_town: string; addr_state: string; addr_lat: string; addr_lng: string; addr_countrycode: string; delivaddr_name: string; delivaddr_part1: string; delivaddr_part2: string; delivaddr_zip: string; delivaddr_town: string; delivaddr_state: string; delivaddr_lat: string; delivaddr_lng: string; delivaddr_countrycode: string; formated_joindate: string; // dd/mm/yyyy formated_transformprospectdate: string; // dd/mm/yyyy scoreFormatted: string; scoreClass: string; corpid: string; avatar: { type: string; value: string; class: number; }; lastactivity_formatted: string; // dd/mm/yyyy addr_countryname: string; mainAddress: string; addr_geocode: string; delivaddr_countryname: string; delivAddress: string; delivaddr_geocode: string; fullName: string; contactId: string; contactDetails: string; formatted_tel: string; formatted_mobile: string; formatted_fax: string; owner: string; webUrl: string; customfields: SellsyClientCustomField[]; contacts: { [key: string]: SellsyContactBase; }; smartTags: { [key: string]: SellsySmartTagBase; }; id: string; } export interface SellsyClientCustomField { id: string; status: string; corpid: string; cfid: string; groupid: string; type: string; linkedtype: string; linkedid: string; textval: string | null; boolval: string | null; timestampval: string | null; decimalval: string | null; numericval: string | null; stringval: string | null; code: string; formatted_value: string; currency?: string; unit?: string; } export interface SellsySmartTagBase { id: string; corpid: string; status: string; category: string; created: string; word: string; thirdid: string; } export interface SellsyContactBase { pic: string; name: string; forename: string; tel: string; email: string; mobile: string; civil: string; position: string; birthdate: string; thirdid: string; id: string; peopleid: string; fullName: string; corpid: string; formatted_tel: string; formatted_mobile: string; formatted_fax: string; formatted_birthdate: string; avatar: { type: string; value: string; class: number; }; } export interface SellsyContact extends SellsyContactBase { customfields: SellsyClientCustomField[]; smartTags?: { [key: string]: SellsySmartTagBase; }; isMain: string; } export interface SellsyProspect { thirdid: string; capital: string; logo: string; joindate: string; // SQL Date auxCode: string; accountingCode: string; stickyNote: string; ident: string; rateCategory: string; massmailingUnsubscribed: string; massmailingUnsubscribedSMS: string; phoningUnsubscribed: string; massmailingUnsubscribedMail: string; massmailingUnsubscribedCustom: string; lastactivity: string | null; ownerid: string; type: string; // corporation maincontactid: string; relationType: string; // prospect actif: string; pic: string; people_forename: string | null; people_name: string | null; people_civil: string | null; score: string | number | null; mainContactName: string; name: string; tel: string; fax: string; email: string; mobile: string; apenaf: string; rcs: string; siret: string; siren: string; vat: string; mainaddressid: string; maindelivaddressid: string; web: string; corpType: string; addr_name: string; addr_part1: string; addr_part2: string; addr_zip: string; addr_town: string; addr_state: string; addr_lat: string; addr_lng: string; addr_countrycode: string; delivaddr_name: string | null; delivaddr_part1: string | null; delivaddr_part2: string | null; delivaddr_zip: string | null; 
delivaddr_town: string | null; delivaddr_state: string | null; delivaddr_lat: string | null; delivaddr_lng: string | null; delivaddr_countrycode: string | null; formated_joindate: string; // dd/mm/yyyy formated_transformprospectdate: string; scoreFormatted: string; scoreClass: string; corpid: string; avatar: { type: string; value: string; class: number; }; lastactivity_formatted: string; addr_countryname: string; mainAddress: string; addr_geocode: string; fullName: string; contactId: string; contactDetails: string; formatted_tel: string; formatted_mobile: string; formatted_fax: string; owner: string; webUrl: string; customfields: SellsyClientCustomField[]; contacts?: { [key: string]: SellsyContactBase; }; smartTags?: { [key: string]: SellsySmartTagBase; }; id: string; } export type SellsyFieldType = | "simpletext" | "richtext" | "numeric" | "amount" | "unit" | "radio" | "select" | "checkbox" | "date" | "time" | "email" | "url" | "boolean" | "third" | "item" | "people" | "staff"; export interface SellsyFieldDefinition { code: string; label: string; type: SellsyFieldType; readonly: boolean; isDefault: boolean; allowedValues?: any[]; } export const SELLSY_DEFAULTFIELDS_CLIENT: SellsyFieldDefinition[] = [ { code: "thirdid", label: "Third ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "capital", label: "Capital", type: "simpletext", readonly: false, isDefault: true, }, { code: "logo", label: "Logo", type: "url", readonly: false, isDefault: true, }, { code: "joindate", label: "Join Date", type: "date", readonly: true, isDefault: true, }, { code: "auxCode", label: "Auxiliary accounting code", type: "simpletext", readonly: false, isDefault: true, }, { code: "accountingCode", label: "Accounting Code", type: "simpletext", readonly: false, isDefault: true, }, { code: "stickyNote", label: "Sticky Note", type: "richtext", readonly: false, isDefault: true, }, { code: "ident", label: "Customer reference", type: "simpletext", readonly: false, isDefault: true, }, { code: "rateCategory", label: "Company rate category", type: "simpletext", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribed", label: "Unsubscribe to email campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedSMS", label: "Unsubscribe to SMS campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "phoningUnsubscribed", label: "Unsubscribe to phone campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedMail", label: "Unsubscribe to postal campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedCustom", label: "Unsubscribe to personalized marketing campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "lastactivity", label: "Last activity", type: "date", readonly: true, isDefault: true, }, { code: "ownerid", label: "Owner ID", type: "staff", readonly: false, isDefault: true, }, { code: "type", label: "Type", type: "radio", allowedValues: ["corporation", "person"], readonly: false, isDefault: true, }, { code: "maincontactid", label: "Main Contact ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "relationType", label: "Relation Type", type: "simpletext", readonly: true, isDefault: true, }, { code: "actif", label: "Active", type: "boolean", readonly: true, isDefault: true, }, { code: "pic", label: "Picture", type: "url", readonly: true, isDefault: true, }, { code: "people_forename", label: "Main Contact First Name", type: 
"simpletext", readonly: true, isDefault: true, }, { code: "people_name", label: "Main Contact Last Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "people_civil", label: "Main Contact Civility", type: "simpletext", readonly: true, isDefault: true, }, { code: "dateTransformProspect", label: "Transformation Prospect Date", type: "date", readonly: true, isDefault: true, }, { code: "score", label: "Score", type: "simpletext", readonly: true, isDefault: true, }, { code: "mainContactName", label: "Main Contact Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "name", label: "Name", type: "simpletext", readonly: false, isDefault: true, }, { code: "tel", label: "Telephone", type: "simpletext", readonly: false, isDefault: true, }, { code: "fax", label: "Telefax", type: "simpletext", readonly: false, isDefault: true, }, { code: "email", label: "Email", type: "email", readonly: false, isDefault: true, }, { code: "mobile", label: "Mobile", type: "simpletext", readonly: false, isDefault: true, }, { code: "apenaf", label: "Company NAF code", type: "simpletext", readonly: false, isDefault: true, }, { code: "rcs", label: "Company RCS (Fr)", type: "simpletext", readonly: false, isDefault: true, }, { code: "siret", label: "Company SIRET", type: "simpletext", readonly: false, isDefault: true, }, { code: "siren", label: "Corporation Siren", type: "simpletext", readonly: false, isDefault: true, }, { code: "vat", label: "Company tax number", type: "simpletext", readonly: false, isDefault: true, }, { code: "mainaddressid", label: "Main Address ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "maindelivaddressid", label: "Main Delivery Address ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "web", label: "Website", type: "url", readonly: false, isDefault: true, }, { code: "corpType", label: "Corporation Type", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_name", label: "Address Name", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_part1", label: "Address Part 1", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_part2", label: "Address Part 2", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_zip", label: "Address Postal Code", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_town", label: "Address City", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_state", label: "Address State", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_lat", label: "Address Latitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_lng", label: "Address Longitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_countrycode", label: "Address Country Code", type: "simpletext", readonly: false, isDefault: true, }, { code: "delivaddr_name", label: "Delivery Address Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_part1", label: "Delivery Address Part 1", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_part2", label: "Delivery Address Part 2", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_zip", label: "Delivery Address Postal Code", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_town", label: "Delivery Address City", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_state", label: "Delivery Address State", type: 
"simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_lat", label: "Delivery Address Latitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_lng", label: "Delivery Address Longitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_countrycode", label: "Delivery Address Country Code", type: "simpletext", readonly: true, isDefault: true, }, { code: "formated_joindate", label: "Formatted Join Date", type: "date", readonly: true, isDefault: true, }, { code: "formated_transformprospectdate", label: "Formatted Transform Prospect Date", type: "date", readonly: true, isDefault: true, }, { code: "scoreFormatted", label: "Formatted Score", type: "simpletext", readonly: true, isDefault: true, }, { code: "scoreClass", label: "Score Class", type: "simpletext", readonly: true, isDefault: true, }, { code: "corpid", label: "Corporation ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "lastactivity_formatted", label: "Formatted Last Activity", type: "date", readonly: true, isDefault: true, }, { code: "addr_countryname", label: "Address Country Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "mainAddress", label: "Main Address", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_geocode", label: "Address Geocode", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_countryname", label: "Delivery Address Country Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivAddress", label: "Delivery Address", type: "simpletext", readonly: true, isDefault: true, }, { code: "fullName", label: "<NAME>", type: "simpletext", readonly: true, isDefault: true, }, { code: "contactId", label: "Contact ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "contactDetails", label: "Contact Details", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_tel", label: "Formatted Telephone", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_mobile", label: "Formatted Mobile", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_fax", label: "Formatted Telefaz", type: "simpletext", readonly: true, isDefault: true, }, { code: "owner", label: "Owner", type: "simpletext", readonly: true, isDefault: true, }, { code: "webUrl", label: "Web Url", type: "simpletext", readonly: true, isDefault: true, }, { code: "id", label: "Sellsy ID", type: "simpletext", readonly: true, isDefault: true, }, ]; export const SELLSY_DEFAULTFIELDS_PROSPECT: SellsyFieldDefinition[] = [ { code: "thirdid", label: "Third ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "capital", label: "Capital", type: "simpletext", readonly: false, isDefault: true, }, { code: "logo", label: "Logo", type: "url", readonly: false, isDefault: true, }, { code: "joindate", label: "Join Date", type: "date", readonly: true, isDefault: true, }, { code: "auxCode", label: "Auxiliary accounting code", type: "simpletext", readonly: false, isDefault: true, }, { code: "accountingCode", label: "Accounting Code", type: "simpletext", readonly: false, isDefault: true, }, { code: "stickyNote", label: "Sticky Note", type: "richtext", readonly: false, isDefault: true, }, { code: "ident", label: "Customer reference", type: "simpletext", readonly: false, isDefault: true, }, { code: "rateCategory", label: "Company rate category", type: "simpletext", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribed", 
label: "Unsubscribe to email campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedSMS", label: "Unsubscribe to SMS campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "phoningUnsubscribed", label: "Unsubscribe to phone campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedMail", label: "Unsubscribe to postal campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedCustom", label: "Unsubscribe to personalized marketing campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "lastactivity", label: "Last activity", type: "date", readonly: true, isDefault: true, }, { code: "ownerid", label: "Owner ID", type: "staff", readonly: false, isDefault: true, }, { code: "type", label: "Type", type: "radio", allowedValues: ["corporation", "person"], readonly: false, isDefault: true, }, { code: "maincontactid", label: "Main Contact ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "relationType", label: "Relation Type", type: "simpletext", readonly: true, isDefault: true, }, { code: "actif", label: "Active", type: "boolean", readonly: true, isDefault: true, }, { code: "pic", label: "Picture", type: "url", readonly: true, isDefault: true, }, { code: "people_forename", label: "Main Contact First Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "people_name", label: "Main Contact Last Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "people_civil", label: "Main Contact Civility", type: "simpletext", readonly: true, isDefault: true, }, { code: "dateTransformProspect", label: "Transformation Prospect Date", type: "date", readonly: true, isDefault: true, }, { code: "score", label: "Score", type: "simpletext", readonly: true, isDefault: true, }, { code: "mainContactName", label: "Main Contact Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "name", label: "Name", type: "simpletext", readonly: false, isDefault: true, }, { code: "tel", label: "Telephone", type: "simpletext", readonly: false, isDefault: true, }, { code: "fax", label: "Telefax", type: "simpletext", readonly: false, isDefault: true, }, { code: "email", label: "Email", type: "email", readonly: false, isDefault: true, }, { code: "mobile", label: "Mobile", type: "simpletext", readonly: false, isDefault: true, }, { code: "apenaf", label: "Company NAF code", type: "simpletext", readonly: false, isDefault: true, }, { code: "rcs", label: "Company RCS (Fr)", type: "simpletext", readonly: false, isDefault: true, }, { code: "siret", label: "Company SIRET", type: "simpletext", readonly: false, isDefault: true, }, { code: "siren", label: "Corporation Siren", type: "simpletext", readonly: false, isDefault: true, }, { code: "vat", label: "Company tax number", type: "simpletext", readonly: false, isDefault: true, }, { code: "mainaddressid", label: "Main Address ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "maindelivaddressid", label: "Main Delivery Address ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "web", label: "Website", type: "url", readonly: false, isDefault: true, }, { code: "corpType", label: "Corporation Type", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_name", label: "Address Name", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_part1", label: "Address Part 1", type: "simpletext", readonly: false, isDefault: 
true, }, { code: "addr_part2", label: "Address Part 2", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_zip", label: "Address Postal Code", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_town", label: "Address City", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_state", label: "Address State", type: "simpletext", readonly: false, isDefault: true, }, { code: "addr_lat", label: "Address Latitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_lng", label: "Address Longitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_countrycode", label: "Address Country Code", type: "simpletext", readonly: false, isDefault: true, }, { code: "delivaddr_name", label: "Delivery Address Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_part1", label: "Delivery Address Part 1", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_part2", label: "Delivery Address Part 2", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_zip", label: "Delivery Address Postal Code", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_town", label: "Delivery Address City", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_state", label: "Delivery Address State", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_lat", label: "Delivery Address Latitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_lng", label: "Delivery Address Longitude", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_countrycode", label: "Delivery Address Country Code", type: "simpletext", readonly: true, isDefault: true, }, { code: "formated_joindate", label: "Formatted Join Date", type: "date", readonly: true, isDefault: true, }, { code: "formated_transformprospectdate", label: "Formatted Transform Prospect Date", type: "date", readonly: true, isDefault: true, }, { code: "scoreFormatted", label: "Formatted Score", type: "simpletext", readonly: true, isDefault: true, }, { code: "scoreClass", label: "Score Class", type: "simpletext", readonly: true, isDefault: true, }, { code: "corpid", label: "Corporation ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "lastactivity_formatted", label: "Formatted Last Activity", type: "date", readonly: true, isDefault: true, }, { code: "addr_countryname", label: "Address Country Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "mainAddress", label: "Main Address", type: "simpletext", readonly: true, isDefault: true, }, { code: "addr_geocode", label: "Address Geocode", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivaddr_countryname", label: "Delivery Address Country Name", type: "simpletext", readonly: true, isDefault: true, }, { code: "delivAddress", label: "Delivery Address", type: "simpletext", readonly: true, isDefault: true, }, { code: "fullName", label: "<NAME>", type: "simpletext", readonly: true, isDefault: true, }, { code: "contactId", label: "Contact ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "contactDetails", label: "Contact Details", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_tel", label: "Formatted Telephone", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_mobile", label: "Formatted Mobile", type: "simpletext", readonly: true, isDefault: 
true, }, { code: "formatted_fax", label: "Formatted Telefaz", type: "simpletext", readonly: true, isDefault: true, }, { code: "owner", label: "Owner", type: "simpletext", readonly: true, isDefault: true, }, { code: "webUrl", label: "Web Url", type: "simpletext", readonly: true, isDefault: true, }, { code: "id", label: "Sellsy ID", type: "simpletext", readonly: true, isDefault: true, }, ]; export const SELLSY_DEFAULTFIELDS_CONTACT: SellsyFieldDefinition[] = [ { code: "pic", label: "Picture", type: "url", readonly: true, isDefault: true, }, { code: "name", label: "<NAME>", type: "simpletext", readonly: false, isDefault: true, }, { code: "forename", label: "<NAME>", type: "simpletext", readonly: false, isDefault: true, }, { code: "tel", label: "Telephone", type: "simpletext", readonly: false, isDefault: true, }, { code: "email", label: "Email", type: "email", readonly: false, isDefault: true, }, { code: "mobile", label: "Mobile", type: "simpletext", readonly: false, isDefault: true, }, { code: "civil", label: "Civility", type: "select", readonly: false, isDefault: true, allowedValues: ["man", "woman", "lady"], }, { code: "position", label: "Position", type: "simpletext", readonly: false, isDefault: true, }, { code: "birthdate", label: "Date of Birth", type: "date", readonly: false, isDefault: true, }, { code: "thirdid", label: "Third ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "id", label: "Sellsy ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "peopleid", label: "People ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "fullName", label: "<NAME>", type: "simpletext", readonly: true, isDefault: true, }, { code: "corpid", label: "Corporation ID", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_tel", label: "Formatted Telephone", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_mobile", label: "Mobile", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_fax", label: "Formatted Telefax", type: "simpletext", readonly: true, isDefault: true, }, { code: "formatted_birthdate", label: "Formatted Date of Birth", type: "date", readonly: true, isDefault: true, }, { code: "actif", label: "Active", type: "boolean", readonly: true, isDefault: true, }, { code: "massmailingUnsubscribed", label: "Unsubscribe to email campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedSMS", label: "Unsubscribe to SMS campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "phoningUnsubscribed", label: "Unsubscribe to phone campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedMail", label: "Unsubscribe to postal campaigns", type: "boolean", readonly: false, isDefault: true, }, { code: "massmailingUnsubscribedCustom", label: "Unsubscribe to personalized marketing campaigns", type: "boolean", readonly: false, isDefault: true, }, ]; export interface SellsyWebhookRequest { notif: { eventType: string; timestamp: string; event: string; relatedid: string; relatedtype: string; thirdtype: string; ownerid: string; ownertype: string; corpid: string; }; } export interface SellsyClientProspectDetail { client: SellsyClientDetailClientData; corporation?: SellsyCorporationData; contact?: SellsyContactData; contacts?: { [key: string]: SellsyContact; }; address: SellsyAddress[]; smartTags?: { [key: string]: SellsySmartTagBase; }; tags?: { [key: string]: SellsySmartTagBase; }; customFields?: { [key: 
string]: { list: SellsyClientCustomField[]; }; }; avatar: { type: string; value: string; class: number; }; score: { value: string; formatted: string; }; } export interface SellsyClientDetailClientData { id: string; detailsid: string; // corporation.id corpid: string; // sellsy account id ownerid: string; // staff id joindate: string; // SQL Date type: string; relationType: string; status: string; actif: string; // boolean maincontactid: string; ident: string; accountingCode: string; accountingPurchaseCode: string; auxCode: string; stickyNote: string; twitter: string; facebook: string; linkedin: string; viadeo: string; rateCategory: string; wasProspect: string; // boolean source: string; originid: string | null; originType: string; tenLastType: string; massmailingUnsubscribed: string; massmailingUnsubscribedSMS: string; phoningUnsubscribed: string; massmailingUnsubscribedMail: string; massmailingUnsubscribedCustom: string; firmsearchurl: string; lang: string; lastactivity: string | null; business_segment: string | null; number_of_employees: string | null; name: string; web: string; formatted_joindate: string; transformationDate: string | null; } export interface SellsyContactData { id: string; corpid: string; ownerid: string; linkedtype: string; linkedid: string; status: string; rank: string; gender: string; civil: string; forename: string; name: string; email: string; web: string; tel: string; mobile: string; fax: string; position: string; pic: string; sign: string; birthdate: string; twitter: string; linkedin: string; facebook: string; viadeo: string; stickyNote: string; mainAddressID: string; mainDelivAddressID: string; mailchimp: string; mailjet: string; simplemail: string; massmailingUnsubscribed: string; massmailingUnsubscribedSMS: string; phoningUnsubscribed: string; massmailingUnsubscribedMail: string; massmailingUnsubscribedCustom: string; langid: string; lang: string; actif: string; created: string; lastUpdate: string; fullName: string; formatted_tel: string; formatted_mobile: string; formatted_fax: string; mcoptin: string; mjoptin: string; smoptin: string; } export interface SellsyCorporationData { id: string; linkedtype: string; linkedid: string; prefsid: string; accountingPrefsId: string; logo: string; name: string; email: string; web: string; tel: string; mobile: string; fax: string; siret: string; vat: string; apenaf: string; rcs: string; type: string; capital: string; mainaddressid: string; maindelivaddressid: string; siren: string; formatted_tel: string; formatted_mobile: string; formatted_fax: string; } export interface SellsyAddress { status: string; rank: string; name: string; part1: string; part2: string; part3: string; part4: string; zip: string; town: string; state: string; countrycode: string; originalid: string; lat: string; lng: string; id: string; isMain: string; isMainDeliv: string; countryname: string; toHTML: string; } export interface SellsyContactDetailData { id: string; corpid: string; ownerid: string; linkedtype: string; linkedid: string; status: string; rank: string; gender: string; civil: string; forename: string; name: string; email: string; web: string; tel: string; mobile: string; fax: string; position: string; pic: string; sign: string; birthdate: string; twitter: string; linkedin: string; facebook: string; viadeo: string; stickyNote: string; mainAddressID: string; mainDelivAddressID: string; mailchimp: string; mailjet: string; simplemail: string; massmailingUnsubscribed: string; massmailingUnsubscribedSMS: string; phoningUnsubscribed: string; 
massmailingUnsubscribedMail: string; massmailingUnsubscribedCustom: string; langid: string; lang: string; actif: string; created: string; lastUpdate: string; fullName: string; avatar: { type: string; value: string; class: number; }; formatted_birthdate: string; tags: []; customFields?: { [key: string]: { list: SellsyClientCustomField[]; }; }; formatted_tel: string; formatted_mobile: string; formatted_fax: string; }
Ag2r La Mondiale move into second place

The UCI Pro Tour started in 2005 as a way of categorising the top teams in pro cycling and has since become the World Tour. Since this designation came into being many teams have enjoyed success and failure, but we must go back to 2005 for the longest losing streak, when Saunier Duval didn't win a race of any kind until 19 May, with Leonardo Piepoli taking a stage in the Tour of Catalonia. But French squad Ag2r La Mondiale have now surpassed the second longest losing streak, set by Euskaltel-Euskadi in 2005 when they didn't win a race until 8 May. With no stage win in the Giro and Europcar's Matteo Pelucchi winning the final stage of the Four Days of Dunkirk, Ag2r now beat the Basque team to take second spot in the losing streak standings and need to win a race in the next 10 days to avoid setting the "record" currently held by Saunier Duval.

I feel bad pointing this out as it is not for want of trying: they've had two second places this year, John Gadret won a cyclocross race on New Year's Day and Matteo Montaguti won the mountains competition in the Critérium International. But Ag2r have yet to win a road race of any kind anywhere in the world this year. Sometimes these things happen and the team can't get a lucky break. In interviews riders have also spoken about feeling the pressure and fears of a negative spiral. Perhaps we should note that in other sports the management would have been fired by now. But in cycling the team management often own the squad; they merely rent out the naming rights and "real estate" on the jersey, shorts and team vehicles to a willing sponsor, so there is no board or other authority to replace the manager.

It is not all bad luck; there are factors that explain the lack of results. First, the team's sprinters don't have a proven record. Jimmy Casper is the best bet but it hasn't yet worked out, whilst Sébastien Minard and Lloyd Mondory are regular top-10 finishers but outright wins are rare. And new recruit Manuel Belletti hasn't delivered yet. More broadly they're the fifth oldest team in the World Tour and a squad that didn't win much last year either. When it came to making changes to the roster for 2012, not much changed. One shining recruit is Romain Bardet, author of an impressive move in the Amstel when he gradually dropped everyone from the morning break to stay away solo until he was caught by the lead group just before the final climb of the Cauberg. But other recruits include Gregor Gazvoda, Boris Shpilevsky and Amir Zargari, who, whilst all fine elite athletes, were primarily hired for their past haul of points on the UCI Asia Tour rather than their potential to win in Europe. Meanwhile Jean-Christophe Péraud has been practising for the MTB Olympics instead of focussing 100% on the road. Good for him; he said in a recent interview he'd rather win gold than win the Tour de France and besides, he's a rare winner on the road. But it's another factor behind the team's lack of success.

Looking forward

A Giro stage win looks possible for John Gadret; he won last year after all and given the more open race perhaps he'll deliver again. If not, Hubert Dupont is another candidate for success in Italy, so there's every chance the now disgraced Saunier Duval team retains the dismal record. Back in France there's also the Tour de Picardie and the Circuit de Lorraine Professionnel, two modest races well within the reach of a UCI World Tour team.
But the risk is that the team feels more and more pressure to win and this makes them lose their cool, for example attacking too early out of desperation when in a breakaway. Still, they need to rack up points in order to stay in the top tier for 2013 and beyond, and so far the strategy has been to place riders in the top-10, which does win points. Of course, should they win a stage in the Tour de France or take the yellow jersey for a few days, then the entire 2012 season will be a success for the sponsor because the publicity generated will be enormous.
// Code generated by goa v2.0.0-wip, DO NOT EDIT. // // chatter HTTP client types // // Command: // $ goa gen goa.design/goa/examples/chatter/design -o // $(GOPATH)/src/goa.design/goa/examples/chatter package client import ( goa "goa.design/goa" chattersvc "goa.design/goa/examples/chatter/gen/chatter" chattersvcviews "goa.design/goa/examples/chatter/gen/chatter/views" ) // SummaryResponseBody is the type of the "chatter" service "summary" endpoint // HTTP response body. type SummaryResponseBody []*ChatSummaryResponseBody // HistoryResponseBody is the type of the "chatter" service "history" endpoint // HTTP response body. type HistoryResponseBody struct { // Message sent to the server Message *string `form:"message,omitempty" json:"message,omitempty" xml:"message,omitempty"` // Length of the message sent Length *int `form:"length,omitempty" json:"length,omitempty" xml:"length,omitempty"` // Time at which the message was sent SentAt *string `form:"sent_at,omitempty" json:"sent_at,omitempty" xml:"sent_at,omitempty"` } // LoginUnauthorizedResponseBody is the type of the "chatter" service "login" // endpoint HTTP response body for the "unauthorized" error. type LoginUnauthorizedResponseBody string // EchoerInvalidScopesResponseBody is the type of the "chatter" service // "echoer" endpoint HTTP response body for the "invalid-scopes" error. type EchoerInvalidScopesResponseBody string // EchoerUnauthorizedResponseBody is the type of the "chatter" service "echoer" // endpoint HTTP response body for the "unauthorized" error. type EchoerUnauthorizedResponseBody string // ListenerInvalidScopesResponseBody is the type of the "chatter" service // "listener" endpoint HTTP response body for the "invalid-scopes" error. type ListenerInvalidScopesResponseBody string // ListenerUnauthorizedResponseBody is the type of the "chatter" service // "listener" endpoint HTTP response body for the "unauthorized" error. type ListenerUnauthorizedResponseBody string // SummaryInvalidScopesResponseBody is the type of the "chatter" service // "summary" endpoint HTTP response body for the "invalid-scopes" error. type SummaryInvalidScopesResponseBody string // SummaryUnauthorizedResponseBody is the type of the "chatter" service // "summary" endpoint HTTP response body for the "unauthorized" error. type SummaryUnauthorizedResponseBody string // HistoryInvalidScopesResponseBody is the type of the "chatter" service // "history" endpoint HTTP response body for the "invalid-scopes" error. type HistoryInvalidScopesResponseBody string // HistoryUnauthorizedResponseBody is the type of the "chatter" service // "history" endpoint HTTP response body for the "unauthorized" error. type HistoryUnauthorizedResponseBody string // ChatSummaryResponseBody is used to define fields on response body types. type ChatSummaryResponseBody struct { // Message sent to the server Message *string `form:"message,omitempty" json:"message,omitempty" xml:"message,omitempty"` // Length of the message sent Length *int `form:"length,omitempty" json:"length,omitempty" xml:"length,omitempty"` // Time at which the message was sent SentAt *string `form:"sent_at,omitempty" json:"sent_at,omitempty" xml:"sent_at,omitempty"` } // NewLoginUnauthorized builds a chatter service login endpoint unauthorized // error. func NewLoginUnauthorized(body LoginUnauthorizedResponseBody) chattersvc.Unauthorized { v := chattersvc.Unauthorized(body) return v } // NewEchoerInvalidScopes builds a chatter service echoer endpoint // invalid-scopes error. 
func NewEchoerInvalidScopes(body EchoerInvalidScopesResponseBody) chattersvc.InvalidScopes { v := chattersvc.InvalidScopes(body) return v } // NewEchoerUnauthorized builds a chatter service echoer endpoint unauthorized // error. func NewEchoerUnauthorized(body EchoerUnauthorizedResponseBody) chattersvc.Unauthorized { v := chattersvc.Unauthorized(body) return v } // NewListenerInvalidScopes builds a chatter service listener endpoint // invalid-scopes error. func NewListenerInvalidScopes(body ListenerInvalidScopesResponseBody) chattersvc.InvalidScopes { v := chattersvc.InvalidScopes(body) return v } // NewListenerUnauthorized builds a chatter service listener endpoint // unauthorized error. func NewListenerUnauthorized(body ListenerUnauthorizedResponseBody) chattersvc.Unauthorized { v := chattersvc.Unauthorized(body) return v } // NewSummaryChatSummaryCollectionOK builds a "chatter" service "summary" // endpoint result from a HTTP "OK" response. func NewSummaryChatSummaryCollectionOK(body SummaryResponseBody) chattersvcviews.ChatSummaryCollectionView { v := make([]*chattersvcviews.ChatSummaryView, len(body)) for i, val := range body { v[i] = &chattersvcviews.ChatSummaryView{ Message: val.Message, Length: val.Length, SentAt: val.SentAt, } } return v } // NewSummaryInvalidScopes builds a chatter service summary endpoint // invalid-scopes error. func NewSummaryInvalidScopes(body SummaryInvalidScopesResponseBody) chattersvc.InvalidScopes { v := chattersvc.InvalidScopes(body) return v } // NewSummaryUnauthorized builds a chatter service summary endpoint // unauthorized error. func NewSummaryUnauthorized(body SummaryUnauthorizedResponseBody) chattersvc.Unauthorized { v := chattersvc.Unauthorized(body) return v } // NewHistoryChatSummaryOK builds a "chatter" service "history" endpoint result // from a HTTP "OK" response. func NewHistoryChatSummaryOK(body *HistoryResponseBody) *chattersvcviews.ChatSummaryView { v := &chattersvcviews.ChatSummaryView{ Message: body.Message, Length: body.Length, SentAt: body.SentAt, } return v } // NewHistoryInvalidScopes builds a chatter service history endpoint // invalid-scopes error. func NewHistoryInvalidScopes(body HistoryInvalidScopesResponseBody) chattersvc.InvalidScopes { v := chattersvc.InvalidScopes(body) return v } // NewHistoryUnauthorized builds a chatter service history endpoint // unauthorized error. func NewHistoryUnauthorized(body HistoryUnauthorizedResponseBody) chattersvc.Unauthorized { v := chattersvc.Unauthorized(body) return v } // Validate runs the validations defined on ChatSummaryResponseBody func (body *ChatSummaryResponseBody) Validate() (err error) { if body.Message == nil { err = goa.MergeErrors(err, goa.MissingFieldError("message", "body")) } if body.SentAt != nil { err = goa.MergeErrors(err, goa.ValidateFormat("body.sent_at", *body.SentAt, goa.FormatDateTime)) } return }
class Graph:
    """ Object holding the representation of the graph and some metrics """

    def __init__(self):
        self.individuals = []
        self.adj = []
        self.encounters = [[[] for day in range(daysNotif)] for individual in range(nbIndividuals)]

        self.nbHealthy = 0         # number of healthy people
        self.nbAS = 0              # number of asymptomatic people
        self.nbPS = 0              # number of presymptomatic people
        self.nbS = 0               # number of symptomatic people
        self.nbCured = 0           # number of cured people
        self.nbDead = 0            # number of deceased people
        self.nbQuarantineI = 0     # number of infected people in quarantine
        self.nbQuarantineNonI = 0  # number of non infected people in quarantine
        self.nbTest = 0            # number of tests made

        # cumulative counters:
        self.nbQuarantineTotal = 0  # number of people in quarantine
        self.nbInfectedByASPS = 0   # number of people infected by asymp. + presymp. people

        # to compute Rt
        self.stepNb = 0
        self.contaminations = []           # number of people contaminated at a given time
        self.numInfectedByNewInfected = []
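# Usage sketch (not part of the original module): Graph.__init__ relies on the
# module-level constants `daysNotif` and `nbIndividuals`, which are assumed here
# purely for illustration, as is the way the counters are read back.
daysNotif = 7        # assumed: how many days of encounter notifications are kept
nbIndividuals = 100  # assumed: size of the simulated population

g = Graph()
g.nbHealthy = nbIndividuals            # everyone starts healthy
print(len(g.encounters), g.nbHealthy)  # -> 100 100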
use proconio::fastout;
use whiteread::parse_line;

#[fastout]
fn main() {
    let a: usize = parse_line().unwrap();
    let b: usize = parse_line().unwrap();
    let c: usize = parse_line().unwrap();
    let d: usize = parse_line().unwrap();
    println!("{}", a.min(b) + c.min(d));
}
/** * Disposes of the operating system resources associated with the clipboard. * The data will still be available on the system clipboard after the dispose * method is called. * * <p>NOTE: On some platforms the data will not be available once the application * has exited or the display has been disposed.</p> * * @exception SWTException <ul> * <li>ERROR_THREAD_INVALID_ACCESS - if not called from the thread that created the parent</li> * </ul> */ public void dispose () { if (isDisposed()) return; if (display.getThread() != Thread.currentThread()) DND.error(SWT.ERROR_THREAD_INVALID_ACCESS); if (COM.OleIsCurrentClipboard(this.iDataObject.getAddress()) == COM.S_OK) { COM.OleFlushClipboard(); } this.Release(); display = null; }
def _remove_excluded_lines(formatted, exclude):
    out = list()
    for line in formatted.splitlines():
        for ex in exclude:
            badlines = BADLINES.get(ex, list())
            if any(line.startswith(i) for i in badlines):
                # the line starts with a prefix that this exclusion suppresses: drop it
                break
        else:
            # no exclusion matched (the inner loop ran to completion), so keep the line
            out.append(line)
    return "\n".join(out)
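# Usage sketch (illustrative only): BADLINES is a module-level mapping from an exclusion
# key to the line prefixes it suppresses; the mapping below is a hypothetical stand-in,
# not the real table from the original module.
BADLINES = {
    "imports": ["import ", "from "],
    "comments": ["#"],
}

text = "import os\n# a comment\nvalue = 1"
print(_remove_excluded_lines(text, exclude=["imports", "comments"]))
# -> "value = 1"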
<gh_stars>10-100 package github import ( "time" "github.com/giantswarm/microerror" "github.com/giantswarm/micrologger" ratelimit "github.com/giantswarm/draughtsman/service/eventer/github/internal/ratelimit" "github.com/giantswarm/draughtsman/service/eventer/spec" httpspec "github.com/giantswarm/draughtsman/service/http" ) // GithubEventerType is an Eventer that uses Github Deployment Events as a backend. var GithubEventerType spec.EventerType = "GithubEventer" // Config represents the configuration used to create a GitHub Eventer. type Config struct { // Dependencies. HTTPClient httpspec.Client Logger micrologger.Logger Environment string OAuthToken string Organisation string PollInterval time.Duration ProjectList []string Provider string } // DefaultConfig provides a default configuration to create a new GitHub // Eventer by best effort. func DefaultConfig() Config { return Config{ // Dependencies. HTTPClient: nil, Logger: nil, } } // New creates a new configured GitHub Eventer. func New(config Config) (*GithubEventer, error) { if config.HTTPClient == nil { return nil, microerror.Maskf(invalidConfigError, "http client must not be empty") } if config.Logger == nil { return nil, microerror.Maskf(invalidConfigError, "logger must not be empty") } if config.Environment == "" { return nil, microerror.Maskf(invalidConfigError, "environment must not be empty") } if config.OAuthToken == "" { return nil, microerror.Maskf(invalidConfigError, "oauth token must not be empty") } if config.Organisation == "" { return nil, microerror.Maskf(invalidConfigError, "organisation must not be empty") } if config.PollInterval.Seconds() == 0 { return nil, microerror.Maskf(invalidConfigError, "interval must be greater than zero") } if len(config.ProjectList) == 0 { return nil, microerror.Maskf(invalidConfigError, "project list must not be empty") } if config.Provider == "" { return nil, microerror.Maskf(invalidConfigError, "provider must not be empty") } eventer := &GithubEventer{ // Dependencies. client: config.HTTPClient, logger: config.Logger, rateLimiter: ratelimit.New(), // Settings. environment: config.Environment, oauthToken: config.OAuthToken, organisation: config.Organisation, pollInterval: config.PollInterval, projectList: config.ProjectList, provider: config.Provider, } return eventer, nil } // GithubEventer is an implementation of the Eventer interface, // that uses GitHub Deployment Events as a backend. type GithubEventer struct { // Dependencies. client httpspec.Client logger micrologger.Logger rateLimiter *ratelimit.RateLimiter // Settings. 
environment string oauthToken string organisation string pollInterval time.Duration projectList []string provider string } func (e *GithubEventer) NewDeploymentEvents() (<-chan spec.DeploymentEvent, error) { e.logger.Log("debug", "starting polling for github deployment events", "interval", e.pollInterval) deploymentEventChannel := make(chan spec.DeploymentEvent) ticker := time.NewTicker(e.pollInterval) go func() { etagMap := make(map[string]string) for c := ticker.C; ; <-c { e.logger.Log("debug", "Fetching deployment events", "projectlist", e.projectList) for _, project := range e.projectList { deployments, err := e.fetchNewDeploymentEvents(project, etagMap) if err != nil { e.logger.Log("error", "could not fetch deployment events", "message", err.Error()) } for _, deployment := range deployments { deploymentEventChannel <- deployment.DeploymentEvent(project) } } } }() return deploymentEventChannel, nil } func (e *GithubEventer) SetPending(event spec.DeploymentEvent) error { return e.postDeploymentStatus(event.Name, event.ID, pendingState) } func (e *GithubEventer) SetSuccess(event spec.DeploymentEvent) error { return e.postDeploymentStatus(event.Name, event.ID, successState) } func (e *GithubEventer) SetFailed(event spec.DeploymentEvent) error { return e.postDeploymentStatus(event.Name, event.ID, failureState) }
/** *\brief Prints the DAQ mapping loaded by TotemDAQMappingESSourceXML. **/ class PrintTotemDAQMapping : public edm::one::EDAnalyzer<> { public: PrintTotemDAQMapping(const edm::ParameterSet &ps); ~PrintTotemDAQMapping() override {} private: std::string subSystemName; void analyze(const edm::Event &e, const edm::EventSetup &es) override; }
/** * saves the current state image to a JPG */ public void ToJPG(String path) { if (active) { SetupScaledBuff(); File out = new File(path); try { ImageIO.write(scaledBuff, "jpg", out); } catch (IOException ex) { ex.printStackTrace(); } } }
def alloc_max_array():
    collection = []
    while True:
        try:
            collection.append(MEGA_STR)
        except MemoryError as error:
            log_exception(error)
            break
        except Exception as exception:
            log_exception(exception, False)
    print('Maximum array size:', len(collection) * 10)
    print_memory_usage()
topic "WithFactory"; [ $$0,0#00000000000000000000000000000000:Default] [i448;a25;kKO9; $$1,0#37138531426314131252341829483380:structitem] [l288;2 $$2,0#27521748481378242620020725143825:desc] [0 $$3,0#96390100711032703541132217272105:end] [i448;b42;O9;2 $$4,4#61672508125594000341940100500538:tparam] [b42;2 $$5,5#13035079074754324216151401829390:normal] [H6;0 $$6,0#05600065144404261032431302351956:begin] [i448;a25;kKO9;2 $$7,0#37138531426314131252341829483370:codeitem] [{_}%EN-US [ {{10000@(113.42.0) [s0; [*@7;4 WithFactory]]}}&] [s0;*@3;4 &] [s1;:noref:%- [@(0.0.255) template]_<[@(0.0.255) class]_[*@4 T]>&] [s1;:WithFactory`:`:class:%- [@(0.0.255) class]_[* WithFactory]&] [s4; [*C@4 T]-|Type of base class of polymorphic hierarchy.&] [s5; This template class adds some RTTI (Run Time Type Identification) to your classes, and implements a so called `'class factory`', bringing the ability to register class hierarchies, construct them by name, check their type at runtime, list all classes in hierarchy and so on. &] [s5; It`'s a base behaviour for most polymorphic class usage.&] [s2; &] [s3;%- &] [s0;2 &] [ {{10000F(128)G(128)@1 [s0; [* Basic usage]]}}&] [s3;%- &] [s5; To implement the factory behaviour in your base class, just derive from WithFactory :&] [s0; &] [s2; [* class MyBaseClass : public WithFactory<MyBaseClass>]&] [s2; [* `{]&] [s2; [* -|-|.........]&] [s2; [* `};]&] [s5; Then you can derive your class hierarchy from base, as usual :&] [s0; &] [s2; [* class MyDerivedClass : public MyBaseClass]&] [s2; [* `{]&] [s2; [* -|-|.........]&] [s2; [* `};]&] [s5; When you`'ve your class definition, you must register them; that`'s done inserting in a .cpp file (or in separate ones if you like, but NOT inside include files, the following statement :&] [s5; &] [s2; [* REGISTERCLASS(MyBaseClass `[, `"a class description`" `[, anIndex `[, `"an Iml image name`"`]`]`])]&] [s2; [* REGISTERCLASS(MyDerivedClass `[, `"a class description`" `[, anIndex `[, `"an Iml image name`"`]`]`])]&] [s5; &] [s5; where you can insert an optional class description string, an index and and icon in Iml format; their usage will be clarified later on.&] [s5; Class creation can be done by following ways :&] [s5; &] [s2; As a pointer by classic new operator:&] [s2; -|-|[* MyBaseClass `*ptr `= new MyBaseClass;]&] [s2; [* -|-|MyBaseClass `*ptr `= new MyDerivedClass;]&] [s0; &] [s2; As a pointer, by ascii class name :&] [s2; -|-|[* MyBaseClass `*ptr `= MyBaseClass`::CreatePtr(`"MyBaseClass`");]&] [s2; [* -|-|MyBaseClass `*ptr `= MyBaseClass`::CreatePtr(`"MyDerivedClass`");]&] [s0; &] [s2; As One<MyBaseClass> :&] [s2; -|-|[* One<MyBaseClass> ptr `= MyBaseClass`::CreateInstance(`"MyBaseClass`");]&] [s2; [* -|-|One<MyBaseClass> ptr `= MyBaseClass`::CreateInstance(`"MyDerivedClass`");]&] [s0; &] [s5; You can inquire the type of an object at runtime with the [* IsA()] member function `::&] [s5; &] [s2; [* MyBaseClass `*ptr `= new MyDerivedClass;]&] [s2; [* String classType `= ptr`->IsA(); ]returns the string `"MyDerivedClass`"&] [s0; &] [s5; You can also list at runtime your class hierarchy by mean of static member [* Classes()]:&] [s5; &] [s2; [* Vector<String> const `&classList `= MyBaseClass`::Classes();]&] [s0; &] [s5; or get descriptions by class name with [* GetClassDescription()] static member :&] [s5; &] [s2; [* String classDesc `= MyBaseClass`::GetClassDescription(`"MyBaseClass`");]&] [s2; [* String classDesc `= MyBaseClass`::GetClassDescription(`"MyDerivedClass`");]&] [s0; &] [s5; or get their integer `'index`' with [* 
GetClassIndex()] static member :&] [s5; &] [s2; [* int classIndex `= MyBaseClass`::GetClassIndex(`"MyBaseClass`");]&] [s2; [* int classIndex `= MyBaseClass`::GetClassIndex(`"MyDerivedClass`");]&] [s0;2 &] [ {{10000F(128)G(128)@1 [s0; [* Class registering]]}}&] [s3;%- &] [s7;:REGISTERCLASS`(type`, `.`.`.`):%- [* REGISTERCLASS]([*@3 type])&] [s2; Registers a class by its [%-*@3 type].&] [s3; &] [s6; &] [s7;:REGISTERCLASS`(type`, `.`.`.`):%- [* REGISTERCLASS]([*@3 type], [*@3 description])&] [s2; Registers a class by its [%-*@3 type] giving an optional [%-*@3 description ]which can be queried later on. Main purpose of [%-*@3 description] is the presentation of class lists on menus and or dialogs when creating classes at runtime.&] [s3; &] [s7;:REGISTERCLASS`(type`, `.`.`.`):%- [* REGISTERCLASS]([*@3 type], [*@3 description, index])&] [s2; Registers a class by its [%-*@3 type] giving an optional [%-*@3 description ]and an integer [%-*@3 index] which both can be queried later on. Main purpose of [%-*@3 index] is to give the ability to sort at runtime the class list by importance. [%-*@3 Index] can be any integer number &] [s6; &] [s7;:REGISTERCLASS`(type`, `.`.`.`):%- [* REGISTERCLASS]([*@3 type], [*@3 description, index, icon])&] [s2; Registers a class by its [%-*@3 type] giving an optional [%-*@3 description], an integer [%-*@3 index] and an [%-*@3 icon ]which can be queried later on. Main purpose of [%-*@3 index] is to give the ability to sort at runtime the class list by importance or to have sort of class grouping. [%-*@3 Index] can be any integer number [%-*@3 icon ]should be a String containing a full Iml icon name, as `"MyIml`::MyImage`" &] [s3; &] [ {{10000F(128)G(128)@1 [s0; [* Detailed member list]]}}&] [s3;%- &] [s6;%- &] [s7;:WithFactory`:`:Create`(const String`&`):%- [@(0.0.255) static] [_^One^ One]<[*@4 T]>_[* CreateInstance]([@(0.0.255) const]_[_^String^ String]_`&[*@3 classNa me])&] [s2; Creates a class derived from hierarchy base T by its [%-*@3 className].&] [s2; Returns a smart pointer to the base class T&] [s3; &] [s6;%- &] [s7;:WithFactory`:`:CreatePtr`(String const`&`):%- [@(0.0.255) static] [*@4 T]_`*[* CreatePtr]([_^String^ String]_[@(0.0.255) const]_`&[*@3 className])&] [s2; Creates a class derived from hierarchy base T by its [%-*@3 className].&] [s2; Returns a traditional pointer to the base class T&] [s3; &] [s6;%- &] [s7;:WithFactory`:`:Classes`(void`):%- [@(0.0.255) static] [_^Vector^ Vector]<[_^String^ St ring]>_[@(0.0.255) const]_`&[* Classes]([@(0.0.255) void])&] [s2; Returns Vector of strings containing all registered class names in hierarchy.&] [s3;%- &] [s6;%- &] [s7;:WithFactory`:`:GetClassDescription`(const String`&`):%- [@(0.0.255) static] [_^String^ String]_[@(0.0.255) const]_`&[* GetClassDescription]([@(0.0.255) const]_[_^String^ S tring]_`&[*@3 className])&] [s2; Returns an ascii description of a class identified by [%-*@3 className].&] [s2; If no description was given when registering the class, returns an empty string.&] [s3; &] [s6;%- &] [s7;:WithFactory`:`:GetClassIndex`(const String`&`):%- [@(0.0.255) static] [@(0.0.255) int]_[@(0.0.255) const]_`&[* GetClassIndex]([@(0.0.255) const]_[_^String^ Strin g]_`&[*@3 className])&] [s2; Return an integer index assigned to class type identified by [%-*@3 className].&] [s2; If no index was given when registering the class, returns 0.&] [s3; &] [s6;%- &] [s7;:WithFactory`:`:GetClassIndex`(const String`&`):%- [@(0.0.255) static] Image_[* GetClassImage]([@(0.0.255) const]_[_^String^ String]_`&[*@3 className])&] 
[s2; Return the Image object assigned to class type identified by [%-*@3 className].&] [s2; If no image was given when registering the class, returns Null.&] [s3; &] [s6;%- &] [s7;:WithFactory`:`:IsA`(void`):%- [_^String^ String]_[@(0.0.255) const]_`&[* IsA]([@(0.0.255) v oid])&] [s2; Returns a string containing the class name.&] [s3;%- &] [s6;%- &] [s0; ]]
from rest_framework.serializers import (
    ModelSerializer,
)

from apps.configs.models import (
    Config,
    AbstractProduct,
    Chart,
    Type,
    Source,
    Unit,
)


class ChartSerializer(ModelSerializer):
    class Meta:
        model = Chart
        fields = '__all__'


class TypeSerializer(ModelSerializer):
    class Meta:
        model = Type
        fields = '__all__'


class SourceSerializer(ModelSerializer):
    class Meta:
        model = Source
        fields = '__all__'


class ConfigSerializer(ModelSerializer):
    class Meta:
        model = Config
        fields = '__all__'


class AbstractProductSerializer(ModelSerializer):
    class Meta:
        model = AbstractProduct
        fields = '__all__'


class UnitSerializer(ModelSerializer):
    class Meta:
        model = Unit
        fields = '__all__'
/** * @param ignite Ignite instance. * @param partId Partition id. * @return File page store for given partition id. * @throws IgniteCheckedException If failed. */ private FilePageStore filePageStore(IgniteEx ignite, int partId) throws IgniteCheckedException { final PdsFolderSettings folderSettings = ignite.context().pdsFolderResolver().resolveFolders(); File storeWorkDir = new File(folderSettings.persistentStoreRootPath(), folderSettings.folderName()); File cacheWorkDir = new File(storeWorkDir, CACHE_DIR_PREFIX + DEFAULT_CACHE_NAME); File partFile = new File(cacheWorkDir, format(PART_FILE_TEMPLATE, partId)); return (FilePageStore)storeFactory.createPageStore(FLAG_DATA, partFile, a -> {}); }
/** * Format and render the given time to the given surface. * @param target The surface to render to. * @param rect The area of the surface in which to render. * @param time_ms The time to render in milliseconds. */ void render_time(SDL_Surface *target, SDL_Rect rect, unsigned int time_ms) { int m = time_ms/60000; int s = (time_ms/1000) % 60; int ms = time_ms % 1000; if (m > 99) { m = 99; s = 99; ms = 999; } const unsigned char gap = 1; char render_nums[9]; render_nums[0] = m / 10; render_nums[1] = m % 10; render_nums[2] = 10; render_nums[3] = s / 10; render_nums[4] = s % 10; render_nums[5] = 11; render_nums[6] = ms / 100; render_nums[7] = (ms % 100) / 10; render_nums[8] = ms % 10; int cut = 0; if (!render_nums[0]) { cut = 1; if (!render_nums[1]) { cut = 3; if (!render_nums[3]) { cut = 4; } } } for (int i = cut; i < 9; i++) { unsigned char num = render_nums[i]; SDL_BlitSurface(HUD.surface, &HUD.rects[num], target, &rect); rect.x += HUD.rects[num].w + gap; rect.w -= HUD.rects[num].w + gap; } }
def should_resume(self, data_container: DataContainer, context: ExecutionContext) -> bool:
    if not self.summary_checkpointer.checkpoint_exists(context.get_path(), data_container):
        return False

    current_ids = self.summary_checkpointer.read_summary(
        checkpoint_path=context.get_path(),
        data_container=data_container
    )

    for current_id in current_ids:
        if not self.data_input_checkpointer.checkpoint_exists(
                checkpoint_path=self._get_data_input_checkpoint_path(context),
                current_id=current_id
        ):
            return False
        if not self.expected_output_checkpointer.checkpoint_exists(
                checkpoint_path=self._get_expected_output_checkpoint_path(context),
                current_id=current_id
        ):
            return False

    return True
<reponame>CSM-TeachingProject/Exerc-cios package view; import control.CompraJpaController; import control.PessoaJpaController; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import model.Compra; import model.Pessoa; public class TesteJPA { public static void main(String[] args) { Pessoa p = new Pessoa(); Date d = new Date(); p.setNome("Isabela"); p.setLogin("isabela tal"); p.setEmail("<EMAIL>"); p.setSenha("<PASSWORD>"); try { PessoaJpaController j = new PessoaJpaController(EmProvider.getInstance().getEntityManagerFactory()); j.create(p); System.out.println(j.findPessoaEntities()); } catch (Exception ex) { Logger.getLogger(TesteJPA.class.getName()).log(Level.SEVERE, null, ex); } List<Compra> compras = new ArrayList<>(); Compra c1 = new Compra(); c1.setData(d); c1.setFormaDePagamento("Cartão"); c1.setPessoaidPessoa(p); try { CompraJpaController c = new CompraJpaController(EmProvider.getInstance().getEntityManagerFactory()); c.create(c1); compras.add(c1); p.setCompraList(compras); } catch (Exception ex) { Logger.getLogger(TesteJPA.class.getName()).log(Level.SEVERE, null, ex); } } }
When a human being's body temperature builds up, either because he is in a hot environment or because he has been exercising or working a great deal, he begins to perspire. When people sweat, it is fairly obvious. Everybody perspires, although some do so more than others. For some human beings sweat is only visible under their arms and on their brows, while other people seem to sweat almost everywhere.

Sweating is one of the ways that our body regulates its temperature. In humans, our sweat glands are distributed over most of our body's surface. When our internal temperature rises to an unhealthy level, the sweat provides a slick of moisture over the skin, which then begins to evaporate. As a fluid evaporates it cools, and in that way the sweat helps to lower our body temperature by effectively wrapping us in a thin cool layer.

A dog's skin is quite different, which is why you have never seen a dog with sweaty underarms. Most of the dog's sweat glands are located around its foot pads. That is why, when a dog is overheated, you will sometimes see a trail of wet footprints that he has left behind as he walked across the floor. Rather than relying upon sweat, the principal mechanism that a dog uses to cool himself involves panting with his mouth open. This allows the moisture on his tongue to evaporate, and the heavy breathing also allows the moist lining of his lungs to serve as a surface from which moisture can evaporate. In this way the dog can manage a significant cooling of his body temperature.

Another mechanism that dogs use to try to cool off involves dilating or expanding blood vessels in their face and ears. If it is not too hot outside, this helps to cool the dog's blood by causing it to flow closer to the surface of the skin. This mechanism works best if the overheating is due to exercise, rather than a high outside temperature.

You might guess that another reason why dogs might not deal well with heat is because they are covered in fur, which could make their bodies quite hot in the summer. This is only partially the case, since fur is actually an insulator that serves as a barrier between the outside environment and the dog's interior. It acts much like the vacuum barrier in a thermos. Thus in the winter the fur preserves the body heat and serves as a barrier to keep the cold out. In the summer it is a barrier to the outside heat. Unfortunately, in a continuously hot environment, once there is a temperature build-up in the body, the fur then serves as an impediment to cooling, since the heat has a hard time dissipating through it.

On a hot day, especially if the dog is very active, he can overheat, a condition known as hyperthermia. This can eventually lead to heat stroke. A dog that is overheated will seem sluggish and perhaps confused. If you look at his gums and tongue they may appear bright red, and he will probably be panting very hard. If left unattended, the dog may collapse, have a seizure, or even go into a coma.

A simple trick that many dog owners use to help keep their dogs cool on a hot day involves using a spray bottle or mister, such as those used on plants. Simply fill it with water and periodically spray your dog's body with it. In effect, you have created a condition where there is a slick of moisture covering your dog, and it will evaporate and have the same cooling effect as if your dog had sweat glands all over his body.

Stanley Coren is the author of many books including: Born to Bark, The Modern Dog, Why Do Dogs Have Wet Noses?
The Pawprints of History, How Dogs Think, How To Speak Dog, Why We Love the Dogs We Do, What Do Dogs Know? The Intelligence of Dogs, Why Does My Dog Act That Way? Understanding Dogs for Dummies, Sleep Thieves, The Left-hander Syndrome Copyright SC Psychological Enterprises Ltd. May not be reprinted or reposted without permission.
# Implementing Tower of Hanoi
c = 0


def move_tower(height, from_pole, to_pole, with_pole):
    if height >= 1:
        move_tower(height - 1, from_pole, with_pole, to_pole)
        move_disk(from_pole, to_pole)
        move_tower(height - 1, with_pole, to_pole, from_pole)


def move_disk(fp, tp):
    global c
    c += 1
    print('Step', c, '- Moving disk from', fp, 'to', tp)


move_tower(3, 'A', 'B', 'C')
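# Added note: a tower of height n always needs 2**n - 1 disk moves, because each
# recursive call moves a tower of height n-1 twice plus the single largest disk
# (T(n) = 2*T(n-1) + 1). For the move_tower(3, ...) call above that is 7 steps,
# which matches the final value of the step counter c.
assert c == 2 ** 3 - 1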
import { DeviceGL, createDevice } from '@gglib/graphics'
import { loop } from '@gglib/utils'

// Create the graphics device and pass the existing canvas element from the DOM.
const device = createDevice({
  canvas: document.getElementById('canvas') as HTMLCanvasElement,
})

// Create a shader program with vertex and fragment shaders.
// Here the shader source code is grabbed from the script tags.
const program = device.createProgram({
  vertexShader: document.getElementById('vertex-shader').textContent,
  fragmentShader: document.getElementById('fragment-shader').textContent,
})

// Create the vertex buffer as seen in the previous example.
const vertices = device.createVertexBuffer({
  // The `layout` describes that each vertex begins with a `vPosition` attribute
  // which is a `vec3` with 3 elements.
  layout: {
    vPosition: {
      type: 'float',
      offset: 0,
      elements: 3,
    },
    // It is then followed by a `vColor` attribute which is also a `vec3` with 3 elements
    // but has an offset of 12 bytes from the beginning of the vertex.
    vColor: {
      type: 'float',
      offset: 12,
      elements: 3,
    },
  },
  // The `data` is a sequence of floats that matches the `layout` specification.
  // Each 6 floats define a vertex where the first 3 floats are a `vPosition`
  // and the next 3 floats are the `vColor`
  data: [
    /* position */ -0.5, -0.5, 0.0, /* color */ 1, 0, 0,
    /* position */ 0.5, -0.5, 0.0, /* color */ 0, 1, 0,
    /* position */ 0.0, 0.5, 0.0, /* color */ 0, 0, 1,
  ],
})

// Start a loop function.
loop(() => {
  // If the size of the canvas is controlled by css (as it is on this page)
  // this call will resize the drawing buffer to match the new size of the canvas.
  device.resize()

  // Clear the screen.
  device.clear(0xff2e2620)

  // Now render the vertex buffer with the program.
  // The call to `drawPrimitives` instructs to
  // - draw the vertex buffer as a TriangleList
  // - starting at the beginning of the buffer (`0` offset)
  // - and draw only 3 vertices
  device.vertexBuffer = vertices
  device.program = program
  device.drawPrimitives('TriangleList', 0, 3)
})
#include <bits/stdc++.h> using namespace std; #define rep(i, a) for(int i = 0; i < a; i++) #define reps(i, a, b) for(int i = a; i < b; i++) #define MP(a, b) make_pair(a, b) int costa[100000]; int costb[100000]; int N; int A[100000]; int main() { cin >> N; rep(i, N) cin >> A[i]; int b = A[0]; costa[0] = abs(A[0]); reps(i, 1, N-1) { costa[i] = abs(A[i]-b) + costa[i-1]; b = A[i]; } b = A[N-1]; costb[N-1] = abs(b); reps(j, 2, N) { int i = N-j; costb[i] = abs(A[i]-b) + costb[i+1]; b = A[i]; } int mi = 1e9; rep(i, N) { int s = i == 0 ? 0 : costa[i-1]; int e = i == N-1 ? 0 : costb[i+1]; int S = i == 0 ? 0 : A[i-1]; int E = i == N-1 ? 0 : A[i+1]; cout << s+e+abs(S-E) << endl; } return 0; }
def box_to_rect(box, color, linewidth=3):
    box = box.asnumpy()
    return plt.Rectangle(
        (box[0], box[1]), box[2] - box[0], box[3] - box[1],
        fill=False, edgecolor=color, linewidth=linewidth)
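# Usage sketch (illustrative): the helper above assumes `plt` is matplotlib.pyplot and
# that `box` is an array-like with an .asnumpy() method (e.g. an MXNet NDArray) holding
# corner coordinates (xmin, ymin, xmax, ymax). Here it draws one bounding box on an axis.
import matplotlib.pyplot as plt
from mxnet import nd

fig, ax = plt.subplots()
ax.imshow(nd.zeros((100, 100, 3)).asnumpy())                 # a blank 100x100 "image"
ax.add_patch(box_to_rect(nd.array([20, 30, 70, 80]), 'red'))
plt.show()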
/* eslint-disable import/no-anonymous-default-export */ import React, { useEffect, useRef } from 'react'; import axios from '../Requests/Axios'; import '../Styles/sRow.css'; import MovieTile from './MovieTile'; import ArrowBackIosIcon from '@mui/icons-material/ArrowBackIos'; import ArrowForwardIosIcon from '@mui/icons-material/ArrowForwardIos'; import Box from '@mui/material/Box'; interface Movie { id: number; title: string; backdrop_path: string; overview: string; release_date: string; runtime: number; adult: boolean; genres: any[]; production_companies: any[]; } const defaultMovie = { id: 0, title: "", backdrop_path: "", overview: "", release_date: "", runtime: 0, adult: false, genres: [""], production_companies: [""], } export default function TenPopular(props: { title: string , fetchUrl: string }) { // eslint-disable-next-line @typescript-eslint/no-unused-vars const { title, fetchUrl } = props; const [movies, setMovies] = React.useState([defaultMovie]); const scrl = useRef<HTMLHeadingElement>(null); const [scrollX, setscrollX] = React.useState(0); let count = 1; useEffect (() => { async function fetchData() { const response = await axios.get(fetchUrl); get10Movies(response.data.results); return response; } fetchData(); }, [fetchUrl]); const get10Movies = (results : Movie[]) => { var movies: Movie[] = []; for (let i = 0; i < results.length; i++) { if (i < 10) { movies.push(results[i]); } } setMovies(movies); } const slide = (shift: number) => { if(scrl.current !== null) { scrl.current.scrollLeft += shift; setscrollX(scrollX + shift); } }; return ( <div className="div-row"> <h2>{title}</h2> <div className="row-poster"> <button className="prev" onClick={() => slide(-800)}> <ArrowBackIosIcon sx={{color: 'white'}}/> </button> <div className="row-poster" ref={scrl}> {movies.map((movie: any) => ( <Box sx={{ display: 'flex', alignItems: 'center'}}> <span className="numbers">{count++}</span> <MovieTile movie={movie} type={"poster"}/> </Box> ))} </div> <button className="next" onClick={() => slide(+800)}> <ArrowForwardIosIcon sx={{color: 'white'}}/> </button> </div> </div> ); }
def count_params(model):
    assert isinstance(model, nn.Module)
    count = 0
    for child in model.children():
        if is_leaf(child):
            if hasattr(child, "_mask"):
                count += child._mask.long().sum().item()
            else:
                for p in child.parameters():
                    count += p.nelement()
        else:
            count += count_params(child)
    return count
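# Usage sketch (illustrative): `is_leaf` is not shown in the snippet above; a common
# definition, assumed here, is a module with no children. With it, counting the
# parameters of a small Sequential model gives 4*2 + 2 = 10 for the Linear layer.
# Note that parameters registered directly on `model` itself (rather than on one of
# its children) are not visited by this walk.
import torch.nn as nn

def is_leaf(module):
    return len(list(module.children())) == 0

model = nn.Sequential(nn.Linear(4, 2))
print(count_params(model))  # -> 10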
#include "HOI4World/Map/Hoi4Building.h" #include "gtest/gtest.h" #include <sstream> TEST(HoI4World_Map_Building, ConnectingSeaProvinceDefaultsToZero) { const HoI4::BuildingPosition position; const HoI4::Building building(1, "type", position, std::nullopt); std::stringstream output; output << building; std::stringstream expectedOutput; expectedOutput << "1;type;0.00;0.00;0.00;0.00;0\n"; ASSERT_EQ(expectedOutput.str(), output.str()); } TEST(HoI4World_Map_Building, ConnectingSeaProvinceCanBeSet) { const HoI4::BuildingPosition position; const HoI4::Building building(1, "type", position, 2); std::stringstream output; output << building; std::stringstream expectedOutput; expectedOutput << "1;type;0.00;0.00;0.00;0.00;2\n"; ASSERT_EQ(expectedOutput.str(), output.str()); }
<gh_stars>0 /***************************************************** * * InstagramLoginActivity.java * * * Modified MIT License * * Copyright (c) 2010-2017 Kite Tech Ltd. https://www.kite.ly * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The software MAY ONLY be used with the Kite Tech Ltd platform and MAY NOT be modified * to be used with any competitor platforms. This means the software MAY NOT be modified * to place orders with any competitors to Kite Tech Ltd, all orders MUST go through the * Kite Tech Ltd platform servers. * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * *****************************************************/ ///// Package Declaration ///// package ly.kite.instagramphotopicker; ///// Import(s) ///// import android.app.Activity; import android.app.AlertDialog; import android.app.Fragment; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.MenuItem; import android.webkit.CookieManager; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; ///// Class Declaration ///// /***************************************************** * * This activity displays the Instagram login screen. * *****************************************************/ public class InstagramLoginActivity extends Activity { ////////// Static Constant(s) ////////// static private final String LOG_TAG = "InstagramLoginActivity"; static private final boolean DEBUGGING_ENABLED = false; static private final String EXTRA_PREFIX = "ly.kite.instagramimagepicker"; static private final String EXTRA_CLIENT_ID = EXTRA_PREFIX + ".EXTRA_CLIENT_ID"; static private final String EXTRA_REDIRECT_URI = EXTRA_PREFIX + ".EXTRA_REDIRECT_URI"; static private final String EXTRA_ACCESS_TOKEN = EXTRA_PREFIX + ".EXTRA_ACCESS_TOKEN"; ////////// Member Variable(s) ////////// private WebView mWebView; private String mClientId; private String mRedirectUri; ////////// Static Method(s) ////////// /***************************************************** * * Returns an intent used to start this activity. 
* *****************************************************/ static private Intent getIntent( Activity activity, String clientId, String redirectUri ) { Intent intent = new Intent( activity, InstagramLoginActivity.class ); intent.putExtra( EXTRA_CLIENT_ID, clientId ); intent.putExtra( EXTRA_REDIRECT_URI, redirectUri ); return ( intent ); } /***************************************************** * * Starts this activity. * *****************************************************/ public static void startLoginForResult( Activity activity, String clientId, String redirectUri, int requestCode ) { Intent intent = getIntent( activity, clientId, redirectUri ); activity.startActivityForResult( intent, requestCode ); } /***************************************************** * * Starts this activity. * *****************************************************/ public static void startLoginForResult( Fragment fragment, String clientId, String redirectUri, int requestCode ) { Intent intent = getIntent( fragment.getActivity(), clientId, redirectUri ); fragment.startActivityForResult( intent, requestCode ); } /***************************************************** * * Returns the access token from result data. * *****************************************************/ static public String getAccessToken( Intent data ) { return ( data.getStringExtra( EXTRA_ACCESS_TOKEN ) ); } /***************************************************** * * Ensures that we are logged out, by clearing any * cookies. * *****************************************************/ static public void logOut( Context context ) { CookieManager.getInstance().removeAllCookie(); } @Override protected void onCreate( Bundle savedInstanceState ) { super.onCreate( savedInstanceState ); mClientId = getIntent().getStringExtra( EXTRA_CLIENT_ID ); mRedirectUri = getIntent().getStringExtra( EXTRA_REDIRECT_URI ); setContentView( R.layout.screen_instagram_login ); mWebView = (WebView)findViewById( R.id.webview ); WebSettings webSettings = mWebView.getSettings(); webSettings.setJavaScriptEnabled( true ); mWebView.setWebViewClient( new InstagramWebViewClient() ); loadLoginPage(); } @Override public boolean onOptionsItemSelected( MenuItem item ) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
    int id = item.getItemId();

    if ( id == android.R.id.home )
      {
      setResult( RESULT_CANCELED );
      finish();
      return true;
      }

    return super.onOptionsItemSelected( item );
    }


  private void loadLoginPage()
    {
    String instagramAuthURL = "https://api.instagram.com/oauth/authorize/?client_id=" + this.mClientId +
            "&redirect_uri=" + this.mRedirectUri +
            "&response_type=token";

    mWebView.loadUrl( instagramAuthURL );
    }


  @Override
  protected void onSaveInstanceState( Bundle outState )
    {
    super.onSaveInstanceState( outState );

    outState.putString( EXTRA_CLIENT_ID, mClientId );
    outState.putString( EXTRA_REDIRECT_URI, mRedirectUri );

    mWebView.saveState( outState );
    }


  @Override
  protected void onRestoreInstanceState( Bundle savedInstanceState )
    {
    super.onRestoreInstanceState( savedInstanceState );

    mClientId = savedInstanceState.getString( EXTRA_CLIENT_ID );
    mRedirectUri = savedInstanceState.getString( EXTRA_REDIRECT_URI );

    mWebView.restoreState( savedInstanceState );
    }


  @Override
  public void onBackPressed()
    {
    setResult( RESULT_CANCELED );
    finish();
    }


  private void gotAccessToken( final String instagramAccessToken )
    {
    Intent resultData = new Intent();
    resultData.putExtra( EXTRA_ACCESS_TOKEN, instagramAccessToken );

    setResult( RESULT_OK, resultData );
    finish();
    }


  @Override
  protected void onActivityResult( int requestCode, int resultCode, Intent data )
    {
    setResult( resultCode, data );
    finish();
    }


  private final String getLoginErrorMessage( Uri uri )
    {
    // Default message
    String errorMessage = getString( R.string.kitesdk_instagram_error_title );

    String errorReason = uri.getQueryParameter( "error_reason" );

    if ( errorReason != null )
      {
      if ( ! errorReason.equalsIgnoreCase( "user_denied" ) )
        {
        String errorDescription = uri.getQueryParameter( "error_description" );

        if ( errorDescription != null )
          {
          try
            {
            // Use the URL-decoded error description returned by Instagram as the message
            errorMessage = URLDecoder.decode( errorDescription, "UTF-8" );
            }
          catch ( UnsupportedEncodingException ignore )
            {
            // Ignore
            }
          }
        }
      }

    return ( errorMessage );
    }


  private void showErrorDialog( String message )
    {
    AlertDialog.Builder builder = new AlertDialog.Builder( this );

    builder.setTitle( R.string.kitesdk_instagram_alert_dialog_title );
    builder.setMessage( message );
    builder.setPositiveButton( R.string.kitesdk_button_text_retry, null );
    builder.setNegativeButton( R.string.kitesdk_button_text_cancel, new CancelButtonClickListener() );

    builder.show();
    }


  ////////// Inner Class(es) //////////

  private class InstagramWebViewClient extends WebViewClient
    {
    public boolean shouldOverrideUrlLoading( WebView view, String url )
      {
      if ( DEBUGGING_ENABLED ) Log.d( LOG_TAG, "shouldOverrideUrlLoading( view, url = " + url.toString() + " )" );

      if ( url != null && url.startsWith( mRedirectUri ) )
        {
        Uri uri = Uri.parse( url );

        String error = uri.getQueryParameter( "error" );

        if ( error != null )
          {
          String errorMessage = getLoginErrorMessage( uri );

          mWebView.stopLoading();

          loadLoginPage();

          showErrorDialog( errorMessage );
          }
        else
          {
          String fragment    = uri.getFragment();
          String accessToken = fragment.substring( "access_token=".length() );

          gotAccessToken( accessToken );
          }

        return true;
        }

      return false;
      }

    public void onPageStarted( WebView view, String url, Bitmap favicon )
      {
      if ( DEBUGGING_ENABLED ) Log.d( LOG_TAG, "onPageStarted( view, url = " + url.toString() + ", favicon )" );
      }

    public void onPageFinished( WebView view, String url )
      {
      if ( DEBUGGING_ENABLED ) Log.d( LOG_TAG, "onPageFinished( view, url = " + url.toString() + " )" );
      }

    public void onLoadResource( WebView view, String url )
      {
      if ( DEBUGGING_ENABLED ) Log.d( LOG_TAG, "onLoadResources( view, url = " +
url.toString() + " )" ); } }; private class CancelButtonClickListener implements AlertDialog.OnClickListener { @Override public void onClick( DialogInterface dialog, int which ) { setResult( RESULT_CANCELED ); finish(); } } }
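/*****************************************************
 *
 * Usage sketch (illustrative only - the request code and the client id /
 * redirect URI values below are placeholders, not values defined by this class):
 *
 *   private static final int REQUEST_CODE_INSTAGRAM_LOGIN = 23;
 *
 *   InstagramLoginActivity.startLoginForResult( this, "my-client-id", "https://example.com/redirect", REQUEST_CODE_INSTAGRAM_LOGIN );
 *
 *   // ... then, in the calling Activity:
 *   @Override
 *   protected void onActivityResult( int requestCode, int resultCode, Intent data )
 *     {
 *     if ( requestCode == REQUEST_CODE_INSTAGRAM_LOGIN && resultCode == RESULT_OK )
 *       {
 *       String accessToken = InstagramLoginActivity.getAccessToken( data );
 *       // ... use the access token with the Instagram API ...
 *       }
 *     }
 *
 *****************************************************/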
def _sanitize(header_tuple):
    """Redact bearer tokens so (header, value) tuples can be logged safely."""
    header, value = header_tuple

    if header.strip().lower() == "authorization" and "bearer" in value.lower():
        return header, "Bearer <redacted>"
    return header_tuple
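# Illustration (hypothetical header values, not taken from any real request):
#   _sanitize(("Authorization", "Bearer abc123"))   -> ("Authorization", "Bearer <redacted>")
#   _sanitize(("Content-Type", "application/json")) -> ("Content-Type", "application/json")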
import math import itertools import operator primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97] def get_prime_mult(n): m = [0] * 25 if n < 2: return m sq = int(math.sqrt(n)) p = 0 while primes[p] <= sq: if n % primes[p] != 0: p += 1 else: m[p] += 1 n //= primes[p] sq = int(math.sqrt(n)) m[primes.index(n)] += 1 return m a, b, c = map(int, input().split()) mtab = [] for i in range(max((a, b, c)) + 1): mtab.append(get_prime_mult(i)) total = 0 mults_cnt = {} for i in range(1, a + 1): for j in range(1, b + 1): for k in range(1, c + 1): if i * j * k not in mults_cnt: mults = [sum(triple) + 1 for triple in zip(mtab[i], mtab[j], mtab[k])] mults_cnt[i * j * k] = list(itertools.accumulate(mults, operator.mul))[-1] total += mults_cnt[i * j * k] print(total & 0x3FFFFFFF)
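# Worked example (assuming the expected input of three integers "a b c" on one line):
# for "2 2 2" the triple loop visits the products 1, 2, 2, 4, 2, 4, 4, 8, whose divisor
# counts are 1, 2, 2, 3, 2, 3, 3, 4, so the program prints 20 (the sum masked to 30 bits).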
/** Initialise the view, letting it know the data it will have to display. */ @CallSuper public void bindDataSource(TileGroup tileGroup, TileRenderer tileRenderer) { mTileGroup = tileGroup; mTileRenderer = tileRenderer; }
import express from 'express';
import 'reflect-metadata';
import './database';
import routes from './routes';
import bodyParser from 'body-parser';
import cors from 'cors';
import awsConfig from './config/aws';

const app = express();

app.use(cors());
app.use(bodyParser.json({ limit: '100mb' }));
app.use(bodyParser.urlencoded({ limit: '100mb', extended: true }));
app.use(routes);

app.listen(3334, () => {
  console.log('Server started on port 3334');
});
<gh_stars>0
package test;

import main.com.mmj.dao.IUser;
import main.com.mmj.entity.User;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;

import java.io.InputStream;
import java.util.List;

/**
 * Created by MaMingJiang on 2016/6/12.
 * Use an interface whose methods properly describe the parameters and the return values of the
 * SQL statements (for example: IUser.class). This gives simpler, safer code, without the
 * string-literal and type-casting mistakes that are otherwise easy to make.
 *
 * With this approach the userMapper.xml file can be omitted.
 */
public class App1 {
    private static SqlSessionFactory sqlSessionFactory;
    private static String resource = "conf.xml";
    static {
        InputStream inputStream = App1.class.getClassLoader().getResourceAsStream(resource);
        sqlSessionFactory = new SqlSessionFactoryBuilder().build(inputStream);
    }
    public static SqlSessionFactory getSession() {
        return sqlSessionFactory;
    }
    public static void main(String[] args) {
//        sqlSessionFactory.getConfiguration().addMapper(IUser.class);
        SqlSession sqlSession = sqlSessionFactory.openSession();
        IUser iuser = sqlSession.getMapper(IUser.class);
        User user = iuser.getUserById(1);
        if (user != null) {
            System.out.println(user);
        }
    }
}
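/*
 * Sketch of what the IUser mapper interface used above might look like. This is
 * hypothetical - the real main.com.mmj.dao.IUser is not shown in this snippet, and the
 * table and column names in the @Select statement are assumptions. With an annotated
 * mapper like this, no userMapper.xml is needed.
 */
package main.com.mmj.dao;

import main.com.mmj.entity.User;
import org.apache.ibatis.annotations.Select;

public interface IUser {
    @Select("SELECT * FROM user WHERE id = #{id}")
    User getUserById(int id);
}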
<filename>aiida/restapi/resources.py # -*- coding: utf-8 -*- ########################################################################### # Copyright (c), The AiiDA team. All rights reserved. # # This file is part of the AiiDA code. # # # # The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### """ Resources for REST API """ from __future__ import division from __future__ import print_function from __future__ import absolute_import from six.moves.urllib.parse import unquote # pylint: disable=import-error from flask import request, make_response from flask_restful import Resource from aiida.restapi.common.utils import Utils class ServerInfo(Resource): # pylint: disable=fixme """Endpointd to return general server info""" def __init__(self, **kwargs): # Configure utils utils_conf_keys = ('PREFIX', 'PERPAGE_DEFAULT', 'LIMIT_DEFAULT') self.utils_confs = {k: kwargs[k] for k in utils_conf_keys if k in kwargs} self.utils = Utils(**self.utils_confs) def get(self): """ It returns the general info about the REST API :return: returns current AiiDA version defined in aiida/__init__.py """ ## Decode url parts path = unquote(request.path) url = unquote(request.url) url_root = unquote(request.url_root) pathlist = self.utils.split_path(self.utils.strip_api_prefix(path)) if len(pathlist) > 1: resource_type = pathlist.pop(1) else: resource_type = "info" response = {} import aiida.restapi.common.config as conf from aiida import __version__ if resource_type == "info": response = [] # Add Rest API version response.append("REST API version: " + conf.PREFIX.split("/")[-1]) # Add Rest API prefix response.append("REST API Prefix: " + conf.PREFIX) # Add AiiDA version response.append("AiiDA==" + __version__) elif resource_type == "endpoints": from aiida.restapi.common.utils import list_routes response["available_endpoints"] = list_routes() headers = self.utils.build_headers(url=request.url, total_count=1) ## Build response and return it data = dict( method=request.method, url=url, url_root=url_root, path=path, query_string=request.query_string.decode('utf-8'), resource_type="Info", data=response) return self.utils.build_response(status=200, headers=headers, data=data) class BaseResource(Resource): # pylint: disable=fixme """ Each derived class will instantiate a different type of translator. This is the only difference in the classes. """ ## TODO add the caching support. 
I cache total count, results, and possibly def __init__(self, **kwargs): self.trans = None # Flag to tell the path parser whether to expect a pk or a uuid pattern self.parse_pk_uuid = None # Configure utils utils_conf_keys = ('PREFIX', 'PERPAGE_DEFAULT', 'LIMIT_DEFAULT') self.utils_confs = {k: kwargs[k] for k in utils_conf_keys if k in kwargs} self.utils = Utils(**self.utils_confs) self.method_decorators = {'get': kwargs.get('get_decorators', [])} def get(self, id=None, page=None): # pylint: disable=redefined-builtin,invalid-name,unused-argument # pylint: disable=too-many-locals """ Get method for the resource :param id: node identifier :param page: page no, used for pagination :return: http response """ ## Decode url parts path = unquote(request.path) query_string = unquote(request.query_string.decode('utf-8')) url = unquote(request.url) url_root = unquote(request.url_root) ## Parse request (resource_type, page, node_id, query_type) = self.utils.parse_path(path, parse_pk_uuid=self.parse_pk_uuid) # pylint: disable=unused-variable (limit, offset, perpage, orderby, filters, _alist, _nalist, _elist, _nelist, _downloadformat, _visformat, _filename, _rtype, tree_in_limit, tree_out_limit) = self.utils.parse_query_string(query_string) ## Validate request self.utils.validate_request( limit=limit, offset=offset, perpage=perpage, page=page, query_type=query_type, is_querystring_defined=(bool(query_string))) ## Treat the schema case which does not imply access to the DataBase if query_type == 'schema': ## Retrieve the schema results = self.trans.get_schema() ## Build response and return it headers = self.utils.build_headers(url=request.url, total_count=1) else: ## Set the query, and initialize qb object self.trans.set_query(filters=filters, orders=orderby, node_id=node_id) ## Count results total_count = self.trans.get_total_count() ## Pagination (if required) if page is not None: (limit, offset, rel_pages) = self.utils.paginate(page, perpage, total_count) self.trans.set_limit_offset(limit=limit, offset=offset) headers = self.utils.build_headers(rel_pages=rel_pages, url=request.url, total_count=total_count) else: self.trans.set_limit_offset(limit=limit, offset=offset) headers = self.utils.build_headers(url=request.url, total_count=total_count) ## Retrieve results results = self.trans.get_results() ## Build response and return it data = dict( method=request.method, url=url, url_root=url_root, path=request.path, id=node_id, query_string=request.query_string.decode('utf-8'), resource_type=resource_type, data=results) return self.utils.build_response(status=200, headers=headers, data=data) class Node(Resource): """ Differs from BaseResource in trans.set_query() mostly because it takes query_type as an input and the presence of additional result types like "tree" """ def __init__(self, **kwargs): # Set translator from aiida.restapi.translator.node import NodeTranslator self.trans = NodeTranslator(**kwargs) from aiida.orm import Node as tNode self.tclass = tNode # Parse a uuid pattern in the URL path (not a pk) self.parse_pk_uuid = 'uuid' # Configure utils utils_conf_keys = ('PREFIX', 'PERPAGE_DEFAULT', 'LIMIT_DEFAULT') self.utils_confs = {k: kwargs[k] for k in utils_conf_keys if k in kwargs} self.utils = Utils(**self.utils_confs) self.method_decorators = {'get': kwargs.get('get_decorators', [])} def get(self, id=None, page=None): # pylint: disable=redefined-builtin,invalid-name,unused-argument # pylint: disable=too-many-locals,too-many-statements,too-many-branches,fixme """ Get method for the Node 
resource. :param id: node identifier :param page: page no, used for pagination :return: http response """ ## Decode url parts path = unquote(request.path) query_string = unquote(request.query_string.decode('utf-8')) url = unquote(request.url) url_root = unquote(request.url_root) ## Parse request (resource_type, page, node_id, query_type) = self.utils.parse_path(path, parse_pk_uuid=self.parse_pk_uuid) (limit, offset, perpage, orderby, filters, alist, nalist, elist, nelist, downloadformat, visformat, filename, rtype, tree_in_limit, tree_out_limit) = self.utils.parse_query_string(query_string) ## Validate request self.utils.validate_request( limit=limit, offset=offset, perpage=perpage, page=page, query_type=query_type, is_querystring_defined=(bool(query_string))) ## Treat the schema case which does not imply access to the DataBase if query_type == 'schema': ## Retrieve the schema results = self.trans.get_schema() ## Build response and return it headers = self.utils.build_headers(url=request.url, total_count=1) ## Treat the statistics elif query_type == "statistics": (limit, offset, perpage, orderby, filters, alist, nalist, elist, nelist, downloadformat, visformat, filename, rtype, tree_in_limit, tree_out_limit) = self.utils.parse_query_string(query_string) headers = self.utils.build_headers(url=request.url, total_count=0) if filters: usr = filters["user"]["=="] else: usr = None results = self.trans.get_statistics(usr) # TODO improve the performance of tree endpoint by getting the data from database faster # TODO add pagination for this endpoint (add default max limit) elif query_type == "tree": headers = self.utils.build_headers(url=request.url, total_count=0) results = self.trans.get_io_tree(node_id, tree_in_limit, tree_out_limit) else: ## Initialize the translator self.trans.set_query( filters=filters, orders=orderby, query_type=query_type, node_id=node_id, alist=alist, nalist=nalist, elist=elist, nelist=nelist, downloadformat=downloadformat, visformat=visformat, filename=filename, rtype=rtype) ## Count results total_count = self.trans.get_total_count() ## Pagination (if required) if page is not None: (limit, offset, rel_pages) = self.utils.paginate(page, perpage, total_count) self.trans.set_limit_offset(limit=limit, offset=offset) ## Retrieve results results = self.trans.get_results() headers = self.utils.build_headers(rel_pages=rel_pages, url=request.url, total_count=total_count) else: self.trans.set_limit_offset(limit=limit, offset=offset) ## Retrieve results results = self.trans.get_results() if query_type == "download" and results: if results["download"]["status"] == 200: data = results["download"]["data"] response = make_response(data) response.headers['content-type'] = 'application/octet-stream' response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format( results["download"]["filename"]) return response results = results["download"]["data"] if query_type in ["retrieved_inputs", "retrieved_outputs"] and results: try: status = results[query_type]["status"] except KeyError: status = "" except TypeError: status = "" if status == 200: data = results[query_type]["data"] response = make_response(data) response.headers['content-type'] = 'application/octet-stream' response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format( results[query_type]["filename"]) return response results = results[query_type] headers = self.utils.build_headers(url=request.url, total_count=total_count) ## Build response data = dict( method=request.method, url=url, url_root=url_root, 
path=path, id=node_id, query_string=request.query_string.decode('utf-8'), resource_type=resource_type, data=results) return self.utils.build_response(status=200, headers=headers, data=data) class Computer(BaseResource): """ Resource for Computer """ def __init__(self, **kwargs): super(Computer, self).__init__(**kwargs) ## Instantiate the correspondent translator from aiida.restapi.translator.computer import ComputerTranslator self.trans = ComputerTranslator(**kwargs) # Set wheteher to expect a pk (integer) or a uuid pattern (string) in # the URL path self.parse_pk_uuid = "uuid" class Group(BaseResource): """ Resource for Group """ def __init__(self, **kwargs): super(Group, self).__init__(**kwargs) from aiida.restapi.translator.group import GroupTranslator self.trans = GroupTranslator(**kwargs) self.parse_pk_uuid = 'uuid' class User(BaseResource): """ Resource for User """ def __init__(self, **kwargs): super(User, self).__init__(**kwargs) from aiida.restapi.translator.user import UserTranslator self.trans = UserTranslator(**kwargs) self.parse_pk_uuid = 'pk' class Calculation(Node): """ Resource for Calculation """ def __init__(self, **kwargs): super(Calculation, self).__init__(**kwargs) from aiida.restapi.translator.calculation import CalculationTranslator self.trans = CalculationTranslator(**kwargs) from aiida.orm import CalcJobNode as CalculationTclass self.tclass = CalculationTclass self.parse_pk_uuid = 'uuid' class Code(Node): """ Resource for Code """ def __init__(self, **kwargs): super(Code, self).__init__(**kwargs) from aiida.restapi.translator.code import CodeTranslator self.trans = CodeTranslator(**kwargs) from aiida.orm import Code as CodeTclass self.tclass = CodeTclass self.parse_pk_uuid = 'uuid' class Data(Node): """ Resource for Data node """ def __init__(self, **kwargs): super(Data, self).__init__(**kwargs) from aiida.restapi.translator.data import DataTranslator self.trans = DataTranslator(**kwargs) from aiida.orm import Data as DataTclass self.tclass = DataTclass self.parse_pk_uuid = 'uuid' class StructureData(Data): """ Resource for structure data """ def __init__(self, **kwargs): super(StructureData, self).__init__(**kwargs) from aiida.restapi.translator.data.structure import \ StructureDataTranslator self.trans = StructureDataTranslator(**kwargs) from aiida.orm import StructureData as StructureDataTclass self.tclass = StructureDataTclass self.parse_pk_uuid = 'uuid' class KpointsData(Data): """ Resource for kpoints data """ def __init__(self, **kwargs): super(KpointsData, self).__init__(**kwargs) from aiida.restapi.translator.data.kpoints import KpointsDataTranslator self.trans = KpointsDataTranslator(**kwargs) from aiida.orm import KpointsData as KpointsDataTclass self.tclass = KpointsDataTclass self.parse_pk_uuid = 'uuid' class BandsData(Data): """ Resource for Bands data """ def __init__(self, **kwargs): super(BandsData, self).__init__(**kwargs) from aiida.restapi.translator.data.bands import \ BandsDataTranslator self.trans = BandsDataTranslator(**kwargs) from aiida.orm import BandsData as BandsDataTclass self.tclass = BandsDataTclass self.parse_pk_uuid = 'uuid' class CifData(Data): """ Resource for cif data """ def __init__(self, **kwargs): super(CifData, self).__init__(**kwargs) from aiida.restapi.translator.data.cif import \ CifDataTranslator self.trans = CifDataTranslator(**kwargs) from aiida.orm import CifData as CifDataTclass self.tclass = CifDataTclass self.parse_pk_uuid = 'uuid' class UpfData(Data): """ Resource for upf data """ def __init__(self, **kwargs): 
super(UpfData, self).__init__(**kwargs) from aiida.restapi.translator.data.upf import \ UpfDataTranslator self.trans = UpfDataTranslator(**kwargs) from aiida.orm import UpfData as UpfDataTclass self.tclass = UpfDataTclass self.parse_pk_uuid = 'uuid'
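# Minimal wiring sketch (assumed, not part of this module): these resources receive their
# configuration through flask_restful's resource_class_kwargs. The URL prefix and the
# page-size values below are placeholders.
#
# from flask import Flask
# from flask_restful import Api
# from aiida.restapi.resources import Computer, ServerInfo
#
# app = Flask(__name__)
# api = Api(app)
# common_kwargs = {'PREFIX': '/api/v2', 'PERPAGE_DEFAULT': 20, 'LIMIT_DEFAULT': 400}
#
# api.add_resource(ServerInfo, '/api/v2/server/', resource_class_kwargs=common_kwargs)
# api.add_resource(Computer, '/api/v2/computers/', '/api/v2/computers/<id>/',
#                  resource_class_kwargs=common_kwargs)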
/** * Adds additional data to the currently selected clipboard and Window * if it does not already exist. * * \param cliptype The Atom to set the data for. * \param data The data to set. * \param srclen The length of the data. */ static void _add_clip_data (Atom cliptype, char *data, int srclen) { Atom clip = GET_CLIPATOM(_currentmode); PyObject* dict = (_currentmode == SCRAP_CLIPBOARD) ? _clipdata : _selectiondata; PyObject *tmp; char *key = _atom_to_string (cliptype); tmp = Bytes_FromStringAndSize (data, srclen); PyDict_SetItemString (dict, key, tmp); Py_DECREF (tmp); XChangeProperty (SDL_Display, SDL_Window, clip, cliptype, 8, PropModeReplace, (unsigned char *) data, srclen); free (key); }
//--------------------------------------------------------------------------- // // Member: CHTMLDlg::AccessAllowed() // // Synopsis: Checks if the dialog did not leave the original URL domain // (via redirection or META-REFRESH for example). This is to prevent // security hole when dialogArguments and other values get cached during // cross-domain redirection and can be used by gullable target html dialog. // //--------------------------------------------------------------------------- BOOL CHTMLDlg::AccessAllowed() { HRESULT hr; BOOL retVal = FALSE; CDoc *pDoc; CMarkup *pDlgMarkup; LPTSTR pchUrl = NULL; AAINDEX aaindex; IUnknown *pUnk; IServiceProvider *pSP = NULL; IDispatch *pDisp = NULL; BYTE abSID1[MAX_SIZE_SECURITY_ID]; DWORD cbSID1 = ARRAY_SIZE(abSID1); BYTE abSID2[MAX_SIZE_SECURITY_ID]; DWORD cbSID2 = ARRAY_SIZE(abSID2); extern DWORD g_dwAppCompat; if(!_pUnkObj || !_pOriginalMoniker) goto Cleanup; if (g_dwAppCompat) { if(_pUnkObj->QueryInterface(CLSID_HTMLDocument, (void **)&pDoc)) goto Cleanup; pDlgMarkup = pDoc->PrimaryMarkup(); if(!pDlgMarkup) goto Cleanup; if(_pOriginalMoniker->GetDisplayName(NULL, NULL, &pchUrl)) goto Cleanup; retVal = pDlgMarkup->AccessAllowed(pchUrl); } else { aaindex = FindAAIndex( DISPID_INTERNAL_INVOKECONTEXT, CAttrValue::AA_Internal); if (aaindex == AA_IDX_UNKNOWN) { retVal = TRUE; goto Cleanup; } if (FAILED(GetUnknownObjectAt(aaindex, &pUnk)) || pUnk == NULL) goto Cleanup; hr = pUnk->QueryInterface(IID_IServiceProvider, (void**)&pSP); pUnk->Release(); if (FAILED(hr) || !pSP) goto Cleanup; hr = GetCallerIDispatch(pSP, &pDisp); pSP->Release(); if (FAILED(hr) || !pDisp) goto Cleanup; hr = GetSIDOfDispatch(pDisp, abSID1, &cbSID1); pDisp->Release(); if (FAILED(hr)) goto Cleanup; if(_pOriginalMoniker->GetDisplayName(NULL, NULL, &pchUrl)) goto Cleanup; if (FAILED(THR(TLS(windowInfo.pSecMgr)->GetSecurityId( pchUrl, abSID2, &cbSID2, NULL)))) goto Cleanup; retVal = (cbSID1 == cbSID2 && !memcmp(abSID1, abSID2, cbSID1)); } Cleanup: if(pchUrl) CoTaskMemFree(pchUrl); return retVal; }
<gh_stars>1-10 import { bytesEqual } from '@rigidity/bls-signatures'; import { Program } from '../index'; import { Group } from './helpers'; const atomMatch = new TextEncoder().encode('$'); const sexpMatch = new TextEncoder().encode(':'); export function unifyBindings( bindings: Group, key: string, valueProgram: Program ): Group | null { if (key in bindings) { if (!bindings[key].equals(valueProgram)) return null; return bindings; } return { ...bindings, [key]: valueProgram }; } export function match( pattern: Program, sexp: Program, knownBindings: Group = {} ): Group | null { if (!pattern.isCons) { if (sexp.isCons) return null; return bytesEqual(pattern.atom, sexp.atom) ? knownBindings : null; } const left = pattern.first; const right = pattern.rest; if (left.isAtom && bytesEqual(left.atom, atomMatch)) { if (sexp.isCons) return null; if (right.isAtom && bytesEqual(right.atom, atomMatch)) { if (bytesEqual(sexp.atom, atomMatch)) return {}; return null; } return unifyBindings(knownBindings, right.toText(), sexp); } if (left.isAtom && bytesEqual(left.atom, sexpMatch)) { if (right.isAtom && bytesEqual(right.atom, sexpMatch)) { if (bytesEqual(sexp.atom, sexpMatch)) return {}; return null; } return unifyBindings(knownBindings, right.toText(), sexp); } if (!sexp.isCons) return null; const newBindings = match(left, sexp.first, knownBindings); if (!newBindings) return newBindings; return match(right, sexp.rest, newBindings); }
/// Construct a set of room emotes from twitch. async fn emote_sets_from_twitch(&self, emote_sets: &str) -> Result<EmoteByCode, Error> { let result = self .inner .twitch .v5_chat_emoticon_images(emote_sets) .await?; let mut emotes = EmoteByCode::default(); for (_, set) in result.emoticon_sets { for e in set { emotes.insert(e.code, Self::twitch_emote(e.id)); } } Ok(emotes) }
/** * This is the Accumulo implementation of the data store. It requires an AccumuloOperations instance * that describes how to connect (read/write data) to Apache Accumulo. It can create default * implementations of the IndexStore and AdapterStore based on the operations which will persist * configuration information to Accumulo tables, or an implementation of each of these stores can be * passed in A DataStore can both ingest and query data based on persisted indices and data * adapters. When the data is ingested it is explicitly given an index and a data adapter which is * then persisted to be used in subsequent queries. */ public class AccumuloDataStore extends BaseMapReduceDataStore implements Closeable { private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloDataStore.class); public AccumuloDataStore( final AccumuloOperations accumuloOperations, final AccumuloOptions accumuloOptions) { super( new IndexStoreImpl(accumuloOperations, accumuloOptions), new AdapterStoreImpl(accumuloOperations, accumuloOptions), new DataStatisticsStoreImpl(accumuloOperations, accumuloOptions), new AdapterIndexMappingStoreImpl(accumuloOperations, accumuloOptions), accumuloOperations, accumuloOptions, new InternalAdapterStoreImpl(accumuloOperations), new PropertyStoreImpl(accumuloOperations, accumuloOptions)); } @Override protected SplitsProvider createSplitsProvider() { return new AccumuloSplitsProvider(); } @Override protected void initOnIndexWriterCreate(final InternalDataAdapter adapter, final Index index) { final String indexName = index.getName(); final String typeName = adapter.getTypeName(); try { if (adapter.getAdapter() instanceof RowMergingDataAdapter) { if (!((AccumuloOperations) baseOperations).isRowMergingEnabled( adapter.getAdapterId(), indexName)) { if (!((AccumuloOperations) baseOperations).createTable( indexName, false, baseOptions.isEnableBlockCache())) { ((AccumuloOperations) baseOperations).enableVersioningIterator(indexName, false); } if (baseOptions.isServerSideLibraryEnabled()) { ServerOpHelper.addServerSideRowMerging( ((RowMergingDataAdapter<?, ?>) adapter.getAdapter()), adapter.getAdapterId(), (ServerSideOperations) baseOperations, RowMergingCombiner.class.getName(), RowMergingVisibilityCombiner.class.getName(), indexName); } } } if (((AccumuloOptions) baseOptions).isUseLocalityGroups() && !((AccumuloOperations) baseOperations).localityGroupExists( indexName, adapter.getTypeName())) { ((AccumuloOperations) baseOperations).addLocalityGroup( indexName, adapter.getTypeName(), adapter.getAdapterId()); } } catch (AccumuloException | TableNotFoundException | AccumuloSecurityException e) { LOGGER.error("Unable to determine existence of locality group [" + typeName + "]", e); } } /** * This is not a typical resource, it references a static Accumulo connector used by all DataStore * instances with common connection parameters. Closing this is only recommended when the JVM no * longer needs any connection to this Accumulo store with common connection parameters. 
*/ @Override public void close() { ((AccumuloOperations) baseOperations).close(); } @Override public List<InputSplit> getSplits( final CommonQueryOptions commonOptions, final DataTypeQueryOptions<?> typeOptions, final IndexQueryOptions indexOptions, final QueryConstraints constraints, final TransientAdapterStore adapterStore, final AdapterIndexMappingStore aimStore, final DataStatisticsStore statsStore, final InternalAdapterStore internalAdapterStore, final IndexStore indexStore, final JobContext context, final Integer minSplits, final Integer maxSplits) throws IOException, InterruptedException { context.getConfiguration().setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); context.getConfiguration().setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true); return super.getSplits( commonOptions, typeOptions, indexOptions, constraints, adapterStore, aimStore, statsStore, internalAdapterStore, indexStore, context, minSplits, maxSplits); } @Override public void prepareRecordWriter(final Configuration conf) { // because accumulo requires a more recent version of guava 22.0, this user // classpath must override the default hadoop classpath which has an old // version of guava or there will be incompatibility issues conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true); } }
/** * Minimum value is 1 if included. */ public Discover page(Integer page) { if (page != null && page > 0) { params.put(AbstractTmdbApi.PARAM_PAGE, String.valueOf(page)); } return this; }
/** * This method initializes the arguments and calls the execute method. * * @throws AutomicException * exception while executing an action */ public final void execute() throws AutomicException { try { prepareCommonInputs(); client = HttpClientConfig.getClient(this.connectionTimeOut, this.readTimeOut); client.addFilter(new AuthenticationFilter(atToken)); client.addFilter(new GenericResponseFilter()); executeSpecific(); ConsoleWriter.writeln("AUTH_TOKEN::=" + atToken.encrypt()); } finally { if (client != null) { client.destroy(); } } }
<reponame>aykutsahin98/ReCapProject-Frontend import { Injectable } from '@angular/core'; import {HttpClient} from "@angular/common/http"; import { Observable } from 'rxjs'; import { listResponseModel } from '../models/listResponseModel'; import { Rental } from '../models/rental'; import { RentalDto } from '../models/rentalDto'; import { ResponseModel } from '../models/responseModel'; @Injectable({ providedIn: 'root' }) export class RentalService { apiUrl="https://localhost:44340/api"; constructor(private httpClient:HttpClient) { } getRentals():Observable<listResponseModel<RentalDto>> { let newPath= this.apiUrl+"/rentals/getallrentaldetails"; return this.httpClient.get<listResponseModel<RentalDto>>(newPath) } addRentals(rental:Rental):Observable<ResponseModel> { let newPath= this.apiUrl+"/rentals/add"; return this.httpClient.post<ResponseModel>(newPath,{rental:rental}) } }
t = int(input()) x = input().split(" ") test = [] police = 0 crimes = 0 for y in x: test.append(int(y)) for case in test: if case > 0: police += case else: if police > 0: police -= 1 else: crimes += 1 print(crimes)
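# Example run (matching the expected input of a count line followed by the events line):
#   input:  "3" then "-1 -1 1"  ->  prints 2
#   (the first two crimes arrive while no officers are available, so they go uninvestigated)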
<filename>src/header.h<gh_stars>1-10
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <netinet/in.h>
#include <string.h>
#include <dirent.h>
#include <time.h>
#include <arpa/inet.h>
#include <fcntl.h>
#include <errno.h>
#include <pthread.h>

#define BUFFER_SIZE 1024

//struct sockaddr_in address;

struct dadosConecao{
    int socketCliente;                            // client connection socket
    int pasv_listen_socket, data_transfer_socket; // data-connection sockets
    int port_or_pasv;                             // flag for the transfer mode: port = 0, pasv = 1
};

void lb(char *buffer);
int create_pasv_listen_socket(int port);
int aceitar_conexao(int socket);
void str_perm(int perm, char *str_perm);
void *comandos(void * dclientes);
int start_server(int port);
An Everett postal facility that serves communities from Lynnwood north to the Canadian border is still planned for closure. Just when that closure might happen isn’t clear.

The Postal Service said it would hold off on shutting down any mail processing facilities until May 15. That deadline is fast approaching but there’s no date for the Everett facility closure, said U.S. Postal Service spokesman Ernie Swanson.

“As far as we are concerned, nothing has changed,” he said.

The U.S. Postal Service announced Wednesday it wants to keep open hundreds of rural post offices by reducing their hours. That doesn’t include the Everett mail processing facility at 8120 Hardeson Road.

The closure of the Everett facility is important because nearly 100 people would lose their jobs. The public also would lose next-day delivery of first-class mail sent in Western Washington.

Swanson said he knew of no major announcements coming in the next few weeks that concern the Everett mail processing facility. However, the Associated Press reported today the Postal Service “also will announce new changes next week involving its proposal to close up to 252 mail processing centers.”

A bill passed by the Senate two weeks ago halts the immediate closing of 252 mail processing centers, including Everett. That amounts to a hill of beans without buy-in from the House.

Rep. Rick Larsen’s spokesman, Bryan Thomas, gave me an update on what they see going on in the other Washington:

“It’s not clear what the House is going to do at this point. The House will probably consider its own bills instead, and then the Senate and House will hammer out the differences for a final package. But at this point we don’t have a clear sense on the path forward.

“Rick has asked that the Postal Service delays closures until Congress takes action, so it is still his hope that the Everett facility will stay open.”

Everett postal employees, what are you hearing?
/**
 * Created by alfred on 3/13/16.
 */
public class CityDataStructure {

    private String cityName;
    private String countryName;
    private String cityID;

    // constructor
    public CityDataStructure(String cityId, String city, String country) {
        this.cityName = city;
        this.countryName = country;
        this.cityID = cityId;
    }

    // getters and setters
    public String getCityName() {
        return cityName;
    }

    public void setCityName(String name) {
        this.cityName = name;
    }

    public String getCountry() {
        return countryName;
    }

    public String getCityID() {
        return cityID;
    }

    public void setCityID(String cityId) {
        this.cityID = cityId;
    }

    public void setCountryName(String code) {
        this.countryName = code;
    }
}
def count(self, filter): counter = 0 for source in self.listing(): counter += source.count(filter) return counter
// CredentialsJSON returns an Option that authenticates API calls using the credentials file in jsonKey. func CredentialsJSON(jsonKey []byte) Option { return WithTokenSourceFactory(func(ctx context.Context, scopes ...string) (oauth2.TokenSource, error) { cfg, err := google.JWTConfigFromJSON(jsonKey, scopes...) if err != nil { return nil, fmt.Errorf("parse credentials: %w", err) } return cfg.TokenSource(ctx), nil }) }
import { useState } from "react"; import { Redirect } from "react-router-dom"; import { useSelector } from "../../redux/store"; import { auth, signInWithGoogle } from "../../api/firebase"; import { Container, Form, Heading, Input, Button, Link } from "./SignInStyles"; const SignIn = () => { const [userCredentials, setCredentials] = useState({ email: "", password: "", }); const userId = useSelector((state) => state.user.userId); const { email, password } = userCredentials; const handleSubmit = async (event: React.FormEvent) => { event.preventDefault(); try { await auth.signInWithEmailAndPassword(email, password); setCredentials({ email: "", password: "" }); } catch (error) { console.error(error); } }; const handleChange = (event: React.ChangeEvent<HTMLInputElement>) => { const { value, name } = event.target; setCredentials({ ...userCredentials, [name]: value }); }; if (userId) return <Redirect to="/" />; return ( <Container> <Form onSubmit={handleSubmit}> <Heading>Sign In</Heading> <Input name="email" type="email" onChange={handleChange} value={email} placeholder="Email" autoComplete="email" required /> <Input name="password" type="password" value={password} onChange={handleChange} placeholder="Password" autoComplete="current-password" required /> <Button type="submit">Sign In</Button> <Button type="button" google={true} onClick={signInWithGoogle}> Sign in with Google </Button> <Link to="/signup">{"New to SMDB? Sign up"}</Link> </Form> </Container> ); }; export default SignIn;
<reponame>MrCroxx/RunKV use std::collections::btree_map::{BTreeMap, Entry}; use std::sync::Arc; use async_trait::async_trait; use itertools::Itertools; use runkv_common::channel_pool::ChannelPool; use runkv_common::packer::Packer; use runkv_proto::wheel::raft_service_client::RaftServiceClient; use runkv_proto::wheel::RaftRequest; use tokio::sync::RwLock; use tonic::transport::Channel; use tonic::Request; use crate::error::{Error, RaftManageError, Result}; const MESSAGE_PACKER_QUEUE_DEFAULT_CAPACITY: usize = 128; #[async_trait] pub trait RaftNetwork: Send + Sync + Clone + 'static { type RaftClient: RaftClient; /// Register raft node info to raft network. `raft_nodes` maps raft node id to node id. /// /// Raft info must be registered first before building raft worker. async fn register(&self, group: u64, raft_nodes: BTreeMap<u64, u64>) -> Result<()>; async fn client(&self, raft_node: u64) -> Result<Self::RaftClient>; async fn recv(&self, msgs: Vec<raft::prelude::Message>) -> Result<()>; async fn get_message_packer( &self, raft_node: u64, ) -> Result<Packer<raft::prelude::Message, ()>>; } #[async_trait] pub trait RaftClient: Send + Sync + Clone + 'static { async fn send(&mut self, msgs: Vec<raft::prelude::Message>) -> Result<()>; } #[derive(Clone)] pub struct GrpcRaftClient { client: RaftServiceClient<Channel>, } impl GrpcRaftClient { pub fn new(client: RaftServiceClient<Channel>) -> Self { Self { client } } } #[async_trait] impl RaftClient for GrpcRaftClient { async fn send(&mut self, msgs: Vec<raft::prelude::Message>) -> Result<()> { let data = bincode::serialize(&msgs).map_err(Error::serde_err)?; let req = RaftRequest { data }; self.client .raft(Request::new(req)) .await .map_err(Error::RpcStatus)?; Ok(()) } } struct GrpcRaftNetworkCore { /// `{ raft node -> node }` raft_nodes: BTreeMap<u64, u64>, /// `{ raft node -> message packer }` message_packers: BTreeMap<u64, Packer<raft::prelude::Message, ()>>, /// `{ group -> [ raft node, .. 
] }` groups: BTreeMap<u64, Vec<u64>>, } #[derive(Clone)] pub struct GrpcRaftNetwork { node: u64, core: Arc<RwLock<GrpcRaftNetworkCore>>, channel_pool: ChannelPool, } impl GrpcRaftNetwork { pub fn new(node: u64, channel_pool: ChannelPool) -> Self { Self { node, core: Arc::new(RwLock::new(GrpcRaftNetworkCore { raft_nodes: BTreeMap::default(), message_packers: BTreeMap::default(), groups: BTreeMap::default(), })), channel_pool, } } pub async fn raft_nodes(&self, group: u64) -> Result<Vec<u64>> { let guard = self.core.read().await; let raft_nodes = guard .groups .get(&group) .ok_or(RaftManageError::RaftGroupNotExists(group))?; let raft_nodes = raft_nodes.iter().copied().collect_vec(); Ok(raft_nodes) } } #[async_trait] impl RaftNetwork for GrpcRaftNetwork { type RaftClient = GrpcRaftClient; #[tracing::instrument(level = "trace", skip(self))] async fn register(&self, group: u64, raft_nodes: BTreeMap<u64, u64>) -> Result<()> { let mut guard = self.core.write().await; match guard.groups.entry(group) { Entry::Occupied(_) => return Err(RaftManageError::RaftGroupAlreadyExists(group).into()), Entry::Vacant(v) => { v.insert(raft_nodes.keys().copied().collect_vec()); } } for (raft_node, node) in raft_nodes { if guard.raft_nodes.get(&raft_node).is_some() { guard.groups.remove(&group); return Err(RaftManageError::RaftNodeAlreadyExists { group, raft_node, node, } .into()); } guard.raft_nodes.insert(raft_node, node); let message_packer = Packer::new(MESSAGE_PACKER_QUEUE_DEFAULT_CAPACITY); guard.message_packers.insert(raft_node, message_packer); } Ok(()) } // #[tracing::instrument(level = "trace", skip(self))] async fn client(&self, raft_node: u64) -> Result<GrpcRaftClient> { let guard = self.core.read().await; let node = *guard .raft_nodes .get(&raft_node) .ok_or(RaftManageError::RaftNodeNotExists { raft_node, node: self.node, })?; let channel = self.channel_pool.get(node).await.map_err(Error::err)?; let client = RaftServiceClient::new(channel); let client = GrpcRaftClient { client }; Ok(client) } #[tracing::instrument(level = "trace", skip(self))] async fn recv(&self, msgs: Vec<raft::prelude::Message>) -> Result<()> { let guard = self.core.read().await; for msg in msgs { let packer = &guard .message_packers .get(&msg.to) .ok_or(RaftManageError::RaftNodeNotExists { raft_node: msg.to, node: self.node, })?; packer.append(msg, None); } Ok(()) } #[tracing::instrument(level = "trace", skip(self))] async fn get_message_packer( &self, raft_node: u64, ) -> Result<Packer<raft::prelude::Message, ()>> { let guard = self.core.read().await; let packer = guard.message_packers.get(&raft_node).cloned().ok_or( RaftManageError::RaftNodeNotExists { raft_node, node: self.node, }, )?; Ok(packer) } } #[cfg(test)] pub mod tests { use super::*; #[derive(Clone)] pub struct MockRaftClient(Packer<raft::prelude::Message, ()>); #[async_trait] impl RaftClient for MockRaftClient { async fn send(&mut self, msgs: Vec<raft::prelude::Message>) -> Result<()> { for msg in msgs { self.0.append(msg, None); } Ok(()) } } #[derive(Clone)] pub struct MockRaftNetwork(Arc<RwLock<BTreeMap<u64, Packer<raft::prelude::Message, ()>>>>); impl Default for MockRaftNetwork { fn default() -> Self { Self(Arc::new(RwLock::new(BTreeMap::default()))) } } #[async_trait] impl RaftNetwork for MockRaftNetwork { type RaftClient = MockRaftClient; async fn register(&self, _group: u64, raft_nodes: BTreeMap<u64, u64>) -> Result<()> { let mut guard = self.0.write().await; for (raft_node, _) in raft_nodes { if guard.insert(raft_node, Packer::default()).is_some() { 
panic!("redundant raft node"); }; } Ok(()) } async fn client(&self, raft_node: u64) -> Result<MockRaftClient> { let packer = self.0.read().await.get(&raft_node).cloned().unwrap(); Ok(MockRaftClient(packer)) } async fn recv(&self, _msgs: Vec<raft::prelude::Message>) -> Result<()> { unreachable!() } async fn get_message_packer( &self, raft_node: u64, ) -> Result<Packer<raft::prelude::Message, ()>> { Ok(self.0.read().await.get(&raft_node).cloned().unwrap()) } } }
-------------------------------------------------------------------------------- -- | -- Module : Data.Record -- Copyright : 2012-2013 <NAME> -- License : BSD3 -- Maintainer : <EMAIL> -- Stability : experimental -- -- Data.Record provides a "record transformer" -- implemented as a heterogenous -- linked list similar to HList. Data.Record records should have no more -- overhead than actual linked lists, as accessors/keys only exist at the type -- level, which also helps to ensure safety. This module provides some -- convenience 'QuasiQuoter's for syntactically easier record updates/accesses. -- -- TODO: -- * Try and make 'Typeable' and 'Data' instances. -- * Fix unions to actually make unions of the records instead of -- not working (at compile time) when records have duplicate keys -------------------------------------------------------------------------------- -- and in no particular order ... {-# LANGUAGE GADTs , ImplicitParams , TypeFamilies , ConstraintKinds , FlexibleInstances , DataKinds , TypeOperators , PolyKinds , EmptyDataDecls , RankNTypes , FunctionalDependencies , KindSignatures , OverlappingInstances , UndecidableInstances , TemplateHaskell , ScopedTypeVariables , GeneralizedNewtypeDeriving , ExplicitNamespaces #-} module Data.Record ( Record , RecordT(..) -- * Construction , (&) , (&.) , F(..) , nil , Identity(..) , EmptyRecord(..) , Key , (:=) -- * Field accessing , Access(..) , Has , Knock(..) -- * Field updates , Update(..) -- * Field deletion , Delete(..) -- * Transformations , Box(..) , Transform(..) , Run(..) , Runtrans(..) , Transrun(..) -- * Unions , type (++) , Union(..) , CombineWith(..) , AllUnique , IsElem -- * Records with "deeper" transformations , type (:.:) , compose -- * Convenience , Symbol , key , (=:) , (~:) , fields , fieldr ) where import Language.Haskell.TH.Syntax import Language.Haskell.TH.Quote import Language.Haskell.TH.Lib import Control.Monad import Control.Applicative import Control.Monad.Identity import Control.Monad.Reader import Control.Monad.State import Data.Monoid import GHC.TypeLits instance Show a => Show (Identity a) where show = show . runIdentity -- | A key of a record. This does not exist at runtime, and as a tradeoff, -- you can't do field access from a string and a Typeable context, although -- it would certainly be very nice. data Key k data F a b = F a b type (:=) = 'F -- | Type composition -- Used for cases where a record transformer is "deeper" than normal, e.g. for -- functions like 'newIORef :: a -> IO (IORef a)' newtype (w :.: m) (x :: *) = Wmx (w (m x)) deriving (Show, Eq, Ord, Enum) infixr 9 :.: {-# INLINE compose #-} compose :: (a -> w (m a)) -> a -> (w :.: m) a compose f = Wmx . f -- | The base record transformer data type. Fields are indexed by type-level -- keys, which can be anything. It is very convenient to use -- 'GHC.TypeLits.Symbol' to index record fields, but it is just as valid to -- declare phantom types for them. data RecordT :: (* -> *) -> [F k *] -> * where C :: w e -> RecordT w r -> RecordT w (k := e ': r) E :: RecordT w '[] type Record = RecordT Identity {-# INLINE (&) #-} (&) :: e -> Record r -> Record (k := e ': r) (&) = C . Identity infixr 5 & {-# INLINE (&.) #-} (&.) :: w e -> RecordT w r -> RecordT w (k := e ': r) (&.) = C infixr 5 &. 
{-# INLINE nil #-} nil :: RecordT w '[] nil = E -------------------------------------------------------------------------------- -- Instances instance Eq (RecordT w '[]) where {-# INLINE (==) #-} _ == _ = True instance ( Eq (w x) , Eq (RecordT w xs)) => Eq (RecordT w (k := x ': xs)) where {-# INLINE (==) #-} C x xs == C y ys = x == y && xs == ys instance Ord (RecordT w '[]) where {-# INLINE compare #-} compare _ _ = EQ instance ( Ord (w x) , Ord (RecordT w xs)) => Ord (RecordT w (k := x ': xs)) where {-# INLINE compare #-} compare (C x xs) (C y ys) = compare (compare x y) (compare xs ys) instance Show (RecordT w '[]) where {-# INLINE show #-} show _ = "nil" instance ( Show a , Show (Record xs)) => Show (Record (k := a ': xs)) where {-# INLINE show #-} show (C x xs) = show x ++ " & " ++ show xs instance ( Show (w a) , Show (RecordT w xs)) => Show (RecordT w (k := a ': xs)) where {-# INLINE show #-} show (C x xs) = show x ++ " & " ++ show xs instance Monoid (RecordT w '[]) where {-# INLINE mappend #-} {-# INLINE mempty #-} mappend _ _ = nil mempty = nil instance ( Monoid (w x) , Monoid (RecordT w xs)) => Monoid (RecordT w (k := x ': xs)) where {-# INLINE mappend #-} {-# INLINE mempty #-} mappend (C x xs) (C y ys) = mappend x y `C` mappend xs ys mempty = (mempty :: w x) `C` mempty class EmptyRecord w r where rempty :: RecordT w r instance EmptyRecord w '[] where {-# INLINE rempty #-} rempty = nil instance (Alternative f, EmptyRecord f xs) => EmptyRecord f (k := x ': xs) where {-# INLINE rempty #-} rempty = (empty :: f x) `C` rempty -------------------------------------------------------------------------------- -- Field accessors/setters class Access r k a | r k -> a where access :: Key k -> RecordT w r -> w a instance Access (k := a ': xs) k a where {-# INLINE access #-} access _ (C x _) = x instance Access xs k a => Access (k0 := a0 ': xs) k a where {-# INLINE access #-} access n (C _ xs) = access n xs -- | Class to indicate whether a record "has" a key, and its type. class Has r k a | r k -> a instance Has k (k := a ': xs) (Just a) instance Has k xs m => Has k (k0 := a0 ': xs) m class Knock k r a | k r -> a where -- | Try ("knock politely") to get a field of a record. -- It's impossible to get proper "lookups" at runtime, so this function -- is probably not very useful. 
knock :: Key k -> RecordT w r -> Maybe (w a) instance Has k r Nothing => Knock k r () where {-# INLINE knock #-} knock _ _ = Nothing instance Access r k a => Knock k r a where {-# INLINE knock #-} knock k xs = Just (access k xs) class Update r k a | r k -> a where -- | Write to a record's field write :: Key k -> w a -> RecordT w r -> RecordT w r -- | Update a record's field alter :: Key k -> (w a -> w a) -> RecordT w r -> RecordT w r instance Update (k := a ': xs) k a where {-# INLINE write #-} {-# INLINE alter #-} write _ x (C _ xs) = x `C` xs alter _ f (C y ys) = f y `C` ys instance Update xs k a => Update (k0 := a0 ': xs) k a where {-# INLINE write #-} {-# INLINE alter #-} write n y (C x xs) = x `C` write n y xs alter n f (C x xs) = x `C` alter n f xs -------------------------------------------------------------------------------- -- Field deletion class Delete r0 r1 k | r0 k -> r1 where delete :: Key k -> RecordT w r0 -> RecordT w r1 instance Delete '[] '[] k where {-# INLINE delete #-} delete _ _ = nil instance Delete (k := x ': xs) xs k where {-# INLINE delete #-} delete _ (C _ xs) = xs instance Delete xs ys k => Delete (k0 := x ': xs) (k0 := x ': ys) k where {-# INLINE delete #-} delete k (C x xs) = C x (delete k xs) -------------------------------------------------------------------------------- -- Record combinators class Box w m r where -- | "Box" every element of a record. -- Usually means applying a newtype wrapper to everything box :: (forall a. m a -> w (m a)) -> RecordT m r -> RecordT (w :.: m) r -- Compositions of the record wrapper types instance Box w m '[] where {-# INLINE box #-} box _ _ = nil instance Box (w :: * -> *) (m :: * -> *) xs => Box w m (x ': xs) where {-# INLINE box #-} box f (C x xs) = C (Wmx (f x)) (box f xs) class Transform r where -- | Change the type wrapping every element of a record transform :: (forall a. (i :: * -> *) a -> (o :: * -> *) a) -> RecordT i r -> RecordT o r instance Transform '[] where {-# INLINE transform #-} transform _ _ = nil instance Transform xs => Transform (x ': xs) where {-# INLINE transform #-} transform f (C x xs) = f x `C` transform f xs class Run r where -- | Iterate over a RecordT's elements, and use the inner monad to iterate -- over everything, and return a "pure" record. run :: Monad m => RecordT m r -> m (Record r) instance Run '[] where {-# INLINE run #-} run _ = return nil instance Run xs => Run (x ': xs) where {-# INLINE run #-} run (C x xs) = liftM2 (&) x (run xs) class Runtrans r where -- | Iterate over every element of a record. Logically similar to @ run . transform f @, but -- 'runtrans' should be more efficient. runtrans :: Monad o => (forall a. (i :: * -> *) a -> (o :: * -> *) a) -> RecordT i r -> o (Record r) instance Runtrans '[] where {-# INLINE runtrans #-} runtrans _ _ = return nil instance Runtrans xs => Runtrans (x ': xs) where {-# INLINE runtrans #-} runtrans f (C x xs) = liftM2 (&) (f x) (runtrans f xs) class Transrun r where transrun :: Monad m => (forall a. 
a -> m (w a)) -> Record r -> m (RecordT w r) instance Transrun '[] where {-# INLINE transrun #-} transrun _ _ = return nil instance Transrun xs => Transrun (x ': xs) where {-# INLINE transrun #-} transrun f (C x xs) = liftM2 C (f (runIdentity x)) (transrun f xs) -------------------------------------------------------------------------------- -- Unions -- | Append two type-level lists type family (++) (x :: [a]) (y :: [a]) :: [a] type instance '[] ++ '[] = '[] type instance '[] ++ ys = ys type instance (x ': xs) ++ ys = x ': (xs ++ ys) class AllUnique (r0 :: [F key *]) (r1 :: [F key *]) instance (IsElem r1 k False, AllUnique r0 r1) => AllUnique (k := a ': r0) r1 instance AllUnique '[] r1 instance AllUnique r0 '[] class IsElem r k a | r k -> a instance IsElem '[] k False instance IsElem (k := a ': xs) k True instance IsElem xs k m => IsElem (k0 := a0 ': xs) k m class Union r0 r1 where -- | Merge a record. Unfortunately at the moment records that share fields -- are simply not accepted. I'm not sure how to write a type family to -- really make a "union". union :: AllUnique r0 r1 => RecordT w r0 -> RecordT w r1 -> RecordT w (r0 ++ r1) instance Union '[] '[] where {-# INLINE union #-} union _ _ = nil instance Union '[] a where {-# INLINE union #-} union _ x = x instance (AllUnique xs ys, Union xs ys) => Union (x ': xs) ys where {-# INLINE union #-} union (C x xs) ys = C x (union xs ys) class CombineWith r where -- | Take two records with identical fields, and "combine" them with some combining function. -- e.g. @ combineWith (<|>) (r0 :: RecordT Maybe r) (r1 :: RecordT Maybe r) @ combineWith :: (forall (a :: *). w a -> w a -> w a) -> RecordT w r -> RecordT w r -> RecordT w r instance CombineWith '[] where {-# INLINE combineWith #-} combineWith _ _ _ = nil instance CombineWith xs => CombineWith (x ': xs) where {-# INLINE combineWith #-} combineWith f (C x xs) (C y ys) = C (f x y) (combineWith f xs ys) -------------------------------------------------------------------------------- -- Convenience QuasiQuoters kq :: String -> Q Exp kq s = [| undefined :: Key $(litT . return . StrTyLit $ s) |] key :: QuasiQuoter key = QuasiQuoter { quoteExp = kq, quoteType = undefined, quoteDec = undefined, quotePat = undefined } -------------------------------------------------------------------------------- -- Monad transformer convenience operators {-# INLINE (=:) #-} (=:) :: (MonadState (RecordT w r) m, Update r k a) => Key k -> w a -> m () (=:) k a = modify (write k a) {-# INLINE (~:) #-} (~:) :: (MonadState (RecordT w r) m, Update r k a) => Key k -> (w a -> w a) -> m () (~:) k f = modify (alter k f) {-# INLINE fields #-} fields :: (MonadState (RecordT w r) m, Access r k a) => Key k -> m (w a) fields = gets . access {-# INLINE fieldr #-} fieldr :: (MonadReader (RecordT w r) m, Access r k a) => Key k -> m (w a) fieldr = asks . access
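-- Usage sketch (assumed, not part of this module): building a record and reading a
-- field back. The use site needs DataKinds and TypeOperators; the explicit type
-- annotation fixes the field keys, which would otherwise be ambiguous.
--
-- examplePerson :: Record '["name" := String, "age" := Int]
-- examplePerson = "Ada" & 30 & nil
--
-- exampleAge :: Identity Int
-- exampleAge = access (undefined :: Key "age") examplePerson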
/** * Saves all unsaved changes to the models to disk */ public synchronized void save(){ for (Entry<File, CachedModel> entry : models.entrySet()){ saveModel(entry.getKey(), entry.getValue()); } tidyUpModelCache(); }
package hurrycaneblurryname.ryde.View; import android.app.Dialog; import android.content.DialogInterface; import android.content.Intent; import android.content.res.Resources; import android.location.Address; import android.location.Geocoder; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.LayoutInflater; import android.view.MenuItem; import android.location.Location; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.EditText; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.RadioButton; import android.widget.RadioGroup; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import android.widget.ToggleButton; import com.edmodo.rangebar.RangeBar; import com.google.android.gms.maps.model.LatLng; import com.google.common.base.Predicate; import java.io.IOException; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import hurrycaneblurryname.ryde.ElasticSearchRequestController; import hurrycaneblurryname.ryde.Model.Request.Request; import hurrycaneblurryname.ryde.Model.Request.RequestHolder; import hurrycaneblurryname.ryde.R; /** * Search for Open requests * Author: <NAME> * Storyboard by: Blaz * Version: 2 */ public class SearchRequestsActivity extends AppCompatActivity { private Button searchButton; private Button searchNearbyButton; private RadioGroup searchGroup; private EditText searchEditText; private EditText searchEditText2; private ListView searchView; private ArrayAdapter<Request> searchViewAdapter; private ArrayList<Request> searchResult; private Location mLastLocation; private ToggleButton distanceToggle; private ToggleButton priceToggle; private TextView minValueText; private TextView maxValueText; private boolean priceFilterApplied; private boolean distanceFilterApplied; private double min = 0; private double max = 1000; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.search_requests); setTitle("Search"); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setDisplayShowHomeEnabled(true); // searchBlock = (RelativeLayout) findViewById(R.id.searchRequestBlock); searchResult = new ArrayList<>(); searchEditText = (EditText) findViewById(R.id.SearchEditText); searchEditText2 = (EditText) findViewById(R.id.SearchEditText2); searchButton = (Button)findViewById(R.id.searchButton); searchNearbyButton = (Button)findViewById(R.id.searchNearbyButton); searchGroup = (RadioGroup)findViewById(R.id.searchRadioGroup); searchView = (ListView) findViewById(R.id.SearchResultListView); distanceToggle = (ToggleButton) findViewById(R.id.toggleDistanceFilter); priceToggle = (ToggleButton) findViewById(R.id.togglePriceFilter); priceFilterApplied = false; Intent intent = getIntent(); Bundle extras = intent.getExtras(); if (extras != null) { mLastLocation = extras.getParcelable("currLocation"); } searchButton.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { int selectedId = searchGroup.getCheckedRadioButtonId(); searchResult.clear(); distanceToggle.setChecked(false); priceToggle.setChecked(false); priceFilterApplied = distanceFilterApplied = false; String[] searchText = new 
String[2]; switch(selectedId) { case (R.id.radio_location): searchText[0] = searchEditText.getText().toString(); searchByLocation(searchText); break; case R.id.radio_keyword: searchText[0] = searchEditText.getText().toString(); searchByKeyword(searchText); break; case R.id.radio_geo: searchText[0] = searchEditText.getText().toString(); searchText[1] = searchEditText2.getText().toString(); searchByGeo(searchText); break; } } }); searchNearbyButton.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { searchResult.clear(); if (mLastLocation != null) { String lon = String.valueOf(mLastLocation.getLongitude()); String lat = String.valueOf(mLastLocation.getLatitude()); searchByGeo(lat, lon); } else { Toast.makeText(SearchRequestsActivity.this, "Current location not found", Toast.LENGTH_SHORT).show(); } } }); searchView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { // save selected request Request requestSelected = searchResult.get(position); RequestHolder.getInstance().setRequest(requestSelected); Intent info = new Intent(SearchRequestsActivity.this, RideInfoFromSearch.class); startActivity(info); } }); distanceToggle.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Log.i("TOGGLE", "CLICKED"); if(priceFilterApplied) { priceFilterApplied=false; priceToggle.setChecked(false); } Log.i("FilterTOGGLE", String.valueOf(priceToggle.isChecked())); Log.i("FilterBOOL", String.valueOf(priceFilterApplied)); if (!distanceFilterApplied && distanceToggle.isChecked()) { filterPricePerKMParamsDialog(); distanceToggle.setChecked(true); distanceToggle.setSelected(true); } else if (distanceFilterApplied && !distanceToggle.isChecked()) { distanceToggle.setChecked(false); distanceFilterApplied = false; // The toggle is disabled // Set the view back to al the results searchViewAdapter = new ArrayAdapter<Request>(SearchRequestsActivity.this, R.layout.list_item, searchResult); searchView.setAdapter(searchViewAdapter); } else { distanceFilterApplied = false; distanceToggle.setChecked(false); } } }); priceToggle.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if(distanceFilterApplied) { distanceFilterApplied=false; distanceToggle.setChecked(false); } if (!priceFilterApplied && priceToggle.isChecked()) { filterPriceParamsDialog(); priceToggle.setChecked(true); priceToggle.setSelected(true); } else if (priceFilterApplied && !priceToggle.isChecked()) { priceToggle.setChecked(false); priceFilterApplied = false; // The toggle is disabled // Set the view back to al the results searchViewAdapter = new ArrayAdapter<Request>(SearchRequestsActivity.this, R.layout.list_item, searchResult); searchView.setAdapter(searchViewAdapter); } else { priceFilterApplied = false; priceToggle.setChecked(false); } } }); } /** * Change of search mode when a radio button is clicked * @param view */ public void onRadioButtonClicked(View view) { // Is the button now checked? boolean checked = ((RadioButton) view).isChecked(); // Set layout programmatically. 
Move the layout to be attached relatively to another // http://stackoverflow.com/questions/3277196/can-i-set-androidlayout-below-at-runtime-programmatically // Accessed November 24, 2016 // Author: Qberticus searchEditText.getText().clear(); searchResult.clear(); distanceToggle.setChecked(false); priceToggle.setChecked(false); priceFilterApplied = distanceFilterApplied = false; RelativeLayout.LayoutParams p = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); p.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE); // Check which radio button was clicked // Readjust layout based on search mode switch(view.getId()) { case R.id.radio_location: if (checked) { p.addRule(RelativeLayout.BELOW, R.id.SearchEditText); findViewById(R.id.searchRadioGroup).setLayoutParams(p); searchEditText2.setVisibility(View.INVISIBLE); searchEditText.setHint(R.string.searchLocation); } break; case R.id.radio_keyword: if (checked) { p.addRule(RelativeLayout.BELOW, R.id.SearchEditText); findViewById(R.id.searchRadioGroup).setLayoutParams(p); searchEditText2.setVisibility(View.INVISIBLE); searchEditText.setHint(R.string.searchKeyword); } break; case R.id.radio_geo: if (checked) { p.addRule(RelativeLayout.BELOW, R.id.SearchEditText2); findViewById(R.id.searchRadioGroup).setLayoutParams(p); searchEditText2.setVisibility(View.VISIBLE); searchEditText.setHint(R.string.latitude); searchEditText2.setHint(R.string.longitude); } break; } } // Back Navigation Handle @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { // Respond to the action bar's Up/Home button case android.R.id.home: onBackPressed(); return true; } return super.onOptionsItemSelected(item); } /** * Filter and sort based on predicate and comparator * @param predicate filter criteria * @param comparator sort criteria * @return filtered and sorted list */ private ArrayList<Request> filterAndSortList (Predicate<Request> predicate, Comparator<Request> comparator) { // Filter the search results ArrayList<Request> filterList = new ArrayList<>(); for (Request r : searchResult) { if (predicate.apply(r)) { filterList.add(r); } } Collections.sort(filterList, comparator); return filterList; } /** * Creates a dialog for getting filter price parameters for user * Filter is only applied when user clicks "filter" */ private void filterPriceParamsDialog() { final Dialog filterDialog = new Dialog(SearchRequestsActivity.this); LayoutInflater inflater = (LayoutInflater)SearchRequestsActivity.this.getSystemService(LAYOUT_INFLATER_SERVICE); View layout = inflater.inflate(R.layout.filter_dialog, (ViewGroup)findViewById(R.id.filter_dialog_root_element)); filterDialog.setTitle("Set price filter:"); filterDialog.setContentView(layout); min = 0; max = 100; final double limit = max; final int ticks = 1000; minValueText = (TextView) filterDialog.findViewById(R.id.minFilterText); minValueText.setText(new DecimalFormat("$#0.00").format(min)); maxValueText = (TextView) filterDialog.findViewById(R.id.maxFilterText); maxValueText.setText("No max"); Button dialogButton = (Button)layout.findViewById(R.id.filter_dialog_button); RangeBar dialogSeekBar = (RangeBar)layout.findViewById(R.id.filter_dialog_rangebar); dialogSeekBar.setTickCount(ticks); filterDialog.show(); filterDialog.setOnCancelListener(new DialogInterface.OnCancelListener() { @Override public void onCancel(DialogInterface dialogInterface) { priceFilterApplied=false; priceToggle.setChecked(false); filterDialog.dismiss(); } }); 
dialogSeekBar.setOnRangeBarChangeListener(new RangeBar.OnRangeBarChangeListener() { @Override public void onIndexChangeListener(RangeBar rangeBar, int leftThumbIndex, int rightThumbIndex) { minValueText.setText(new DecimalFormat("$#0.00").format(leftThumbIndex/(ticks/limit))); maxValueText.setText(new DecimalFormat("$#0.00").format((rightThumbIndex+1)/(ticks/limit))); if (rightThumbIndex+1 == limit) { max = 99999; // set no limit! } else { max = rightThumbIndex/(ticks/max); } min = (leftThumbIndex+1)/(ticks/max); } }); dialogButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { priceFilterApplied = true; // Set predicate for filtering // http://guidogarcia.net/blog/2011/10/29/java-different-ways-filter-collection/ // Accessed: November 26 // Post by <NAME> Predicate<Request> pred = new Predicate<Request>() { public boolean apply(Request r) { // do the filtering return (min<r.getEstimate() && r.getEstimate()<max) ; } }; Comparator<Request> comp = new Comparator<Request>() { @Override public int compare(Request r1, Request r2) { return r1.getEstimate().compareTo(r2.getEstimate()); } }; ArrayList<Request> filtered = filterAndSortList(pred, comp); searchViewAdapter = new ArrayAdapter<Request>(SearchRequestsActivity.this, R.layout.list_item, filtered); searchView.setAdapter(searchViewAdapter); filterDialog.dismiss(); } }); } /** * Creates a dialog for getting filter price parameters for user. * Filter is not applied until user clicks "filter" */ private void filterPricePerKMParamsDialog() { final Dialog filterDialog = new Dialog(SearchRequestsActivity.this); LayoutInflater inflater = (LayoutInflater)SearchRequestsActivity.this.getSystemService(LAYOUT_INFLATER_SERVICE); View layout = inflater.inflate(R.layout.filter_dialog, (ViewGroup)findViewById(R.id.filter_dialog_root_element)); filterDialog.setTitle("Set price/km filter:"); filterDialog.setContentView(layout); min = 0; max = 100; final double limit = max; final int ticks = 1000; minValueText = (TextView) filterDialog.findViewById(R.id.minFilterText); minValueText.setText(new DecimalFormat("$#0.00").format(min)); maxValueText = (TextView) filterDialog.findViewById(R.id.maxFilterText); maxValueText.setText("No max"); Button dialogButton = (Button)layout.findViewById(R.id.filter_dialog_button); RangeBar dialogSeekBar = (RangeBar)layout.findViewById(R.id.filter_dialog_rangebar); dialogSeekBar.setTickCount(ticks); filterDialog.show(); filterDialog.setOnCancelListener(new DialogInterface.OnCancelListener() { @Override public void onCancel(DialogInterface dialogInterface) { distanceFilterApplied=false; distanceToggle.setChecked(false); filterDialog.dismiss(); } }); dialogSeekBar.setOnRangeBarChangeListener(new RangeBar.OnRangeBarChangeListener() { @Override public void onIndexChangeListener(RangeBar rangeBar, int leftThumbIndex, int rightThumbIndex) { minValueText.setText(new DecimalFormat("$#0.00").format(leftThumbIndex/(ticks/limit))); maxValueText.setText(new DecimalFormat("$#0.00").format((rightThumbIndex+1)/(ticks/limit))); if (rightThumbIndex+1 == limit) { max = 99999; // set no limit! 
} else { max = rightThumbIndex/(ticks/max); } min = (leftThumbIndex+1)/(ticks/max); } }); dialogButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { distanceFilterApplied = true; ArrayList<Request> filtered = new ArrayList<Request>(); // Set predicate for filtering // http://guidogarcia.net/blog/2011/10/29/java-different-ways-filter-collection/ // Accessed: November 26 // Post by <NAME> Predicate<Request> pred = new Predicate<Request>() { public boolean apply(Request r) { // do the filtering return (min < r.getEstimate()/r.getDistance() && r.getEstimate()/r.getDistance() < max) ; // return true; //TODO implement distance filtering } }; Comparator<Request> comp = new Comparator<Request>() { @Override public int compare(Request r1, Request r2) { Double ppk1 = r1.getEstimate()/r1.getDistance(); Double ppk2 = r2.getEstimate()/r2.getDistance(); return (ppk1).compareTo(ppk2); } }; filtered = filterAndSortList(pred, comp); searchViewAdapter = new ArrayAdapter<Request>(SearchRequestsActivity.this, R.layout.list_item, filtered); searchView.setAdapter(searchViewAdapter); filterDialog.dismiss(); } }); } /** * Wrapper that helps to find requests based on searchParam geolocation * * @param searchParam string array can contain [lat, lon] */ private void searchByGeo(String... searchParam) { Log.i("SEARCH", Arrays.toString(searchParam)); ElasticSearchRequestController.GetOpenRequestsGeoTask getRequestsTask = new ElasticSearchRequestController.GetOpenRequestsGeoTask(); getRequestsTask.execute(searchParam); ArrayList<Request> newResults; try { newResults = getRequestsTask.get(); if (newResults.isEmpty()) { Toast.makeText(SearchRequestsActivity.this, "No results!", Toast.LENGTH_SHORT).show(); } searchResult = newResults; searchViewAdapter = new ArrayAdapter<Request>(SearchRequestsActivity.this, R.layout.list_item, searchResult); searchView.setAdapter(searchViewAdapter); } catch (Exception e) { Toast.makeText(SearchRequestsActivity.this, "Could not communicate with server", Toast.LENGTH_SHORT).show(); Log.i("ErrorGetUser", "Something went wrong when looking for requests"); //e.printStackTrace(); } } /** * Wrapper for searching through request descriptions with searchParam[0] * @param searchParam */ private void searchByKeyword(String... searchParam) { ElasticSearchRequestController.GetOpenRequestsDescTask getRequestsTask = new ElasticSearchRequestController.GetOpenRequestsDescTask(); getRequestsTask.execute(searchParam); ArrayList<Request> newResults; try { newResults = getRequestsTask.get(); if (newResults.isEmpty()) { Toast.makeText(SearchRequestsActivity.this, "No results!", Toast.LENGTH_SHORT).show(); } searchResult = newResults; searchViewAdapter = new ArrayAdapter<Request>(SearchRequestsActivity.this, R.layout.list_item, searchResult); searchView.setAdapter(searchViewAdapter); } catch (Exception e) { Toast.makeText(SearchRequestsActivity.this, "Could not communicate with server", Toast.LENGTH_SHORT).show(); Log.i("ErrorGetUser", "Something went wrong when looking for requests"); //e.printStackTrace(); } } /** * Wrapper for searching location using geocoding (addresses, landmarks, etc.) * @param searchParam search string */ private void searchByLocation(String... 
searchParam) { String location = searchEditText.getText().toString(); List<Address> addressList = null; String[] search = new String[2]; if(location == null || location.isEmpty()){ Toast.makeText(SearchRequestsActivity.this, "No results!", Toast.LENGTH_SHORT).show(); } else { Geocoder geocoder = new Geocoder(this); try { addressList = geocoder.getFromLocationName(location , 1); } catch (IOException e) { e.printStackTrace(); } if (addressList.isEmpty()) { return; } //Place Marker Address address = addressList.get(0); LatLng latlng = new LatLng(address.getLatitude() , address.getLongitude()); search[1] = String.valueOf(address.getLongitude()); search[0] = String.valueOf(address.getLatitude()); searchByGeo(search); } } }
There have been lots and lots of numbers thrown around over the past 24 hours, as every presidential candidate this side of Bobby Jindal and Lincoln Chafee declares themselves the winner of the third quarter of fundraising. (Reports of money raised and spent were due to the Federal Election Commission by midnight Thursday.) It might surprise you to know that most of these candidates are not, in fact, winners. The real fundraising winner of the past three months? Donald Trump. This chart, courtesy of my friends at NBC News, shows why: of the major candidates, Trump spent the least. Sure, it's true that some of Trump's low spending has to do with the fact that, unlike anyone else on that chart, he is almost entirely self-funding his campaign. Without the need to hold fundraisers (or employ fundraising consultants) you can keep costs down. But, even with that caveat, the fact that Trump has spent so little to get SO much is remarkable. He is in first place in every single national and key-early-state poll I have seen, and he continues to dominate the conversation about the race. Not only is he dominating the conversation about the race, but he has also started to dictate the terms. Twenty-four hours after Trump floated the idea of skipping the next debate, sponsored by CNBC — because, among other things, it was too long — CNBC announced that the debate would run only two hours. The return on Trump's investment — if you judge returns by winning, which he, of course, does — at the moment is astronomical. He spent roughly a third as much as Jeb Bush did over the past three months, even as the former Florida governor watched his poll numbers sink, both nationally and in places such as Iowa and New Hampshire. [Video: Republican presidential candidates Jeb Bush and Donald Trump have a heated exchange over campaign funding. (CNN)] There is an argument to be made that although Trump might have won the fundraising battle, he will eventually lose the war. This is because, unlike, say, Clinton or Bush, who are investing heavily in early-state organizations, Trump is not doing anything of the sort. (Trump's biggest expenditure of the past three months — $825,000 — was on gear with his "Make America Great" slogan on it.) Trump might be getting a lot of bang for his buck at the moment, his detractors will argue, but when it comes to putting together an apparatus to turn out voters, he's nowhere. I won't argue that Trump has anywhere close to the field operation of Bush or even Ted Cruz or Marco Rubio. What I might argue is that field and turnout operations might be overrated for a celebrity like Trump, who seems to draw huge crowds wherever he goes and has a knack for hoovering up free publicity.
// NewUserContext returns a new *UserContext
func NewUserContext() *UserContext {
	return &UserContext{
		AARFlowStatsInterval:        30,
		AARProbeStatsInterval:       30,
		VSSFeatureEnabled:           false,
		VSSStatsInterval:            30,
		MaintenanceModeEnabled:      false,
		RbacEnabled:                 false,
		DeniedFlowCollectionEnabled: false,
		ThreatIntelligenceEnabled:   false,
		AllowEnterpriseAvatarOnNSG:  true,
		ExplicitACLMatchingEnabled:  false,
	}
}
// WriteSettings commits any changes to the settings.
func (*DatabaseSettings) WriteSettings(c context.Context, values map[string]string, who, why string) error {
	databaseSettings := &DatabaseSettings{
		Server:   values["Server"],
		Username: values["Username"],
		Password: values["Password"],
		Database: values["Database"],
	}
	return settings.SetIfChanged(c, settingsKey, databaseSettings, who, why)
}
# Copyright 2018 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from bson.objectid import ObjectId
import pymongo
import databasehelper
import threading
import time
import sys

from nodes.node import Node
import configmanager
import logmanager

DB_NAME = configmanager.get_key('DATABASE', 'DatabaseName')
DB_COLLECTION_TEMP_DATASTORE = configmanager.get_key('DATABASE', 'DataStoreCollectionTemp')
INTERVAL = float(configmanager.get_key('INTERVALS', 'DatastoreInterval'))

TAG = 'DataStoreManager'


class DataStoreThread(threading.Thread):
    """Periodically reads a node and stores timestamped values in its own collection."""

    def __init__(self, node: Node, node_id: str) -> None:
        super(DataStoreThread, self).__init__()
        self.interval = INTERVAL
        self.__node = node
        self.__id = node_id
        self.__isrunning = False

    def __store(self) -> None:
        db = databasehelper.get_database(DB_NAME)
        colname = DB_COLLECTION_TEMP_DATASTORE + str(self.__id)
        if not databasehelper.isExistCollection(db, colname):
            col = databasehelper.create_collection(db, colname)
        else:
            col = databasehelper.get_collection(db, colname)
        while self.__isrunning:
            try:
                readVal = self.__node.read()
                doc = {'time': time.time(), 'value': readVal}
                databasehelper.insert(col, doc)
                time.sleep(self.interval)
            except:
                logmanager.error(TAG, sys.exc_info())

    def run(self) -> None:
        self.__isrunning = True
        self.__store()

    def kill(self) -> None:
        self.__isrunning = False

    def is_running(self) -> bool:
        return self.__isrunning


# Registry of running datastore threads, keyed by node id.
data_stores = {}


def run_datastorer(node_id: str, node: Node) -> None:
    datastorethread = DataStoreThread(node, node_id)
    datastorethread.start()
    data_stores[node_id] = datastorethread


def is_running(node_id: str) -> bool:
    if node_id in data_stores:
        return data_stores[node_id].is_running()
    else:
        return False


def kill_datastorer(node_id: str) -> None:
    if node_id in data_stores:
        data_stores[node_id].kill()
        data_stores[node_id] = None
        del data_stores[node_id]


def killall() -> None:
    # Iterate over a copy of the keys: kill_datastorer() removes entries from
    # data_stores, and mutating a dict while iterating its live key view raises
    # a RuntimeError.
    for node_id in list(data_stores.keys()):
        kill_datastorer(node_id)


def remove_datastore(node_id: str) -> None:
    colname = DB_COLLECTION_TEMP_DATASTORE + str(node_id)
    db = databasehelper.get_database(DB_NAME)
    col = databasehelper.get_collection(db, colname)
    databasehelper.drop(col)


def find_datastore_values(node_id: str, limit: int) -> list:
    colname = DB_COLLECTION_TEMP_DATASTORE + str(node_id)
    db = databasehelper.get_database(DB_NAME)
    col = databasehelper.get_collection(db, colname)
    values = list(databasehelper.find(col, {}, {'sort': [('time', -1)],
                                                'limit': limit,
                                                'projection': {'_id': False}}))
    return values
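The DataStoreThread above is a standard stop-flag sampling loop: run() sets a flag, the loop reads the node and inserts a timestamped document every interval, and kill() clears the flag. The sketch below shows that pattern in isolation, without the MongoDB-backed databasehelper or the project's Node class; the in-memory list and the lambda "sensor" are stand-ins for illustration only.

import threading
import time

class SamplerThread(threading.Thread):
    """Minimal stand-in for DataStoreThread: read a value periodically until killed."""

    def __init__(self, read_fn, interval=0.1):
        super().__init__()
        self._read_fn = read_fn
        self._interval = interval
        self._running = False
        self.samples = []  # stand-in for the per-node MongoDB collection

    def run(self):
        self._running = True
        while self._running:
            self.samples.append({"time": time.time(), "value": self._read_fn()})
            time.sleep(self._interval)

    def kill(self):
        self._running = False

# Usage, mirroring run_datastorer() / kill_datastorer():
sampler = SamplerThread(read_fn=lambda: 42, interval=0.05)
sampler.start()
time.sleep(0.2)
sampler.kill()
sampler.join()
print(len(sampler.samples), "samples collected")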
/** * @author auto create * @version */ public class ListScenarioRequest extends RpcAcsRequest<ListScenarioResponse> { private String scenario; private String appId; private String name; private String sign; public ListScenarioRequest() { super("ARMS", "2019-08-08", "ListScenario"); setMethod(MethodType.POST); try { com.aliyuncs.AcsRequest.class.getDeclaredField("productEndpointMap").set(this, Endpoint.endpointMap); com.aliyuncs.AcsRequest.class.getDeclaredField("productEndpointRegional").set(this, Endpoint.endpointRegionalType); } catch (Exception e) {} } public String getScenario() { return this.scenario; } public void setScenario(String scenario) { this.scenario = scenario; if(scenario != null){ putQueryParameter("Scenario", scenario); } } public String getAppId() { return this.appId; } public void setAppId(String appId) { this.appId = appId; if(appId != null){ putQueryParameter("AppId", appId); } } public String getName() { return this.name; } public void setName(String name) { this.name = name; if(name != null){ putQueryParameter("Name", name); } } public String getSign() { return this.sign; } public void setSign(String sign) { this.sign = sign; if(sign != null){ putQueryParameter("Sign", sign); } } @Override public Class<ListScenarioResponse> getResponseClass() { return ListScenarioResponse.class; } }
import os
import re


def collect_segments(target_dir):
    # Build a regex from the shared filename pattern, capturing the
    # three-digit partition id and the ten-digit offset of 'records' files.
    fregex = re.compile(r"^%s$" % (filename_pattern() % (r'(\d{3})', r'(\d{10})', 'records')))
    res = {}
    for tdir, _, files in os.walk(target_dir):
        # Skip the top-level directory itself; only its subdirectories hold segments.
        if tdir == target_dir:
            continue
        res[tdir] = {}
        for segfile in sorted(files):
            match = fregex.match(segfile)
            if match:
                (partition, offset) = match.groups()
                if partition not in res[tdir]:
                    res[tdir][partition] = []
                res[tdir][partition].append(offset)
    return res
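The grouping approach above is: walk a directory tree, match each file name against a compiled pattern, and bucket the captured groups into a nested dict keyed by directory and partition. The self-contained sketch below illustrates only that structure; the `seg-<partition>-<offset>.records` name is a hypothetical stand-in for whatever `filename_pattern()` actually produces.

import os
import re
import tempfile

# Hypothetical file name layout, standing in for filename_pattern().
SEG_RE = re.compile(r"^seg-(\d{3})-(\d{10})\.records$")

def demo():
    root = tempfile.mkdtemp()
    topic = os.path.join(root, "topic-a")
    os.makedirs(topic)
    for name in ("seg-001-0000000000.records",
                 "seg-001-0000001024.records",
                 "seg-002-0000000000.records"):
        open(os.path.join(topic, name), "w").close()

    res = {}
    for tdir, _, files in os.walk(root):
        if tdir == root:
            continue
        res[tdir] = {}
        for f in sorted(files):
            m = SEG_RE.match(f)
            if m:
                partition, offset = m.groups()
                res[tdir].setdefault(partition, []).append(offset)
    return res

print(demo())
# e.g. {'/tmp/.../topic-a': {'001': ['0000000000', '0000001024'], '002': ['0000000000']}}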
A lot of Jews have the physical qualities of prehistoric humans, such as a big face, big nose, short neck, and slanted forehead. Is this just a coincidence? Or are Jews more genetically similar to primitive savages than the rest of us? Is there such a thing as a " Napoleon Complex "? Some people have created the theory that short men are suffering from an "inferiority complex", and that they behave in an abnormally aggressive manner in order to prove something to us. Psychologists refer to this as a Napoleon Complex. But why do only some short men have this complex? And why do some tall men behave in the same manner? Perhaps the men who are abnormally aggressive inherited a lot of primitive genetic qualities, thereby giving them a mind that is more similar to our prehistoric ancestors. Richard Branson and many other successful business executives have aggressive personalities, and often a massive skeleton, and they seem to enjoy fighting for dominance, just like a dumb animal. Is this just a coincidence? Or do these men have a mind that is similar to our prehistoric ancestors? I was a teenager when I first noticed that many men in leadership positions have massive bodies, and that their primary interest in life seemes to be fighting to be the dominant male. They didn't seem to care about society, or pollution, or crime, or the cities they live in. Their only concern in life seemed to be acquiring material items and showing off so that they could feel important. Their behavior reminded me of male animals . As a teenager, the only explanation I could come up with was that their testicles were producing excessive amounts of male hormones. Today I wonder if their aggressive, animal-like behavior is because they inherited a lot of primitive genetic traits, resulting in a mind that is more like a primitive savage than a modern human. Why do airline seats promote bad posture? Years ago I noticed that lots of chairs, such as automobile and airline seats, assume your head is not vertical over your back. In the case of airline seats, they put a pad behind your head, which I found irritating. I assumed that chairs were designed for the majority of people, and that the majority had bad posture , but perhaps some of the people who were used as measurements for chairs had that primitive, Neanderthal quality of a short neck that comes out of their backbone at an angle rather than upright. Below are more people with physical qualities of prehistoric humans
// PartialRange returns the range between the start and end positions where the
// specified ends are partial. They can be Complete, Partial5, Partial3, or
// PartialBoth.
func PartialRange(start, end int, partial Partial) Ranged {
	if end <= start {
		panic(fmt.Errorf("Ranged bounds out of range [%d:%d]", start, end))
	}
	return Ranged{start, end, partial}
}
def reproject_array( arr: xr.DataArray, spec: RasterSpec, interpolation: Literal["linear", "nearest"] = "nearest", fill_value: Optional[Union[int, float]] = np.nan, ) -> xr.DataArray: from_epsg = array_epsg(arr) if ( from_epsg == spec.epsg and array_bounds(arr) == spec.bounds and arr.shape[:-2] == spec.shape ): return arr as_bool = False if arr.dtype.kind == "b": arr = arr.astype("uint8") as_bool = True minx, miny, maxx, maxy = spec.bounds height, width = spec.shape x = np.linspace(minx, maxx, width, endpoint=False) y = np.linspace(maxy, miny, height, endpoint=False) if from_epsg == spec.epsg: result = arr.interp( x=x, y=y, method=interpolation, kwargs=dict(fill_value=fill_value) ) return result.astype(bool) if as_bool else result reverse_transformer = cached_transformer( spec.epsg, from_epsg, skip_equivalent=True, always_xy=True ) xs, ys = np.meshgrid(x, y, copy=False) src_xs, src_ys = reverse_transformer.transform(xs, ys, errcheck=True) xs_indexer = xr.DataArray(src_xs, dims=["y", "x"], coords=dict(y=y, x=x)) ys_indexer = xr.DataArray(src_ys, dims=["y", "x"], coords=dict(y=y, x=x)) old_xdim = f"x_{from_epsg}" old_ydim = f"y_{from_epsg}" result = arr.rename(x=old_xdim, y=old_ydim).interp( {old_xdim: xs_indexer, old_ydim: ys_indexer}, method=interpolation, kwargs=dict(fill_value=fill_value), ) return result.astype(bool) if as_bool else result
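On the fast path where source and target share an EPSG code, `reproject_array` reduces to `DataArray.interp` onto a freshly built target grid. The sketch below shows that regridding idea in isolation; it does not call `reproject_array`, `RasterSpec`, or the module's helper functions (whose exact signatures are assumptions of the surrounding code), and it keeps both coordinate axes ascending for simplicity.

import numpy as np
import xarray as xr

# A small source array on a coarse grid.
src = xr.DataArray(
    np.arange(16, dtype=float).reshape(4, 4),
    dims=("y", "x"),
    coords={"y": np.linspace(0, 10, 4, endpoint=False),
            "x": np.linspace(0, 10, 4, endpoint=False)},
)

# Target grid: same CRS, different bounds and shape, built the same way as in
# reproject_array (np.linspace over the bounds with endpoint=False).
minx, miny, maxx, maxy = 2.0, 2.0, 8.0, 8.0
height, width = 6, 6
x = np.linspace(minx, maxx, width, endpoint=False)
y = np.linspace(miny, maxy, height, endpoint=False)

# Nearest-neighbour regridding with a NaN fill value, mirroring the
# interpolation / fill_value arguments of reproject_array.
out = src.interp(x=x, y=y, method="nearest", kwargs=dict(fill_value=np.nan))
print(out.shape)  # (6, 6)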
The RPOC long axis is a simple indicator for predicting the need of invasive strategies for secondary postpartum hemorrhage in either post-abortion or post-partum women: a retrospective case control study Background The retained products of conception (RPOC) and related conditions (RPOC-ARC) are the main cause of secondary postpartum hemorrhage (sPPH), but there is no clear consensus for their management. The purpose of this study was to characterize those RPOC-ARC that require invasive treatment and those that could be managed more conservatively. Methods We retrospectively analyzed 96 cases of RPOC-ARC that occurred after miscarriage, abortion, or delivery at a gestational age between 12 and 42 completed weeks, that were managed within our institution from May 2015 to August 2020. We reviewed the associations between the occurrence of sPPH requiring invasive treatment with clinical factors such as the maternal background and the characteristics of the lesions. Results The range of gestational age at delivery in our study was 12–21 weeks in 61 cases, 22–36 in 5, and 37 or later in 30. Among them, nine cases required invasive procedures for treatment. The onset of sPPH was within one month of delivery in all but two cases, with a median of 24 days (range 9–47). We found significant differences between requirements for invasive versus non-invasive strategies according to gestational age at delivery, assisted reproductive technology (ART) pregnancy, amount of blood loss at delivery, and the long axis of the RPOC-ARC lesion (p = 0.028, p = 0.009, p = 0.004, and p = 0.002, respectively). Multivariate analysis showed that only the long axis of the lesion showed a significant difference (p = 0.029). The Receiver Operating Characteristic (ROC) curve for predicting the need for invasive strategies using the long axis of the lesion showed that with a cutoff of 4.4 cm, the sensitivity, specificity, positive predictive value (PPV), and negative predictive value (NPV) was 87.5, 90.0, 43.8, and 98.7%, respectively. Conclusion The long axis of the RPOC-ARC is a simple indicator for predicting which sPPH will require invasive procedures, which use is rare in cases with lesions less than 4.4 cm or those occurring after the first postpartum month. Conservative management should be considered in such cases. Conclusion: The long axis of the RPOC-ARC is a simple indicator for predicting which sPPH will require invasive procedures, which use is rare in cases with lesions less than 4.4 cm or those occurring after the first postpartum month. Conservative management should be considered in such cases. Keywords: Retained products of conception (RPOC), Secondary postpartum hemorrhage (sPPH), Retained placenta, Placental polyp, Arteriovenous malformation (AVM), Conservative management Background Postpartum hemorrhage (PPH) is an important cause of maternal mortality. There are two types of PPH, which are dependent on the time of hemorrhage onset. The first is primary PPH (pPPH) which occurs in the first 24 h following delivery, the other is the secondary PPH (sPPH) that occurs from 24 h or up to 12 weeks postpartum . The presence of a uterine mass, with or without vascularity, after a miscarriage, abortion, or delivery is a key cause of sPPH. The retained products of conception (RPOC) refers to any intrauterine tissue of trophoblastic origin that is present or that develops after delivery. It was reported that 33.3% of sPPH is due to such RPOC . 
An arteriovenous malformation (AVM) refers to a vascular anomaly where arteries and veins are directly connected through a complex void of the usual intermediate capillary bed. AVM is reported to account for only 3.3% of sPPH , making sPPH due to AVM very rare, compared to RPOC. Recently, AVM has sometimes been described as 'enhanced myometrial vascularity' (EMV), which refers to a focus of abnormally increased vascularity in the post-partum myometrium. Although RPOC, AVM, and other post-partum lesions are pathologically classified as different conditions, in practice it is difficult to accurately differentiate them clinically, and they sometimes coexist. Furthermore, since RPOC with vascularity and AVM often require a similar treatment approach, there might be little clinical significance for a strict differentiation. Therefore, we refer to these vascular abnormalities as 'retained products of conception and related conditions (RPOC-ARC)'. PPH is the leading cause of maternal mortality worldwide, accounting for 1/3 to 1/4 of all maternal deaths , and the incidence of PPH is reported to be increasing . Guidelines for predicting the onset of pPPH and for its management have been provided in various countries; however, there is as yet no established management strategy for sPPH. Various reports have proposed conservative management for these conditions versus aggressive management strategies, such as curettage, operative hysteroscopy, uterine artery embolization (UAE), and hysterectomy, but no clear consensus treatment strategy has been established. We believe that clarifying the predictors of sPPH development will be useful in establishing an evidence-based management plan for RPOC-ARC. In this study, we have investigated the outcome of clinically suspected 'RPOC-ARC' in our institution, in order to determine the risk factors for sPPH requiring invasive strategies. Methods This study included patients who experienced miscarriage, abortion, or delivery at a gestational age between 12 and 42 completed weeks between May 2015 and August 2020. In Japan, pregnancies after 12 weeks of gestation are routinely delivered via vaginal delivery or cesarean section, not by dilatation and evacuation and/ or curettage. Cases of placenta previa were excluded from our study. Patients were managed for sPPH at our institution, the Osaka University Hospital, after they had completed their delivery at our institution or elsewhere. In our institution, invasive strategies for RPOC-ARC are performed only in cases of developing sPPH, which is defined as excessive bleeding that occurs more than 24 h after delivery and up to 12 weeks postpartum. Only one case, where a prophylactic hysterectomy was performed according to the patient's request, was excluded from our study. As routine care at our institution, clinicians perform an initial two-dimensional transvaginal ultrasonography (TvUS) 1-5 days after delivery (at the time of discharge), and an additional TvUS is conducted one month after discharge. In addition, TvUS was routinely evaluated at 1-2 weeks postpartum in cases of delivery at 12-21 weeks of gestation. When patients came to our hospital urgently due to vaginal bleeding, imaging studies, including TvUS, transabdominal ultrasonography, computed tomography (CT), and/or magnetic resonance image (MRI), were performed, as needed. 
Among patients whose pregnancies were terminated at other hospitals, those whose puerperium were managed in the same way as at our hospital, and those who completed their treatment at our hospital, were included in this study. All cases of suspected abnormalities were followed until the lesions disappeared. Electronic medical records were searched for the terms "PROC", "AVM", "EMV", and their synonyms (e.g., placental retention and placental polyp) in English and Japanese. M.K. (lead author) reviewed the records for maternal age, the institution of delivery, gravidity (including this most recent conception), parity(including this conception), the use of assisted reproductive technology (ART), the gestational age at delivery, the mode of delivery, the amount of blood loss at delivery (including the amount of amniotic fluid), whether manual removal of the placenta (MRP) was performed at delivery, the long axis of the 'RPOC-ARC', and the clinical management and the outcome of the sPPH. As for the long axis of the lesion, the longest axis detected was adopted. Since delivery of the placenta is often challenging in patients with early delivery due to placental immaturity, so whether MRP was or was not performed was examined only in patients with a delivery after 37 weeks of gestation. To compare the maternal backgrounds, including information regarding the 'RPOC-ARC', and the need for invasive strategies due to the development of sPPH, univariate analysis was performed using a logistic regression analysis for continuous variables and the Chi-square test for nominal variables. In addition, multivariate analysis was performed using logistic analysis for all parameters that were statistically significant in the univariate analysis. All analyses were performed using JMP® Pro 15 (SAS Institute Inc., Cary, NC, USA), and p < 0.05 was considered statistically significant. Results A total of 96 patients were included in this study: 74 patients who delivered at our institution and 22 patients who delivered at other institutions. Table 1 shows the patients' characteristics and information about their RPOC-ARC. The median period from delivery to the use of an invasive strategy due to sPPH was 24 days (range 9-47), and the onset was within 1 month in all but two cases. Of the cases in which an invasive strategy was performed, six had hemostasis conducted by UAE, and three had a hysterectomy (two of these three had to have UAE performed for a primary PPH). Table 2 shows the univariate analysis of the predictors of clinical management and the outcome of the RPOC-ARC. A statistically significant difference was observed in the gestational age at delivery (p = 0.028), ART pregnancy (p = 0.009), blood loss at delivery (p = 0.004), and the longest axis of the lesion (p = 0.002). There was no significant difference in whether or not MRP was performed among the cases after 37 weeks (p = 0.723, odds ratio (OR) =1.333, 95% CI: 0.252-7.118). Table 3 shows the result of the multivariate analysis of the parameters that were statistically significant in univariate analysis. Only the longest axis of the lesion showed a significant difference (p = 0.029). Therefore, we prepared a receiver-operating characteristic (ROC) curve for predicting the need for invasive strategies using the long axis of the RPOC-ARC (AUC = 0.928) (Fig. 1). 
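As a side note for readers who want to sanity-check the accuracy figures reported for the 4.4 cm cutoff, the four quantities follow directly from a two-by-two table of (invasive treatment required) versus (long axis at or above the cutoff). The short sketch below uses purely hypothetical counts chosen only for illustration; they are not the study's data.

# Sensitivity, specificity, PPV and NPV from a 2x2 classification table.
def diagnostic_metrics(tp, fn, fp, tn):
    sensitivity = tp / (tp + fn)   # true positive rate
    specificity = tn / (tn + fp)   # true negative rate
    ppv = tp / (tp + fp)           # probability of invasive treatment given a large lesion
    npv = tn / (tn + fn)           # reassurance value of a small lesion
    return sensitivity, specificity, ppv, npv

# Hypothetical counts, for illustration only.
sens, spec, ppv, npv = diagnostic_metrics(tp=7, fn=1, fp=9, tn=79)
print(f"sens={sens:.1%} spec={spec:.1%} ppv={ppv:.1%} npv={npv:.1%}")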
With a cutoff of 4.4 cm for the long axis of the lesion, the sensitivity, specificity, positive predictive value (PPV), and negative predictive value (NPV) were 87.5, 90.0, 43.8, and 98.7%, respectively. Discussion This study resulted in three important findings: 1) sPPH due to 'RPOC-ARC' only rarely occurs after the first month after delivery; 2) The long axis of the lesion is helpful in predicting the development of sPPH requiring invasive treatment; 3) When the long axis cut-off is set to 4.4 cm, the NPV is about 98.7%, which means that lesions smaller than 4.4 cm can be safely managed conservatively. We found that ultrasound is the most used imaging method in the diagnosis of these RPOC-ARC. A systematic review to evaluate postpartum ultrasound for the diagnosis of RPOC concludes that it should be suspected with a thickened endometrial echo complex with a cut-off value of 10 mm or the presence of an intracavitary mass, and that the detection of hypervascularity, with color Doppler ultrasound, in addition to these findings, is very sensitive for RPOC . If these features are not visible, RPOC is rare. CT and MRI can help in the diagnosis of RPOC, but they are not used as the first line of investigation since these methods are far less convenient than ultrasound. Sonohysterography (SHG) was reported to have a greater accuracy than ultrasound in the detection of RPOC, but after SHG, 17.9% of cases showed complications, such as fever and infection . Therefore, SHG is not recommended because of these adverse effects. A previous retrospective study from our institution regarding the conservative management of RPOC-ARC reported that, of the 319 medical abortions at 12-21 weeks of gestation, 75 (24%) had a sonographically-identified EMV, but all these EMV resolved spontaneously, regardless of symptoms, and none with an associated massive bleeding required an invasive treatment . On the other hand, another retrospective study reported that of their 59 RPOCs, including 40 (68%) labors after term, 36 (61%) required interventions due to bleeding-related events, such as significant bleeding (20/36) or continuous small amounts of bleeding (16/36) . These results suggest that conservative management for RPOC-ARC is effective, but that the need for aggressive intervention increases as the number of gestational weeks of delivery advances. Surgical managements, such as dilation-and-curettage or hysteroscopic resection, are known. Operative hysteroscopy for RPOC several months after delivery has been reported to have a high efficacy rate and yields a subsequent high retention of fertility (the efficacy rate: 91%, the subsequent fertility among those who desire another pregnancy: 83%) . Compared to conventional curettage, hysteroscopic resection is reported to have a higher rate of complete resection, fewer complications such as perforation and adhesions, a better rate of subsequent pregnancy, and a shorter time to subsequent pregnancy . Therefore, hysteroscopy should be the choice, rather than curettage. However, these procedures tend to be avoided in the case of RPOC with high blood flow soon after delivery due to the fear of bleeding during the procedure. A previous retrospective study reported that the selective UAE is effective, without incidents or postembolization complications, with a 74.2% (23/31) success rate in the primary UAE, and a combined success rate of 87.1% (27/31) in the primary and second UAE . 
Another report showed that the technical and clinical success rate of UAE using a gelatin sponge for RPOC with hemorrhage was achieved without major complications in 93 and 100% of such cases, respectively . These suggest that UAE may be useful for RPOC with marked vascularity, AVM, or EMV for preventing the risk of hemorrhage related to the surgical removal of the mass with methods such as curettage or operative hysteroscopy. Three-dimensional (3D) color Doppler ultrasound, which is minimally invasive, may be useful in determining the reduction of blood flow . In addition, prophylactic UAE used before obstetric procedures with a high risk for massive bleeding was shown to be a safe and effective . A series of treatments, such as with UAE to reduce vascularity, 3D color Doppler evaluation, and subsequent hysteroscopic surgery might be reasonable procedures for the RPOC with high blood flow; however, no evidence-based methods for the management of 'RPOC-ARC' have been established to date, so this is an issue for further study and the indications for prophylactic procedures should be carefully evaluated. In order to determine whether and when conservative management is likely to resolve RPOC-ARC, it is necessary to judge the risk of developing sPPH on a case by case basis. It was reported that patients with severe complications had a significantly larger axis of the lesion than those without serious complications . A previous retrospective cohort study concluded that having a maximum length of the RPOC of ≧4 cm (adjusted OR = 8.6, 95% CI:2.4-39.2) and its hypervascularity (adjusted OR = 4.6, 95% CI:1.3-18.8) were independent risk factors for requiring intervention due to bleeding-related events . Another retrospective study reported that serum free-hemoglobin, endometrial stripe thickness, and US vascularity score are significant predictors of the need for surgical intervention in women with clinically suspected RPOC . In this study, we found that lesions smaller than 4.4 cm can be managed conservatively with relative safety. Previous reports support the results of our study, however, the greatest strength of our study is its ability to classify the risk of developing sPPH in a simpler way than previous reports. The final decision regarding RPOC management should be made in discussion with the patient and her family, taking into account her living environments, such as her place of residence and the support and cooperation expected from her family, since emergency consultations will be necessary if sPPH develops during conservative treatment. Most importantly, clinicians must understand the various data reported about RPOC-ARC and must present them clearly to their patients and families. Our study had some limitations. First, this study was a retrospective case-control study. Therefore, the type and timing of examinations were decided by each clinician, and we could not examine the optimal timing and methods for the detection of the RPOC-ARC. Second, histopathological confirmation was not possible, an unavoidable limitation, since most of the cases were treated conservatively or using UAE. Third, we examined various subgroups combined: post-abortion at 12-21 weeks of gestation, post-miscarriage at 12-21 weeks of gestation, and post-partum delivery at 22 weeks of gestation or later. We tried to examine these separately, but could not find significant results in each subgroup (data not shown). We consider that this may be attributable to a lack of power. 
The present study showed that invasive procedures for RPOC-ARC were required more frequently in post-partum cases after 37 weeks of gestation, 26.7% (8/30). Therefore, we intend to focus future research on such cases. To carry this out, further investigation with a larger sample is necessary, and this is an issue for future study. Even with these limitations, this study revealed that the risk of sPPH requiring invasive strategies related to RPOC-ARC may be accurately assessed by evaluating solely the long axis of the lesion. More research is needed to obtain a high level of evidence for preventing sPPH due to RPOC-ARC. Conclusions When considering the management of RPOC-ARC, it is important to be familiar with the risk factors of sPPH. The long axis of the lesion is a simple and useful indicator for predicting sPPH requiring invasive treatment, as sPPH due to RPOC-ARC is rare in cases where the long axis of the lesion is less than 4.4 cm. Conservative management of the lesion can be considered in such cases. For lesions larger than 4.4 cm, it is necessary to present a careful management plan tailored to each patient's background and condition.
#include <bits/stdc++.h>
using namespace std;

// st[k] holds the 1-based positions assigned to the k-th subsequence.
queue<int> st[200007];

int main()
{
    string s;
    cin >> s;

    // cnt = number of subsequences currently ending in '0'.
    // A '0' either starts a new subsequence or extends one that ends in '1';
    // a '1' must extend a subsequence that currently ends in '0'.
    int cnt = 0;
    for (int i = 1; i <= (int)s.length(); i++) {
        if (s[i - 1] == '0') {
            st[++cnt].push(i);
        } else {
            if (!cnt) {              // no subsequence ends in '0' -> impossible
                cout << -1 << endl;
                return 0;
            }
            st[cnt--].push(i);
        }
    }

    if (s[s.length() - 1] != '0') {  // the string itself must end in '0'
        cout << -1 << endl;
        return 0;
    }

    // Every character must belong to one of the cnt subsequences that end in '0';
    // if positions remain in subsequences still ending in '1', there is no answer.
    int ans = 0;
    for (int i = 1; i <= cnt; i++) {
        ans += (int)st[i].size();
    }
    if (ans < (int)s.length()) {
        cout << -1 << endl;
        return 0;
    }

    // Print each subsequence: its length followed by its positions.
    cout << cnt << endl;
    for (int i = 1; i <= cnt; i++) {
        cout << st[i].size() << ' ';
        while (!st[i].empty()) {
            cout << st[i].front() << ' ';
            st[i].pop();
        }
        cout << endl;
    }
    return 0;
}
/* * Created by Orchextra * * Copyright (C) 2016 Gigigo Mobile Services SL * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gigigo.orchextra.sdk; import android.annotation.TargetApi; import android.app.Application; import android.content.ComponentName; import android.content.Context; import android.content.pm.PackageManager; import android.os.Build; import android.text.TextUtils; import android.util.Log; import com.gigigo.ggglib.device.AndroidSdkVersion; import com.gigigo.ggglogger.GGGLogImpl; import com.gigigo.ggglogger.LogLevel; import com.gigigo.imagerecognitioninterface.ImageRecognition; import com.gigigo.orchextra.BuildConfig; import com.gigigo.orchextra.CrmUser; import com.gigigo.orchextra.OrchextraLogLevel; import com.gigigo.orchextra.R; import com.gigigo.orchextra.control.controllers.authentication.CrmUserController; import com.gigigo.orchextra.control.controllers.status.SdkAlreadyStartedException; import com.gigigo.orchextra.control.controllers.status.SdkInitializationException; import com.gigigo.orchextra.control.controllers.status.SdkNotInitializedException; import com.gigigo.orchextra.device.bluetooth.beacons.BeaconBackgroundPeriodBetweenScan; import com.gigigo.orchextra.device.bluetooth.beacons.ranging.BeaconRangingScanner; import com.gigigo.orchextra.device.imagerecognition.ImageRecognitionManager; import com.gigigo.orchextra.device.notificationpush.OrchextraGcmListenerService; import com.gigigo.orchextra.di.components.DaggerOrchextraComponent; import com.gigigo.orchextra.di.components.OrchextraComponent; import com.gigigo.orchextra.di.injector.InjectorImpl; import com.gigigo.orchextra.di.modules.OrchextraModule; import com.gigigo.orchextra.domain.abstractions.actions.CustomOrchextraSchemeReceiver; import com.gigigo.orchextra.domain.abstractions.device.OrchextraLogger; import com.gigigo.orchextra.domain.abstractions.device.OrchextraSDKLogLevel; import com.gigigo.orchextra.domain.abstractions.initialization.OrchextraManagerCompletionCallback; import com.gigigo.orchextra.domain.abstractions.initialization.OrchextraStatusAccessor; import com.gigigo.orchextra.domain.abstractions.initialization.StartStatusType; import com.gigigo.orchextra.domain.abstractions.lifecycle.AppRunningMode; import com.gigigo.orchextra.domain.abstractions.lifecycle.AppStatusEventsListener; import com.gigigo.orchextra.domain.model.entities.tags.CustomField; import com.gigigo.orchextra.domain.model.triggers.params.AppRunningModeType; import com.gigigo.orchextra.sdk.application.applifecycle.OrchextraActivityLifecycle; import com.gigigo.orchextra.sdk.model.CrmUserDomainToCrmUserSdkConverter; import com.gigigo.orchextra.sdk.scanner.ScannerManager; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import orchextra.javax.inject.Inject; public class OrchextraManager { public static final String ON_CREATE_METHOD = "onCreate"; private static OrchextraSDKLogLevel orchextraSDKLogLevel = (BuildConfig.DEBUG) ? 
OrchextraSDKLogLevel.ALL : OrchextraSDKLogLevel.NONE; private static OrchextraManager instance; private InjectorImpl injector; private OrchextraManagerCompletionCallback orchextraCompletionCallback; private String gcmSenderId; private long backgroundPeriodBetweenScan = BeaconBackgroundPeriodBetweenScan.LIGHT.getIntensity(); @Inject OrchextraActivityLifecycle orchextraActivityLifecycle; @Inject OrchextraTasksManager orchextraTasksManager; @Inject CrmUserDomainToCrmUserSdkConverter crmUserDomainToCrmUserSdkConverter; @Inject OrchextraStatusAccessor orchextraStatusAccessor; @Inject CrmUserController crmUserController; @Inject AppRunningMode appRunningMode; @Inject AppStatusEventsListener appStatusEventsListener; @Inject ScannerManager scannerManager; @Inject ImageRecognitionManager imageRecognitionManager; @Inject OrchextraLogger orchextraLogger; @Inject BeaconRangingScanner beaconRangingScanner; /** * Fist call to orchextra, it is compulsory call this for starting to do any sdk Stuff * * @param application * @param orchextraCompletionCallback */ public static void sdkInit(Application application, OrchextraManagerCompletionCallback orchextraCompletionCallback) { OrchextraManager.instance = new OrchextraManager(); OrchextraManager.instance.initOrchextra(application, orchextraCompletionCallback); } public static boolean checkInitMethodCall(Application application, OrchextraManagerCompletionCallback orchextraCompletionCallback) { boolean found = false; StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); for (StackTraceElement stackTraceElement : stackTraceElements) { if (stackTraceElement.getClassName().equals(application.getClass().getCanonicalName()) && stackTraceElement.getMethodName().equals(ON_CREATE_METHOD)) { found = true; break; } } if (!found) { showInitializationError(); if (orchextraCompletionCallback != null) { orchextraCompletionCallback.onError("Orchextra is NOT INITIALIZED CORRECTLY."); } } return found; } public static void sdkStart() { if (AndroidSdkVersion.hasJellyBean18()) { if (OrchextraManager.instance != null) { OrchextraManager.instance.startOrchextra(); } else { showInitializationError(); } } } /** * This method is called from client app in order to start application at one concrete moment, * this is not dependant on context neither app lifecycle, could be called in any moment. * * @param apiKey credentials * @param apiSecret credentials */ public static synchronized void changeCredentials(String apiKey, String apiSecret) { if (AndroidSdkVersion.hasJellyBean18()) { if (OrchextraManager.instance != null) { OrchextraManager.instance.changeOrchextraCredentials(apiKey, apiSecret); } else { showInitializationError(); } } } private static void showInitializationError() { for (int i = 0; i < 100; i++) { Log.e("Orchextra", "### ### ### ###"); Log.e("Orchextra", "PAY ATTENTION: Orchextra is NOT INITIALIZED correctly."); Log.e("Orchextra", "PAY ATTENTION: You HAVE TO initialize Orchextra on the onCreate method in the Application."); Log.e("Orchextra", "PAY ATTENTION: You MUST be call the initializeOrchextra method BEFORE you call the start method."); Log.e("Orchextra", "PAY ATTENTION: Otherwise, Orchextra doesn't work correctly and CAN CRASH YOUR APP."); Log.e("Orchextra", "### ### ### ###"); } } /** * Called for inform sdk about client app user information, useful for tacking segmentation about * users. 
This call can provokes call to configuration * * @param user information about client app user */ public static synchronized void bindUser(CrmUser user) { OrchextraManager orchextraManager = OrchextraManager.instance; if (AndroidSdkVersion.hasJellyBean18()) { if (orchextraManager != null) { CrmUserDomainToCrmUserSdkConverter crmUserDomainToCrmUserSdkConverter = orchextraManager.crmUserDomainToCrmUserSdkConverter; CrmUserController saveCrmUserController = orchextraManager.crmUserController; com.gigigo.orchextra.domain.model.entities.authentication.CrmUser crmUser = crmUserDomainToCrmUserSdkConverter.convertSdkUserToDomain(user); if (orchextraManager.orchextraStatusAccessor.isStarted()) { saveCrmUserController.saveUserAndReloadConfig(crmUser); } else { saveCrmUserController.saveUserOnly(crmUser); } } else { showInitializationError(); } } } /** * Called for set custom scheme receiver * * @param customSchemeReceiver custom scheme receiver */ public static synchronized void setCustomSchemeReceiver(CustomOrchextraSchemeReceiver customSchemeReceiver) { OrchextraModule orchextraModule = getOrchextraModule(); if (orchextraModule != null) { orchextraModule.setCustomSchemeReceiver(customSchemeReceiver); } } /** * Called when client app want to stop all orchextra proccess */ public static synchronized void sdkStop() { OrchextraManager orchextraManager = OrchextraManager.instance; if (orchextraManager != null && orchextraManager.orchextraStatusAccessor != null && orchextraManager.orchextraStatusAccessor.isStarted()) { orchextraManager.orchextraStatusAccessor.setStoppedStatus(); instance.stopOrchextraTasks(); } } /** * Internal sdk dependency injector * * @return dependency injector */ public static InjectorImpl getInjector() { if (OrchextraManager.instance != null) { return OrchextraManager.instance.injector; } else { showInitializationError(); } return null; } public static void setLogLevel(OrchextraLogLevel logLevel) { if (logLevel != null) { orchextraSDKLogLevel = logLevel.getSDKLogLevel(); } } public static OrchextraSDKLogLevel getLogLevel() { return orchextraSDKLogLevel; } private void stopOrchextraTasks() { orchextraTasksManager.stopAllTasks(); if (appRunningMode.getRunningModeType() == AppRunningModeType.BACKGROUND) { appStatusEventsListener.onBackgroundEnd(); } } /** * Fist call to orchextra, it is compulsory call this for starting to do any sdk Stuff * * @param app * @param completionCallback */ private void initOrchextra(Application app, OrchextraManagerCompletionCallback completionCallback) { orchextraCompletionCallback = completionCallback; // enabledOrchextraNotificationPush(app); if (AndroidSdkVersion.hasJellyBean18()) { initDependencyInjection(app.getApplicationContext(), completionCallback); initLifecyle(app); //initialize(); orchextraStatusAccessor.initialize(); } else { completionCallback.onInit(app.getString(R.string.ox_not_supported_android_sdk)); } } private void initDependencyInjection(Context applicationContext, OrchextraManagerCompletionCallback orchextraCompletionCallback) { OrchextraComponent orchextraComponent = DaggerOrchextraComponent.builder() .orchextraModule(new OrchextraModule(applicationContext, orchextraCompletionCallback)) .build(); injector = new InjectorImpl(orchextraComponent); orchextraComponent.injectOrchextra(OrchextraManager.instance); } private void start() { new Thread(getStartRunnable()).start(); } private Runnable getStartRunnable() { return new Runnable() { @Override public void run() { startSDK(); } }; } private void startSDK() { if 
(appRunningMode.getRunningModeType() == AppRunningModeType.FOREGROUND) { appStatusEventsListener.onForegroundStart(); } else if (appRunningMode.getRunningModeType() == AppRunningModeType.BACKGROUND) { appStatusEventsListener.onBackgroundStart(); } } private void startOrchextra() { if (orchextraStatusAccessor.hasCredentials()) { Exception exception = null; try { StartStatusType status = orchextraStatusAccessor.getOrchextraStatusWhenStartMode(); if (status == StartStatusType.SDK_READY_FOR_START) { start(); } } catch (SdkAlreadyStartedException alreadyStartedException) { orchextraLogger.log(alreadyStartedException.getMessage(), OrchextraSDKLogLevel.WARN); orchextraCompletionCallback.onInit(alreadyStartedException.getMessage()); } catch (SdkNotInitializedException notInitializedException) { exception = notInitializedException; } catch (SdkInitializationException initializationException) { exception = initializationException; exception.printStackTrace(); } finally { if (exception != null) { orchextraLogger.log(exception.getMessage(), OrchextraSDKLogLevel.ERROR); orchextraCompletionCallback.onError(exception.getMessage()); } } } else { //TODO Error No se han seteado las credenciales en el init } } /** * This method is called from client app in order to start application at one concrete moment, * this is not dependant on context neither app lifecycle, could be called in any moment. * * @param apiKey * @param apiSecret */ private void changeOrchextraCredentials(String apiKey, String apiSecret) { Exception exception = null; try { StartStatusType status = orchextraStatusAccessor.getOrchextraStatusWhenReinitMode(apiKey, apiSecret); if (status == StartStatusType.SDK_READY_FOR_START) { start(); } if (status == StartStatusType.SDK_WAS_ALREADY_STARTED_WITH_DIFERENT_CREDENTIALS) { start(); } } catch (SdkAlreadyStartedException alreadyStartedException) { orchextraLogger.log(alreadyStartedException.getMessage(), OrchextraSDKLogLevel.WARN); orchextraCompletionCallback.onInit(alreadyStartedException.getMessage()); } catch (SdkNotInitializedException notInitializedException) { exception = notInitializedException; } catch (SdkInitializationException initializationException) { exception = initializationException; exception.printStackTrace(); } finally { if (exception != null) { orchextraLogger.log(exception.getMessage(), OrchextraSDKLogLevel.ERROR); orchextraCompletionCallback.onError(exception.getMessage()); } } } @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) private void initLifecyle(Application app) { app.registerActivityLifecycleCallbacks(orchextraActivityLifecycle); } public static void openScannerView() { OrchextraManager orchextraManager = OrchextraManager.instance; OrchextraModule orchextraModule = getOrchextraModule(); if (orchextraModule != null) { ScannerManager scannerManager = orchextraManager.scannerManager; scannerManager.open(); } } private static OrchextraModule getOrchextraModule() { InjectorImpl injector = getInjector(); if (injector != null) { OrchextraComponent orchextraComponent = injector.getOrchextraComponent(); return orchextraComponent.getOrchextraModule(); } else { return null; } } public static void setImageRecognition(ImageRecognition imageRecognition) { if (OrchextraManager.instance != null && imageRecognition != null) { OrchextraManager.instance.imageRecognitionManager.setImplementation(imageRecognition); } else { GGGLogImpl.log("Orchextra is not initialized when image recognition was setted", LogLevel.ERROR); } } public static void startImageRecognition() { if 
(OrchextraManager.instance != null) { OrchextraManager.instance.imageRecognitionManager.startImageRecognition(); } else { GGGLogImpl.log("Orchextra is not initialized when image recognition was started", LogLevel.ERROR); } } public static void saveApiKeyAndSecret(String apiKey, String apiSecret) { if (AndroidSdkVersion.hasJellyBean18()) { saveCredentials(apiKey, apiSecret); } } private static void saveCredentials(String apiKey, String apiSecret) { if (!TextUtils.isEmpty(apiKey) && !TextUtils.isEmpty(apiSecret)) { OrchextraManager.instance.orchextraStatusAccessor.saveCredentials(apiKey, apiSecret); } } public static void setGcmSendId(Application application, String gcmSenderId) { if (OrchextraManager.instance != null) { OrchextraManager.instance.gcmSenderId = gcmSenderId; enabledOrchextraNotificationPush(application, gcmSenderId); } else { GGGLogImpl.log("Orchextra is not initialized when GCM Sender Id was setted", LogLevel.ERROR); } } /** * this method enabled or disabled the service with the intent-filter for RECEIVER the push, this is necesary * //because you must to declare always in the manifest file, you can not do it with code. Beacause that we * //keep the service OrchextraGcmListenerService and the intent filter in manifest, but weenabled or disabled * the service if the sender ID in Orchextra are not setted * * @param application * @param gcmSenderId */ private static void enabledOrchextraNotificationPush(Application application, String gcmSenderId) { int componentEnabledState; if (gcmSenderId == null) { componentEnabledState = PackageManager.COMPONENT_ENABLED_STATE_DISABLED; } else { componentEnabledState = PackageManager.COMPONENT_ENABLED_STATE_ENABLED; } ComponentName component = new ComponentName(application, OrchextraGcmListenerService.class); application.getPackageManager().setComponentEnabledSetting(component, componentEnabledState, PackageManager.DONT_KILL_APP); } public static String getGcmSenderId() { return OrchextraManager.instance.gcmSenderId; } public static void updateBackgroundPeriodBetweenScan(long intensity) { if (OrchextraManager.instance != null) { OrchextraManager.instance.backgroundPeriodBetweenScan = intensity; OrchextraManager.instance.beaconRangingScanner.updateBackgroundScanPeriodBetweenScans(intensity); } else { GGGLogImpl.log("Orchextra is not initialized when background period between scan was updated", LogLevel.ERROR); } } public static long getBackgroundPeriodBetweenScan() { return OrchextraManager.instance.backgroundPeriodBetweenScan; } public static List<String> getDeviceTags() { if (OrchextraManager.instance != null) { return OrchextraManager.instance.crmUserController.getDeviceTags(); } return null; } public static void setDeviceTags(List<String> deviceTagList) { if (OrchextraManager.instance != null) { OrchextraManager.instance.crmUserController.setDeviceTags(deviceTagList); } } public static List<String> getDeviceBusinessUnits() { if (OrchextraManager.instance != null) { return OrchextraManager.instance.crmUserController.getDeviceBusinessUnits(); } return null; } public static void setDeviceBusinessUnits(List<String> deviceBusinessUnits) { if (OrchextraManager.instance != null) { OrchextraManager.instance.crmUserController.setDeviceBusinessUnits(deviceBusinessUnits); } } public static List<String> getUserTags() { if (OrchextraManager.instance != null) { return OrchextraManager.instance.crmUserController.getUserTags(); } return null; } public static void setUserTags(List<String> userTagList) { if (OrchextraManager.instance != null) { 
OrchextraManager.instance.crmUserController.setUserTags(userTagList); } } public static List<String> getUserBusinessUnits() { if (OrchextraManager.instance != null) { return OrchextraManager.instance.crmUserController.getUserBusinessUnits(); } return null; } public static void setUserBusinessUnits(List<String> userBusinessUnits) { if (OrchextraManager.instance != null) { OrchextraManager.instance.crmUserController.setUserBusinessUnits(userBusinessUnits); } } public static Map<String, String> getUserCustomFields() { if (OrchextraManager.instance != null) { List<CustomField> userCustomFieldList = OrchextraManager.instance.crmUserController.getUserCustomFields(); Map<String, String> customFieldMap = new HashMap<>(); for (CustomField customField : userCustomFieldList) { customFieldMap.put(customField.getKey(), customField.getValue()); } return customFieldMap; } return null; } public static void setUserCustomFields(Map<String, String> userCustomFields) { if (OrchextraManager.instance != null) { List<CustomField> customFieldList = new ArrayList<>(); for (String key : userCustomFields.keySet()) { CustomField customField = new CustomField(); customField.setKey(key); customField.setValue(userCustomFields.get(key)); customFieldList.add(customField); } OrchextraManager.instance.crmUserController.setUserCustomFields(customFieldList); } } }
/**
 * Creates a new Sort instance to be used in a NoSQL query.
 *
 * @param name - the field name to be used in the sort process
 * @param type - the way to be sorted
 * @return a sort instance
 * @throws NullPointerException when there are null parameters
 */
public static Sort of(String name, SortType type) {
    requireNonNull(name, "name is required");
    requireNonNull(type, "type is required");
    return new Sort(name, type);
}
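A minimal usage sketch for the factory above. Hedged: it assumes a SortType enum exposing ASC and DESC constants, which is implied but not shown in this excerpt.

// Hypothetical usage of the Sort factory; SortType.ASC / SortType.DESC are assumed constants.
Sort byName = Sort.of("name", SortType.ASC);
Sort byPrice = Sort.of("price", SortType.DESC);

// Null arguments fail fast instead of surfacing later inside the query engine:
// Sort.of(null, SortType.ASC)  ->  NullPointerException("name is required")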
def is_comment_end(line):
    """Return True if the stripped line closes a C-style block comment without also opening one."""
    stripped = line.strip()
    return not stripped.startswith('/*') and stripped.endswith('*/')
Left Cardiac Sympathetic Denervation in Long QT Syndrome: Analysis of Therapeutic Nonresponders Background—Long QT syndrome (LQTS) is a potentially lethal but highly treatable cardiac channelopathy. Treatment options include pharmacotherapy, device therapy, and left cardiac sympathetic denervation (LCSD). Here, we sought to determine the characteristics of LQTS patients who have had ≥1 LQTS-related breakthrough cardiac event (BCE) after LCSD. Methods and Results—We performed a retrospective chart review for 52 consecutive patients (24 males; mean age at diagnosis, 10.0±10 years; mean QTc, 528±74 ms) with LQTS who underwent LCSD between 2005 and 2010 (mean age at LCSD, 14.1±10 years) and have been followed up for 3.6±1.3 years. A BCE was defined as either (1) an appropriate ventricular fibrillation-terminating implantable cardioverter defibrillator shock or (2) arrhythmogenic syncope, seizures, or aborted cardiac arrest after LCSD. Thirty-three patients (61%) had LCSD as primary prevention because of either high-risk assessment or β-blocker intolerance. So far, 12 of 52 (23%) patients (7 males) have experienced ≥1 BCE post-LCSD. The clinical phenotype of patients with BCEs was significantly more severe than that of patients without a BCE. No BCEs were seen in patients undergoing LCSD for β-blocker intolerance (0/12 versus 17/40; P<0.001). Conclusions—Although a marked reduction in the number of cardiac events is usually seen after LCSD, ≈50% of high-risk LQTS patients have experienced ≥1 post-LCSD breakthrough. Therefore, LCSD must not be viewed as curative or as an alternative to an implantable cardioverter defibrillator for high-risk patients. Prophylactic LCSD may provide another option to counter a suboptimal quality of life resulting from medication-related side effects.
President Trump has offered the job of national security adviser to retired Vice Adm. Robert Harward following Michael Flynn's resignation earlier this week, according to reports. It wasn't immediately clear if Harward, who previously served as a Navy SEAL and as deputy commander of U.S. Central Command, had accepted the position, according to Reuters. Trump was "a bit surprised when Harward responded by saying he needed a couple of days to think it over," Foreign Policy's Thomas Ricks reported. Flynn resigned from his post Monday after reports that he misled White House officials about discussing sanctions on Russia with the country's U.S. ambassador before Trump took office. Trump had named retired Army Lt. Gen. Keith Kellogg, who previously served as Flynn's chief of staff on the National Security Council, as his acting national security adviser.
import cv2 from PyQt5.QtCore import QTimer class Camera(object): def __init__(self): self.device = 0 self.cap = cv2.VideoCapture() self.timer = QTimer() def stop(self): self.timer.stop() self.cap.release() return True def pause(self): self.timer.stop() def begin(self): self.timer.start(20) def start(self, device): if self.cap.isOpened(): self.cap.release() self.timer.start(20) self.cap.open(device) self.device = device return True def restart(self): self.start(self.device) @property def is_pause(self): return self.cap.isOpened() and not self.timer.isActive() @property def is_open(self): return self.cap.isOpened() @property def frame(self): if self.is_open and not self.is_pause: return self.cap.read()[1] @property def frame_count(self): if self.is_open: return self.cap.get(cv2.CAP_PROP_FRAME_COUNT) @property def frame_pos(self): if self.is_open: return self.cap.get(cv2.CAP_PROP_POS_FRAMES) @frame_pos.setter def frame_pos(self, value): if self.is_open: self.cap.set(cv2.CAP_PROP_POS_FRAMES, value) @property def resolution(self): if self.is_open: return self.cap.get(cv2.CAP_PROP_FRAME_WIDTH), self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
/** * Generate an RRULE string for an array of GregorianCalendars, if possible. For now, we are * only looking for rules based on the same date in a month or a specific instance of a day of * the week in a month (e.g. 2nd Tuesday or last Friday). Indeed, these are the only kinds of * rules used in the current tzinfo database. * @param calendars an array of GregorianCalendar, set to a series of transition times in * consecutive years starting with the current year * @return an RRULE or null if none could be inferred from the calendars */ static RRule inferRRuleFromCalendars(GregorianCalendar[] calendars) { GregorianCalendar calendar = calendars[0]; if (calendar == null) return null; int month = calendar.get(Calendar.MONTH); int date = calendar.get(Calendar.DAY_OF_MONTH); int dayOfWeek = calendar.get(Calendar.DAY_OF_WEEK); int week = calendar.get(Calendar.DAY_OF_WEEK_IN_MONTH); int maxWeek = calendar.getActualMaximum(Calendar.DAY_OF_WEEK_IN_MONTH); boolean dateRule = false; boolean dayOfWeekRule = false; for (int i = 1; i < calendars.length; i++) { GregorianCalendar cal = calendars[i]; if (cal == null) return null; if (cal.get(Calendar.MONTH) != month) { return null; } else if (dayOfWeek == cal.get(Calendar.DAY_OF_WEEK)) { if (dateRule) { return null; } dayOfWeekRule = true; int thisWeek = cal.get(Calendar.DAY_OF_WEEK_IN_MONTH); if (week != thisWeek) { if (week < 0 || week == maxWeek) { int thisMaxWeek = cal.getActualMaximum(Calendar.DAY_OF_WEEK_IN_MONTH); if (thisWeek == thisMaxWeek) { week = -1; continue; } } return null; } } else if (date == cal.get(Calendar.DAY_OF_MONTH)) { if (dayOfWeekRule) { return null; } dateRule = true; } else { return null; } } if (dateRule) { return new RRule(month + 1, date); } return new RRule(month + 1, dayOfWeek, week); }
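A hedged sketch of how the helper above might be exercised. The enclosing class is not shown, so the call is written as a plain static reference, and the RRule constructors are assumed to be exactly the two used in the return statements. java.util.Calendar field resolution is used so that each year resolves to the second Sunday of March, i.e. a day-of-week-in-month rule rather than a fixed date.

// Transition times for four consecutive years, each falling on the second Sunday of March.
GregorianCalendar[] transitions = new GregorianCalendar[4];
for (int i = 0; i < transitions.length; i++) {
    GregorianCalendar cal = new GregorianCalendar();
    cal.clear();
    cal.set(Calendar.YEAR, 2024 + i);
    cal.set(Calendar.MONTH, Calendar.MARCH);
    cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
    cal.set(Calendar.DAY_OF_WEEK_IN_MONTH, 2);
    transitions[i] = cal;
}

// Expected to take the day-of-week branch, i.e. new RRule(month + 1, dayOfWeek, week),
// because the day of the month differs from year to year while the weekday rule holds.
RRule rule = inferRRuleFromCalendars(transitions);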
import numpy as np
import six
from scipy.optimize import curve_fit


def fit_response(partab, toolparams):
    m = toolparams['m']
    if 'weights' in partab.columns:
        sigmas = 1.0 / partab.weights.values
    else:
        sigmas = np.ones_like(partab.freq.values)
    # Phase lag as a function of angular frequency x, with drag coefficient d and spring constant k
    fit_delta = lambda x, d, k: np.arccos((k - m * x*x) / np.sqrt(x*x*d*d + (k - m*x*x)**2))
    popt_delta, pcov = curve_fit(fit_delta,
                                 partab.freq.values * 2 * np.pi,
                                 partab.delta.values / 180 * np.pi,
                                 sigma=sigmas,
                                 absolute_sigma=False,
                                 )
    d = abs(popt_delta[0])
    k = popt_delta[1]
    # Amplitude per unit drive, using the k and d obtained from the phase fit
    fit_ampl = lambda x, F0: F0 / np.sqrt((k - m*x*x)**2 + (x*x*d*d))
    popt_ampl, pcov = curve_fit(fit_ampl,
                                partab.freq.values * 2 * np.pi,
                                partab.ampl_m.values / partab.amp.values,
                                sigma=sigmas,
                                absolute_sigma=False,
                                )
    F0 = popt_ampl[0]
    six.print_('Inertial mass m:', m, 'kg')
    six.print_('Fit spring constant k (from phase angle):', k, 'N/m')
    six.print_('Fit drag coefficient d (from phase angle):', d, 'N s/m')
    six.print_('Fit force coefficient F0', F0, 'N/A')
    params = toolparams.copy()
    params.update(dict(k=k, d=d, F0=F0))
    return params
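For reference, the two lambdas above are the steady-state response of a driven damped harmonic oscillator written out explicitly. This is a restatement of the code, not extra physics: x stands for the angular frequency ω = 2πf, the measured phase is converted from degrees to radians before the fit, and the fitted amplitude is the ampl_m / amp ratio from the table:

\[
\delta(\omega) = \arccos\frac{k - m\omega^{2}}{\sqrt{\omega^{2}d^{2} + (k - m\omega^{2})^{2}}},
\qquad
A(\omega) = \frac{F_{0}}{\sqrt{(k - m\omega^{2})^{2} + \omega^{2}d^{2}}}.
\]

The phase fit pins down the spring constant k and the drag coefficient d first; the amplitude fit then only has the drive coefficient F0 left as a free parameter.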
class Circle {
    int r = 12;
    static float pi = 3.14f;

    public static void main(String[] args) {
        area();
        circum();
    }

    static void area() {
        Circle c = new Circle();
        System.out.println("Area:" + (pi) * (c.r) * (c.r));
    }

    static void circum() {
        Circle c = new Circle();
        System.out.println("Circumference:" + 2 * pi * c.r);
    }
}
<filename>hi_snex/src/asmjit/core/string.h // [AsmJit] // Machine Code Generation for C++. // // [License] // Zlib - See LICENSE.md file in the package. #ifndef _ASMJIT_CORE_STRING_H #define _ASMJIT_CORE_STRING_H #include "../core/support.h" #include "../core/zone.h" ASMJIT_BEGIN_NAMESPACE //! \addtogroup asmjit_support //! \{ // ============================================================================ // [asmjit::String] // ============================================================================ //! A simple non-reference counted string that uses small string optimization (SSO). //! //! This string has 3 allocation possibilities: //! //! 1. Small - embedded buffer is used for up to `kSSOCapacity` characters. //! This should handle most small strings and thus avoid dynamic //! memory allocation for most use-cases. //! //! 2. Large - string that doesn't fit into an embedded buffer (or string //! that was truncated from a larger buffer) and is owned by //! AsmJit. When you destroy the string AsmJit would automatically //! release the large buffer. //! //! 3. External - like Large (2), however, the large buffer is not owned by //! AsmJit and won't be released when the string is destroyed //! or reallocated. This is mostly useful for working with //! larger temporary strings allocated on stack or with immutable //! strings. class String { public: ASMJIT_NONCOPYABLE(String) //! String operation. enum Op : uint32_t { kOpAssign = 0, kOpAppend = 1 }; //! String format flags. enum FormatFlags : uint32_t { kFormatShowSign = 0x00000001u, kFormatShowSpace = 0x00000002u, kFormatAlternate = 0x00000004u, kFormatSigned = 0x80000000u }; //! \cond INTERNAL enum : uint32_t { kLayoutSize = 32, kSSOCapacity = kLayoutSize - 2 }; //! String type. enum Type : uint8_t { kTypeLarge = 0x1Fu, //!< Large string (owned by String). kTypeExternal = 0x20u //!< External string (zone allocated or not owned by String). }; union Raw { uint8_t u8[kLayoutSize]; uint64_t u64[kLayoutSize / sizeof(uint64_t)]; uintptr_t uptr[kLayoutSize / sizeof(uintptr_t)]; }; struct Small { uint8_t type; char data[kSSOCapacity + 1u]; }; struct Large { uint8_t type; uint8_t reserved[sizeof(uintptr_t) - 1]; size_t size; size_t capacity; char* data; }; union { uint8_t _type; Raw _raw; Small _small; Large _large; }; //! \endcond //! \name Construction & Destruction //! \{ inline String() noexcept : _small {} {} inline String(String&& other) noexcept { for (size_t i = 0; i < ASMJIT_ARRAY_SIZE(_raw.uptr); i++) _raw.uptr[i] = other._raw.uptr[i]; other._resetInternal(); } inline ~String() noexcept { reset(); } //! Reset the string into a construction state. ASMJIT_API Error reset() noexcept; //! \} //! \name Overloaded Operators //! \{ inline bool operator==(const char* other) const noexcept { return eq(other); } inline bool operator!=(const char* other) const noexcept { return !eq(other); } inline bool operator==(const String& other) const noexcept { return eq(other); } inline bool operator!=(const String& other) const noexcept { return !eq(other); } //! \} //! \name Accessors //! \{ inline bool isLarge() const noexcept { return _type >= kTypeLarge; } inline bool isExternal() const noexcept { return _type == kTypeExternal; } inline bool empty() const noexcept { return size() == 0; } inline size_t size() const noexcept { return isLarge() ? size_t(_large.size) : size_t(_type); } inline size_t capacity() const noexcept { return isLarge() ? _large.capacity : size_t(kSSOCapacity); } inline char* data() noexcept { return isLarge() ? 
_large.data : _small.data; } inline const char* data() const noexcept { return isLarge() ? _large.data : _small.data; } inline char* end() noexcept { return data() + size(); } inline const char* end() const noexcept { return data() + size(); } //! \} //! \name String Operations //! \{ //! Clear the content of the string. ASMJIT_API Error clear() noexcept; ASMJIT_API char* prepare(uint32_t op, size_t size) noexcept; ASMJIT_API Error _opString(uint32_t op, const char* str, size_t size = SIZE_MAX) noexcept; ASMJIT_API Error _opFormat(uint32_t op, const char* fmt, ...) noexcept; ASMJIT_API Error _opVFormat(uint32_t op, const char* fmt, va_list ap) noexcept; ASMJIT_API Error _opChar(uint32_t op, char c) noexcept; ASMJIT_API Error _opChars(uint32_t op, char c, size_t n) noexcept; ASMJIT_API Error _opNumber(uint32_t op, uint64_t i, uint32_t base = 0, size_t width = 0, uint32_t flags = 0) noexcept; ASMJIT_API Error _opHex(uint32_t op, const void* data, size_t size, char separator = '\0') noexcept; //! Replace the string content to a string specified by `data` and `size`. If //! `size` is `SIZE_MAX` then it's considered null-terminated and its length //! will be obtained through `strlen()`. ASMJIT_API Error assignString(const char* data, size_t size = SIZE_MAX) noexcept; //! Replace the current content by a formatted string `fmt`. template<typename... Args> inline Error assignFormat(const char* fmt, Args&&... args) noexcept { return _opFormat(kOpAssign, fmt, std::forward<Args>(args)...); } //! Replace the current content by a formatted string `fmt` (va_list version). inline Error assignVFormat(const char* fmt, va_list ap) noexcept { return _opVFormat(kOpAssign, fmt, ap); } //! Replace the current content by a single `c` character. inline Error assignChar(char c) noexcept { return _opChar(kOpAssign, c); } //! Replace the current content by `c` character `n` times. inline Error assignChars(char c, size_t n) noexcept { return _opChars(kOpAssign, c, n); } //! Replace the current content by a formatted integer `i` (signed). inline Error assignInt(int64_t i, uint32_t base = 0, size_t width = 0, uint32_t flags = 0) noexcept { return _opNumber(kOpAssign, uint64_t(i), base, width, flags | kFormatSigned); } //! Replace the current content by a formatted integer `i` (unsigned). inline Error assignUInt(uint64_t i, uint32_t base = 0, size_t width = 0, uint32_t flags = 0) noexcept { return _opNumber(kOpAssign, i, base, width, flags); } //! Replace the current content by the given `data` converted to a HEX string. inline Error assignHex(const void* data, size_t size, char separator = '\0') noexcept { return _opHex(kOpAssign, data, size, separator); } //! Append string `str` of size `size` (or possibly null terminated). inline Error appendString(const char* str, size_t size = SIZE_MAX) noexcept { return _opString(kOpAppend, str, size); } template<typename... Args> inline Error appendFormat(const char* fmt, Args&&... args) noexcept { return _opFormat(kOpAppend, fmt, std::forward<Args>(args)...); } //! Append a formatted string `fmt` (va_list version). inline Error appendVFormat(const char* fmt, va_list ap) noexcept { return _opVFormat(kOpAppend, fmt, ap); } //! Append a single `c` character. inline Error appendChar(char c) noexcept { return _opChar(kOpAppend, c); } //! Append `c` character `n` times. inline Error appendChars(char c, size_t n) noexcept { return _opChars(kOpAppend, c, n); } ASMJIT_API Error padEnd(size_t n, char c = ' ') noexcept; //! Append `i`. 
inline Error appendInt(int64_t i, uint32_t base = 0, size_t width = 0, uint32_t flags = 0) noexcept { return _opNumber(kOpAppend, uint64_t(i), base, width, flags | kFormatSigned); } //! Append `i`. inline Error appendUInt(uint64_t i, uint32_t base = 0, size_t width = 0, uint32_t flags = 0) noexcept { return _opNumber(kOpAppend, i, base, width, flags); } //! Append the given `data` converted to a HEX string. inline Error appendHex(const void* data, size_t size, char separator = '\0') noexcept { return _opHex(kOpAppend, data, size, separator); } //! Truncate the string length into `newSize`. ASMJIT_API Error truncate(size_t newSize) noexcept; ASMJIT_API bool eq(const char* other, size_t size = SIZE_MAX) const noexcept; inline bool eq(const String& other) const noexcept { return eq(other.data(), other.size()); } //! \} //! \name Internal Functions //! \{ //! Resets string to embedded and makes it empty (zero length, zero first char) //! //! \note This is always called internally after an external buffer was released //! as it zeroes all bytes used by String's embedded storage. inline void _resetInternal() noexcept { for (size_t i = 0; i < ASMJIT_ARRAY_SIZE(_raw.uptr); i++) _raw.uptr[i] = 0; } inline void _setSize(size_t newSize) noexcept { if (isLarge()) _large.size = newSize; else _small.type = uint8_t(newSize); } //! \} }; // ============================================================================ // [asmjit::StringTmp] // ============================================================================ //! Temporary string builder, has statically allocated `N` bytes. template<size_t N> class StringTmp : public String { public: ASMJIT_NONCOPYABLE(StringTmp<N>) //! Embedded data. char _embeddedData[Support::alignUp(N + 1, sizeof(size_t))]; //! \name Construction & Destruction //! \{ inline StringTmp() noexcept { _resetToTemporary(); } inline void _resetToTemporary() noexcept { _large.type = kTypeExternal; _large.capacity = ASMJIT_ARRAY_SIZE(_embeddedData) - 1; _large.data = _embeddedData; _embeddedData[0] = '\0'; } //! \} }; // ============================================================================ // [asmjit::FixedString] // ============================================================================ //! A fixed string - only useful for strings that would never exceed `N - 1` //! characters; always null-terminated. template<size_t N> union FixedString { enum : uint32_t { kNumU32 = uint32_t((N + sizeof(uint32_t) - 1) / sizeof(uint32_t)) }; char str[kNumU32 * sizeof(uint32_t)]; uint32_t u32[kNumU32]; //! \name Utilities //! \{ inline bool eq(const char* other) const noexcept { return strcmp(str, other) == 0; } //! \} }; //! \} ASMJIT_END_NAMESPACE #endif // _ASMJIT_CORE_STRING_H
A simulation study on the choice of regularization parameter in ℓ2-norm ultrasound image restoration Ultrasound image deconvolution has been widely investigated in the literature. Among the existing approaches, the most common are based on ℓ2-norm regularization (or Tikhonov optimization) or the well-known Wiener filtering. However, the success of the Wiener filter in practical situations largely depends on the choice of the regularization hyperparameter. An appropriate choice is necessary to guarantee the balance between data fidelity and smoothness of the deconvolution result. In this paper, we revisit different approaches for automatically choosing this regularization parameter and compare them in the context of ultrasound image deconvolution via Wiener filtering. Two synthetic ultrasound images are used in order to compare the performances of the addressed methods.
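As a hedged illustration of the approach the abstract describes (our notation, not taken from the paper): writing Y(f) for the spectrum of the observed image, H(f) for the spectrum of the point-spread function, and μ for the regularization hyperparameter whose automatic selection is being compared, the ℓ2-regularized (Wiener-type) deconvolution has the closed form

\[
\hat{X}(f) = \frac{H^{*}(f)\,Y(f)}{\lvert H(f)\rvert^{2} + \mu}.
\]

A small μ favors data fidelity and amplifies noise where |H(f)| is small; a large μ favors smoothness. Striking that balance automatically is exactly what the parameter-selection rules under comparison are meant to do.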
/// The associated weight value specified; defaults to 1. pub fn weight(&self) -> u32 { use ReplacementValue::*; match self { String(..) => 1, Weighted(.., w) => *w, } }
def _handleSIGTERM( signalType: int, frame: FrameType, sim_session: SimulatorSession ) -> None: log.info("Handling SIGTERM.") sim_session.unregister() log.info("SIGTERM Handled, exiting.") sys.exit()
fn create () {}
fn get () {}
fn update () {}
fn delete () {}
fn connect () {}
fn disconnect () {}
fn send () {}
fn receive () {}
/* Parse a zFCP host device entry. */ static void parse_zfcp_host(struct fw_file *f, struct fw_dehdr *de, config_t config, struct util_list *objects) { struct fw_zfcp_host *zfcp_host = (struct fw_zfcp_host *) de; struct ccw_devid devid; struct device *dev; char *id; if (!check_de_size(f, de, sizeof(struct fw_zfcp_host))) return; if (!check_iodevid(f, &zfcp_host->id_flags, &zfcp_host->id)) return; io_to_ccw(&devid, &zfcp_host->id); id = ccw_devid_to_str(&devid); dev = add_device(f, &zfcp_host_subtype, id, config, objects); free(id); if (dev && zfcp_host->hdr.len > sizeof(struct fw_zfcp_host)) parse_settings(f, zfcp_host->settings, dev, config); }
import torch
from torch import nn
from torch.nn import functional as F

from qytPytorch.modules.conv_layer import GlobalMaxPool1d


class TextCNN(nn.Module):
    """TextCNN text classification.

    @params:
        vocab_size - vocabulary size.
        labels_size - number of labels.
        embed_dim - embedding dimension.
        kernel_sizes - convolution kernel sizes.
        num_channels - number of output channels per kernel size.
    """

    def __init__(self, vocab_size, labels_size, embed_dim=50, kernel_sizes=(1, 3, 5), num_channels=(100, 100, 100)):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, embed_dim)
        self.dropout = nn.Dropout(0.5)
        self.decoder = nn.Linear(sum(num_channels), labels_size)
        # The max-over-time pooling layer has no weights, so a single instance can be shared
        self.pool = GlobalMaxPool1d()
        self.convs = nn.ModuleList()
        # Create one 1-D convolution layer per kernel size
        for c, k in zip(num_channels, kernel_sizes):
            self.convs.append(nn.Conv1d(in_channels=embed_dim, out_channels=c, kernel_size=k))

    def forward(self, inputs):
        # Embedding lookup yields a tensor of shape (batch size, seq len, embedding dim)
        embeddings = self.embedding(inputs)  # (batch, seq_len, embed_size)
        # Conv1d expects the channel dimension (here the embedding dimension) before the time dimension
        embeddings = embeddings.permute(0, 2, 1)
        # Each 1-D convolution followed by max-over-time pooling yields a tensor of shape
        # (batch size, channels, 1); squeeze the last dimension and concatenate along the channel dimension
        encoding = torch.cat([self.pool(F.relu(conv(embeddings))).squeeze(-1) for conv in self.convs], dim=1)
        # Apply dropout, then the fully connected decoder to obtain the outputs
        outputs = self.decoder(self.dropout(encoding))
        return outputs
/**
 * Checks that the producer and consumer threads work together correctly through the shared queue.
 *
 * @throws Exception if any error occurs.
 */
@Test
public void whenCheckWorkTwoThreads() throws Exception {
    int cycle = 10;
    ProducerConsumer pc = new ProducerConsumer();
    pc.getProducer().start();
    pc.getConsumer().start();
    while (cycle-- != 0) {
        Thread.sleep(10);
        if (cycle == 0) {
            pc.setFinish(true);
        }
    }
    pc.getProducer().join();
    pc.getConsumer().join();
}
AUG 1 2014 BY JAY COLE On Thursday, Tesla reported 2nd quarter earnings that mostly beat analyst expectations, earning 11 cents a share (ex-items) on revenues of $858 million dollars while delivering 7,579 Model S sedans (full details on the report can be found here). But historically, the real highlights happen on the conference call after the earnings are released – when CEO Elon Musk typically gives his candid view of how things are unfolding inside the company as well as his outlook to the future. So without further ado, here is our highlights from that call! Guidance on 100,000 vehicle run by end of 2015 The Tesla CEO was asked about the company’s 100,000 run rate projection by the end of 2015 and how much was coming from China, splits etc. “We’re expecting that to be roughly split between the X and S. So we’re talking roughly 1,000 units a week of each.” Mr. Musk also notes that looking at the wider demographics, SUVs are slightly outselling sedans in the wider market, and he expects that may also be the case for the Model X over the S at some point. Worldwide Stores/Service Centers by end of 2015 The CEO notes that he is just “speaking off the cuff”, but expects “100 in China alone” and more than 300 worldwide On Hydrogen/Fuel Cell Tech Often Elon Musk likes to give his opinion on hydrogen, so when the Morgan Stanley analyst asked him the question of why the industry seems to be pushing hydrogen so much, and if it was a “bs move” by CARB to rewrite EVs rules – or did they actually believe in the tech? The CEO let it fly again: “As you know, I am not the biggest proponent of hydrogen…but really if you take a theoretically optimal fuel cell car and compare that to a car in production – a battery electric car; on key metrics of mass, volume complexity, cost and refilling infrastructure, it’s just…it is a loss. So it’s the best case in our opinion, the best case fuel cell car (and obviously the fuel cell cars are far from best case), cannot beat the current case electric car, so well why even try it? That just makes no sense. Success is not one of the possible outcomes.” Follow up question from the analyst: “Why are they (CARB) doing this? That is why I asked if it’s BS. Is this just kind of a diversionary tactic or do you think they are just not up on what is up?” “We are quite confused about this.”- Musk “It does not make a lot of sense. I mean we didn’t even touch on the infrastructure challenges that hydrogen brings, but building out that infrastructure is substantially more expensive than building out any electric vehicle infrastructure. And there’s almost none of it today.” – JB Straubel (Tesla CTO) “…also another thing – hydrogen is an energy carrier not an energy source. So you have to create the hydrogen which is really inefficient because you would either have to crack a hydrocarbon or electrolyzed water” – Musk “Yeah – which is super inefficient, and then hydrogen has very low density. So if you are going to pick it as a chemical energy storage mechanism, the hydrogen is a terrible choice. At least you know, methane, CH4 lock up the hydrogen with one carbon atom or something. Quality and Model S Drivetrain Given the recent scrutiny over some drivetrain failure/replacements in the Model S, you just knew the question ‘What’s up with that?’ would be posed to the Tesla CEO “We definitely had some quality issues in the beginning for the early serial number of cars, because we’re just basically figuring out how to make the Model S. 
And I think we’ve addressed almost all of those early (? – garbled) production cars – I mean not all – but the vast majority have been addressed in cars that are being produced today. And we’re also getting better at diagnosing what’s wrong, because in some cases we, particularly with respect to the drive unit, we think that something is wrong with the drive unit but it’s actually something wrong with another part of the car. And then we’d replace the drive unit and that wouldn’t solve the problem because the drive unit was not the problem.” “…there’s a bunch of things like that which are just mis-diagnosis of the problems that we’ve obviously addressed.” “There are a few items that will need – a fair number of drivetrains will need to be serviced. It’s actually related to one (problem) to the differential, and we need to shim the differential. It doesn’t require drive unit replacement, it just requires a technician to insert a shim. We are going to have to do that on a fair number of cars – but that is like a 50 cent shim. So it’s really; I wouldn’t assume that there’s going to be some vast number of drivetrains that will need to replaced, but there is several service bulletins that we will be instituting, many of which we’ve already have to address the issue.” After speaking about weekly quality control meetings, Mr. Musk was asked if he was happy with the quality control systems in place and his team. To which the CEO said there had been some trouble, but that they were pretty much there now. “I think at this point we’ve got a excellent quality control team…and we weren’t there in the beginning but I’m confident that we’re there now. I mean our aspiration is on the order magnitude better quality than any other car. And we will keep at it unrelentingly until we get there.” Gigafactory: Nevada, Ground-breaking Even before Tesla’s disclosure of breaking ground in Nevada, the public had been made aware of the site’s existence and subsequent work stoppage. The question was posed to Mr Musk about the ‘why’ and if the company has a “drop-dead” date for being up and running. “We have essentially completed the pad, the construction pad for the Gigafactory in Nevada. So in terms of creating a flat pad and getting the rocky foundation that is substantially complete. There’s still a little bit of work ongoing. We are going to be doing something similar in one or two other states – which is something I previously said we’d do because I think it makes sense to have multiple things going in parallel. Before we actually go to the next stage of pouring a lot of concrete though we want to make sure we have things sorted out at the state level – that the incentives are there that makes sense,…But I do want to emphasize that Tesla is not going to go for a deal that is unfair to the state or unfair to Tesla. We want to make sure it’s compelling for all parties. I think on the Nevada side, at this point the ball is on the court of the governor and the state legislature.” Model X Demand Knowing he is unlikely to get a straight numerical answer on Model X demand, the analyst from JP Morgan asks the Tesla CEO to put Model X backlogged orders in context of what the company was seeing with the Model S in 2012. And what the demos were as to location and if many already owned a Model S. “I guess what is important to appreciate for the X, that, let’s just put the orders in context. There are no cars available for a test drive. There is no information about the cars in our stores because we are only selling the S. 
In fact, if somebody comes in who wants to buy the X, we try to convince them to buy the S, so we anti-sell it. We don’t really provide all that much information or details about the car or provide a definitive date on when you can get it. Despite all that, there is huge demand from around the world for the X. Now I think that actually people are right, even though they don’t … really have enough information to know they’re right, but they are.” Will Gigafactory Batteries Have A Different Chemistry? Improvements? “There are some improvements to the chemistry as well as improvements to the geometry of the cell – so we would expect to see an energy density improvement, and of course a significant cost improvement.” – Musk “The cathode and anode materials themselves are next generation, so I mean we’re seeing improvements in the maybe 10% to 15% range on the chemistry itself (energy density) – and then we’re also customizing the cell shape and size to further improve the cost efficiency of the cell and the packaging efficiency.” – JB Straubel (CTO) “We’ve done a lot of modeling trying to figure out what’s the optimal cell size. And it’s really not much, it is not a lot different from where we are right now, but we’re sort of in the roughly 10% more diameter; maybe 10% more height. But then since the cubic function effectively ends up being, just from a geometry standpoint, probably a third more energy for the cell,well maybe 30%ish – then the actual energy density per unit mass increases” “Fundamentally the chemistry of what is inside is what really defines the cost position though. It’ is often debated what shape and size, but at this point we are developing basically what we feel is the optimum shape and size for the best cost efficiency for an automotive cell.” – JB Straubel (CTO) Costing on the battery cells is further discussed, and Mr. Musk notes that he would be “disappointed if it took us 10 years to get to $100 a kilowatt-hour pack.” Big hat tip to InsideEVs contributor Josh Bryant for his help in getting this out!
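For context on the "cubic function" remark in the cell-size answer above, the quoted dimensions work out roughly as follows for a cylindrical cell (back-of-the-envelope arithmetic on the quoted figures, not a Tesla statement):

\[
\frac{V_{\text{new}}}{V_{\text{old}}} = \left(\frac{d_{\text{new}}}{d_{\text{old}}}\right)^{2}\frac{h_{\text{new}}}{h_{\text{old}}} \approx 1.1^{2} \times 1.1 \approx 1.33,
\]

i.e. roughly a third more volume per cell from geometry alone, consistent with the "30%ish" figure quoted, before the separate 10% to 15% chemistry-level energy-density improvements Straubel mentions.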
<reponame>hiyouga/PY-Learning
import numpy as np
import json

with open('docs.json', 'r') as f:
    docs = json.loads(f.read())

link_dict = {}
ind2link = []
link_num = 0
for v in docs.values():
    for ck in v.keys():
        if ck not in ind2link:
            link_dict[ck] = link_num
            ind2link.append(ck)
            link_num += 1

markov = np.zeros((link_num, link_num))
for k, v in docs.items():
    for ck, cv in v.items():
        markov[link_dict[k]][link_dict[ck]] += cv

for i in range(link_num):
    if np.sum(markov[i]) == 0:
        markov[i] += 1.0 / link_num
    else:
        markov[i] /= np.sum(markov[i])

alpha, eps = 0.85, 1e-15
K = 20
trans_mat = alpha * np.transpose(markov) + (1-alpha) / link_num

P_old = np.zeros((link_num))
P_old[0] = 1
P = np.matmul(trans_mat, P_old)
while np.linalg.norm(P - P_old) > eps:
    P_old = P
    P = np.matmul(trans_mat, P_old)

solution = []
for i in range(link_num):
    solution.append((P[i], ind2link[i]))
solution.sort(reverse = True)

out_dict = {}
for v, l in solution[0:K]:
    out_dict[l] = v

with open('pagerank.json', 'w') as f:
    f.write(json.dumps(out_dict, sort_keys = False, indent=4))

print('Completed!')
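The while loop above is the standard PageRank power iteration. In matrix form this is a restatement of the code, with M the row-stochastic matrix markov built from the click counts, N = link_num, α = 0.85 and ε = 1e-15:

\[
\mathbf{p}^{(t+1)} = \alpha M^{\top}\mathbf{p}^{(t)} + \frac{1-\alpha}{N}\mathbf{1},
\qquad
\text{iterate until } \lVert \mathbf{p}^{(t+1)} - \mathbf{p}^{(t)} \rVert_{2} < \varepsilon.
\]

Adding the constant (1 - α)/N to every entry of the transition matrix, as trans_mat does, is equivalent to the teleportation term above because each iterate p^(t) sums to one.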
More than any other technical design or social institution, the railway stands for modernity. No competing form of transport, no subsequent technological innovation, no other industry has wrought or facilitated change on the scale that has been brought about by the invention and adoption of the railway. Peter Laslett once referred to “the world we have lost”—the unimaginably different character of things as they once were. Try to think of a world before the railway and the meaning of distance and the impediment it imposed when the time it took to travel from, for example, Paris to Rome—and the means employed to do so—had changed little for two millennia. Think of the limits placed on economic activity and human life chances by the impossibility of moving food, goods, and people in large numbers or at any speed in excess of ten miles per hour; of the enduringly local nature of all knowledge, whether cultural, social, or political, and the consequences of such compartmentalization. Above all, think of how different the world looked to men and women before the coming of the railways. In part this was a function of restricted perception. Until 1830, few people knew what unfamiliar landscapes, distant towns, or foreign lands looked like because they had no opportunity or reason to visit them. But in part, too, the world before the railways appeared so very different from what came afterward and from what we know today because the railways did more than just facilitate travel and thereby change the way the world was seen and depicted. They transformed the very landscape itself. Railways were born of the industrial revolution—the steam engine itself was already sixty years old when it acquired wheels in 1825, and without the coal that it helped pump to the surface the steam engine could not work. But it was the railways that gave life and impetus to that same industrial revolution: they were the largest consumers of the very goods whose transportation they facilitated. Moreover, most of the technical challenges of industrial modernity—long-distance telegraphic communication, the harnessing of water, gas, and electricity for domestic and industrial use, urban and rural drainage, the construction of very large buildings, the gathering and moving of human beings in large numbers—were first met and overcome by railway companies. Trains—or, rather, the tracks on which they ran—represented the conquest of space. Canals and roads might be considerable technical achievements; but they had almost always been the extension, through physical effort or technical improvement, of an ancient or naturally occurring resource: a river, a valley, a path, or a pass. Even Telford and MacAdam did little more than pave over existing roads. Railway tracks reinvented the landscape. They cut through hills, they burrowed under roads and canals, they were carried across…
<gh_stars>0 // Author(s): <NAME>, <NAME> // Copyright: see the accompanying file COPYING or copy at // https://github.com/mCRL2org/mCRL2/blob/master/COPYING // // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // /// \file mcrl2/atermpp/detail/aterm_list_iterator.h /// \brief Iterator for term_list. #ifndef MCRL2_ATERMPP_ATERM_LIST_ITERATOR_H #define MCRL2_ATERMPP_ATERM_LIST_ITERATOR_H #include "mcrl2/atermpp/aterm.h" #include "mcrl2/atermpp/detail/global_aterm_pool.h" namespace atermpp { /// \cond INTERNAL_DOCS namespace detail { template <class Term> class _aterm_list; } /// \endcond /// \brief Iterator for term_list. template <typename Term> class term_list_iterator { template<class T> friend class term_list; protected: detail::_aterm_list<Term>* m_list; /// \brief Constructor from an aterm which must be a list. /// \param l A sequence of terms term_list_iterator(detail::_aterm* l) : m_list(static_cast<detail::_aterm_list<Term>*>(l)) { assert(l->function()==detail::g_term_pool().as_list() || l->function()==detail::g_term_pool().as_empty_list()); } public: typedef Term value_type; typedef Term& reference; typedef Term* pointer; typedef ptrdiff_t difference_type; typedef std::forward_iterator_tag iterator_category; /// \brief Default constructor. term_list_iterator() : m_list(nullptr) {} /// \brief Copy constructor. /// \param other A sequence of terms term_list_iterator(const term_list_iterator& other) : m_list(other.m_list) { } /// \brief Assignment /// \param other A sequence of terms term_list_iterator& operator=(const term_list_iterator& other) { m_list=other.m_list; return *this; } /// \brief Dereference operator on an iterator const Term& operator*() const { assert(m_list->function()==detail::g_term_pool().as_list()); return m_list->head(); } /// Arrow operator on an iterator const Term* operator->() const { assert(m_list->function()==detail::g_term_pool().as_list()); return &m_list->head(); } /// \brief Prefix increment operator on iterator. term_list_iterator& operator++() { assert(m_list->function() == detail::g_term_pool().as_list()); m_list = static_cast<detail::_aterm_list<Term>*>(detail::address(m_list->tail())); return *this; } /// \brief Postfix increment operator on iterator. term_list_iterator operator++(int) { assert(m_list->function() == detail::g_term_pool().as_list()); const term_list_iterator temp = *this; m_list = static_cast<detail::_aterm_list<Term>*>(detail::address(m_list->tail())); return temp; } /// \brief Equality of iterators. /// \param other The iterator with which this iterator is compared. /// \return true if the iterators point to the same term_list. bool operator ==(const term_list_iterator& other) const { return m_list == other.m_list; } /// \brief Inequality of iterators. /// \param other The iterator with which this iterator is compared. /// \return true if the iterators do not point to the same term_list. bool operator !=(const term_list_iterator& other) const { return m_list != other.m_list; } /// \brief Comparison of iterators. /// \param other The iterator with which this iterator is compared. /// \return true if the pointer to this termlist is smaller than the other pointer. bool operator <(const term_list_iterator& other) const { return m_list < other.m_list; } /// \brief Comparison of iterators. /// \param other The iterator with which this iterator is compared. /// \return true if the iterators point to the same term_list. 
bool operator <=(const term_list_iterator& other) const { return m_list <= other.m_list; } /// \brief Comparison of iterators. /// \param other The iterator with which this iterator is compared. /// \return true if the pointer to this termlist is larger than the other pointer. bool operator >(const term_list_iterator& other) const { return m_list > other.m_list; } /// \brief Comparison of iterators. /// \param other The iterator with which this iterator is compared. /// \return true if the pointer to this termlist is larger than or equal to the other pointer. bool operator >=(const term_list_iterator& other) const { return m_list >= other.m_list; } }; } // namespace atermpp #endif // MCRL2_ATERMPP_ATERM_LIST_ITERATOR_H
SAN FRANCISCO — Here is how the venture capital game used to be played around here: A friend calls a friend who knows a guy. A meeting is taken. Wine is drunk (at, say, Madera lounge in Menlo Park). A business plan? Sure, whatever. But how does it feel? This is decidedly not how Google, that apotheosis of our data-driven economy, wants to approach the high-stakes business of investing in the next, well, Google. Unlike venture capitalists of old, the company’s rising V.C. arm focuses not on the art of the deal, but on the science of the deal. First, data is collected, collated, analyzed. Only then does the money start to flow. Google Ventures and its take on investing represent a new formula for the venture capital business, and skeptics say it will never capture the chemistry — or, perhaps, the magic — of Silicon Valley. Would computer algorithms have bankrolled David Packard or Steve Jobs? Foreseen the folly of Pets.com? The data provides one answer to those questions, at least for now: Since its founding in 2009, Google Ventures has stood out in an industry that, for all its star power, has been dealing its investors a bad hand. In recent years, an investor would have done better with a ho-hum mutual fund that tracks the stock market than with some splashy V.C. fund. Venture capital funds posted an annual average return of 6.9 percent from 2002 to 2012, trailing major stock indexes, according to Cambridge Associates.
/* * MIT License * * Copyright (c) 2019 <NAME> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * * IMPORTANT: This source code is intended to serve training information purposes only. * Please make sure to review our IdCloud documentation, including security guidelines. */ package com.gemalto.mobileprotector.sample.provisioning; import com.gemalto.idp.mobile.core.net.TlsConfiguration; /** * Values needed for Token provisioning. */ public class ProvisioningConfig { /** * The URL of the Enrollment API endpoint, e.g: https://api/provisioning/pp */ private static final String PROVISIONING_URL = "https://provisioning-url"; /** * Identifier for the EPS server’s public RSA key. */ private static final String RSA_KEY_ID = "eps-public-key"; /** * The RSA modulus of the EPS public key (on provisioning protocol level, not transport level). 
*/ private static final byte[] RSA_KEY_MODULUS = {(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00}; /** * The RSA exponent of the EPS public key (on provisioning protocol level, not transport level). 
*/ private static final byte[] RSA_KEY_EXPONENT = {(byte) 0x00, (byte) 0x00, (byte) 0x00}; /** * Retrieves the provisioning URL. * * @return Provisioning URL. */ public static String getProvisioningUrl() { return PROVISIONING_URL; } /** * Retrieves the RSA key modulus. * * @return RSA key modulus. */ public static byte[] getRsaKeyModulus() { return RSA_KEY_MODULUS.clone(); } /** * Retrieves the RSA key exponent. * * @return RSA key exponent. */ public static byte[] getRsaKeyExponent() { return RSA_KEY_EXPONENT.clone(); } /** * This configuration will allow to weaken TLS configuration for debug purposes. * It’s not allowed to modify in release mode. * * @return TLS configuration. */ public static TlsConfiguration getTlsConfiguration() { return new TlsConfiguration(TlsConfiguration.Permit.SELF_SIGNED_CERTIFICATES, TlsConfiguration.Permit.HOSTNAME_MISMATCH, TlsConfiguration.Permit.INSECURE_CONNECTIONS); } /** * Retrieves the RSA key ID. * * @return RSA key ID. */ public static String getRsaKeyId() { return RSA_KEY_ID; } }
//decide to support simultaneous events public class DFA { private LinkedHashMap<DFAState, DFATransition> transitions = new LinkedHashMap<DFAState, DFATransition>(); public DFAState I; private ArrayList<Atom> atoms = new ArrayList(); private DFAState violation = DFAState.get("violation"); public DFA(NFA nfa){ TreeSet<Integer> ISet = new TreeSet(nfa.I); I = createDFAState(ISet); for(LinkedHashSet<Atom> a : nfa.sigma){ if(a.size() == 1){ atoms.add(a.iterator().next()); } } //we add the null Atom that will correspond to the default transition atoms.add(null); gen(nfa, ISet); //determinization may introduce states that must always go to violation //so we perform an extra cleanup here collapseViolations(); rename(); } private void gen(NFA nfa, TreeSet<Integer> nfaStates){ DFAState s = createDFAState(nfaStates); HashMap<DFAState, TreeSet<Integer>> workList = new HashMap(); DFATransition dtrans = new DFATransition(); //for each atom in atoms, we need to see what the next compound state will be for(Atom a : atoms){ TreeSet<Integer> states = new TreeSet(); for(Integer nfaState : nfaStates){ NFATransition trans = nfa.get(nfaState); if(trans == null) continue; states.addAll(trans.getSat(a)); } DFAState dest = createDFAState(states); dtrans.put(a, dest); transitions.put(s, dtrans); workList.put(dest, states); } //for each destination in the worklist, we need recursively call gen //if it is not already in the dfa. This must be performed last //because we don't add the state transition pair to the DFA //until we know all destinations. The alternative to this would //be to keep a map of states we have already considered... I think this //is slightly more efficient. Either way we need one data structure //to hold the states, and one to hold the nfa states set that generated //that state. Alternatively, we would recompute the DFAState every time... //I don't think it's worth comparing the performance. 
Pretty sure //recomputing the DFAState would be worse for(DFAState dest : workList.keySet()){ if(transitions.containsKey(dest)) continue; if(dest == violation) { transitions.put(dest, new DFATransition()); continue; } gen(nfa, workList.get(dest)); } } private DFAState createDFAState(TreeSet<Integer> states){ if(states.size() == 0) return violation; String name = "s"; for(Integer i : states){ name += i + "_"; } return DFAState.get(name); } private void collapseViolations(){ LinkedHashSet<DFAState> violations = new LinkedHashSet<DFAState>(); violations.add(violation); findViolations(I, new LinkedHashSet<DFAState>(), violations); removeViolations(violations); } private void findViolations(DFAState s, LinkedHashSet<DFAState> seen, LinkedHashSet<DFAState> violations){ //depth first traversal for(DFAState destination : transitions.get(s).values()){ if(!seen.contains(destination)){ seen.add(destination); findViolations(destination, seen, violations); } } //at this point it is conventient for violation to be in the violations set //when we go to remove the violations this will no longer be the case boolean allViolations = true; for(DFAState destination : transitions.get(s).values()){ if(!violations.contains(destination)){ allViolations = false; break; } } if(allViolations) violations.add(s); } private void removeViolations(LinkedHashSet<DFAState> violations){ ArrayList<DFAState> toRemove = new ArrayList(); //remove violation now because we don't want to remove it from the map violations.remove(violation); for(DFAState s : transitions.keySet()){ if(violations.contains(s)){ toRemove.add(s); } else{ DFATransition trans = transitions.get(s); for(Atom a : trans.keySet()){ if(violations.contains(trans.get(a))){ trans.put(a, violation); } } } } for(DFAState s : toRemove){ transitions.remove(s); } } private void rename(){ Numbering<DFAState> stateNum = new Numbering(); I = DFAState.get("s" + stateNum.get(I)); ArrayList<DFAState> toRemove = new ArrayList(transitions.size()); ArrayList<DFAState> states = new ArrayList(transitions.keySet()); states.remove(violation); for(DFAState s : states){ toRemove.add(s); DFAState newS = DFAState.get("s" + stateNum.get(s)); DFATransition trans = transitions.get(s); DFATransition newT = new DFATransition(); for(Atom a : trans.keySet()){ DFAState dest = trans.get(a); if(dest == violation) newT.put(a, violation); else newT.put(a, DFAState.get("s" + stateNum.get(dest))); } transitions.put(newS, newT); } for(DFAState s : toRemove){ transitions.remove(s); } } public String toString(){ //put the initial state first DFATransition trans = transitions.get(I); if(trans == null) return I + "[\n default violation\n]\n\nviolation[\n]\n"; String ret = I.toString() + trans; for(DFAState key : transitions.keySet()){ //don't repeat the initial state if(key == I) continue; ret += "\n" + key + transitions.get(key); } return ret; } }
/* * General tests for things not covered directly in the specification. */ public class MQTT5Test extends MQTT5TestSupport { private static final Logger log = Logger.getLogger(MQTT5Test.class); public MQTT5Test(String protocol) { super(protocol); } /* * Ensure that the broker adds a timestamp on the message when sending via MQTT */ @Test(timeout = DEFAULT_TIMEOUT) public void testTimestamp() throws Exception { final String DESTINATION = RandomUtil.randomString(); createJMSConnection(); JMSContext context = cf.createContext(); JMSConsumer consumer = context.createConsumer(context.createQueue(DESTINATION)); long time = System.currentTimeMillis(); MqttClient producer = createPahoClient(RandomUtil.randomString()); producer.connect(); producer.publish(DESTINATION, new byte[0], 1, false); producer.disconnect(); producer.close(); Message m = consumer.receive(200); assertNotNull(m); assertTrue(m.getJMSTimestamp() > time); context.close(); } /* * Trying to reproduce error from https://issues.apache.org/jira/browse/ARTEMIS-1184 */ @Test(timeout = DEFAULT_TIMEOUT) public void testMaxMessageSize() throws Exception { // this doesn't work with websockets because the websocket frame size is too low Assume.assumeTrue(protocol.equals(TCP)); final String TOPIC = RandomUtil.randomString(); // subtract a little to leave room for the header final int SIZE = MQTTUtil.MAX_PACKET_SIZE - 48; StringBuilder builder = new StringBuilder(SIZE); for (int i = 0; i < SIZE; i++) { builder.append("="); } byte[] bytes = builder.toString().getBytes(StandardCharsets.UTF_8); final CountDownLatch latch = new CountDownLatch(1); MqttClient consumer = createPahoClient("consumer"); consumer.setCallback(new DefaultMqttCallback() { @Override public void messageArrived(String topic, MqttMessage message) throws Exception { assertEqualsByteArrays(bytes.length, bytes, message.getPayload()); latch.countDown(); } }); consumer.connect(); consumer.subscribe(TOPIC, 1); MqttClient producer = createPahoClient(RandomUtil.randomString()); producer.connect(); producer.publish(TOPIC, bytes, 1, false); producer.disconnect(); producer.close(); Wait.assertEquals(1L, () -> getSubscriptionQueue(TOPIC).getMessagesAdded(), 2000, 100); assertTrue(latch.await(30, TimeUnit.SECONDS)); consumer.disconnect(); consumer.close(); } /* * There is no normative statement in the spec about supporting user properties on will messages, but it is implied * in various places. 
*/ @Test(timeout = DEFAULT_TIMEOUT) public void testWillMessageProperties() throws Exception { final byte[] WILL = RandomUtil.randomBytes(); final String[][] properties = new String[10][2]; for (String[] property : properties) { property[0] = RandomUtil.randomString(); property[1] = RandomUtil.randomString(); } // consumer of the will message MqttClient client1 = createPahoClient("willConsumer"); CountDownLatch latch = new CountDownLatch(1); client1.setCallback(new DefaultMqttCallback() { @Override public void messageArrived(String topic, MqttMessage message) { int i = 0; for (UserProperty property : message.getProperties().getUserProperties()) { assertEquals(properties[i][0], property.getKey()); assertEquals(properties[i][1], property.getValue()); i++; } latch.countDown(); } }); client1.connect(); client1.subscribe("/topic/foo", 1); // consumer to generate the will MqttClient client2 = createPahoClient("willGenerator"); MqttProperties willMessageProperties = new MqttProperties(); List<UserProperty> userProperties = new ArrayList<>(); for (String[] property : properties) { userProperties.add(new UserProperty(property[0], property[1])); } willMessageProperties.setUserProperties(userProperties); MqttConnectionOptions options = new MqttConnectionOptionsBuilder() .will("/topic/foo", new MqttMessage(WILL)) .build(); options.setWillMessageProperties(willMessageProperties); client2.connect(options); client2.disconnectForcibly(0, 0, false); assertTrue(latch.await(2, TimeUnit.SECONDS)); } /* * It's possible for a client to change their session expiry interval via the DISCONNECT packet. Ensure we respect * a new session expiry interval when disconnecting. */ @Test(timeout = DEFAULT_TIMEOUT) public void testExpiryDelayOnDisconnect() throws Exception { final String CONSUMER_ID = RandomUtil.randomString(); MqttAsyncClient consumer = createAsyncPahoClient(CONSUMER_ID); MqttConnectionOptions options = new MqttConnectionOptionsBuilder() .sessionExpiryInterval(300L) .build(); consumer.connect(options).waitForCompletion(); MqttProperties disconnectProperties = new MqttProperties(); disconnectProperties.setSessionExpiryInterval(0L); consumer.disconnect(0, null, null, MQTTReasonCodes.SUCCESS, disconnectProperties).waitForCompletion(); Wait.assertEquals(0, () -> getSessionStates().size(), 5000, 10); } /* * If the Will flag is false then don't send a will message even if the session expiry is > 0 */ @Test(timeout = DEFAULT_TIMEOUT) public void testWillFlagFalseWithSessionExpiryDelay() throws Exception { // enable send-to-dla-on-no-route so that we can detect an errant will message on disconnect server.createQueue(new QueueConfiguration("activemq.notifications")); server.createQueue(new QueueConfiguration("DLA")); server.getAddressSettingsRepository().addMatch("#", new AddressSettings().setSendToDLAOnNoRoute(true).setDeadLetterAddress(SimpleString.toSimpleString("DLA"))); MqttClient client = createPahoClient("willGenerator"); MqttConnectionOptions options = new MqttConnectionOptionsBuilder() .sessionExpiryInterval(1L) .build(); client.connect(options); client.disconnectForcibly(0, 0, false); scanSessions(); assertEquals(0, server.locateQueue("DLA").getMessageCount()); } }
Here’s a question for you hybrid comic book/professional sports fans – what usually happens when rumors of a struggling head coach being put on the chopping block, or a star player being put on the trade market, begin surfacing in the sports media? The answer is that those rumors have a way of often being … or becoming … true.

And what happens when a team owner gives their head coach or manager the dreaded “vote of confidence”? The head coach usually has about 14 to 21 days to polish his “I’m looking forward to spending more time with my family” speech.

Rumors like this in the sports world tend to become self-fulfilling prophecies because a.) where there is smoke there is often fire; and b.) there are human beings with very human egos on each end of the rumors. Reports of bad marriages in sports almost always lead to divorce, because no matter how tenuous the reports or rumors were in the first place, having a situation that inherently generates uncertainty, bruised feelings and mistrust play out in the public arena produces a toxicity that’s hard to undo.

And this is part of the reason it’s nearly inevitable Marvel Studios and Sony will eventually work out some deal to bring Spider-Man into the Marvel Cinematic Universe. The now-infamous Sony hack has revealed both parties have entertained the idea at least enough to take meetings on the possibility. While such talks may or may not be commonplace between parties involved in complicated rights agreements like the one between Sony and Marvel Studios, they usually don’t become as public as these have. And now neither Marvel nor especially Sony can do much in the way of unringing that bell, and they can’t do anything about tempering their audience's newly reset expectations. But we’ll get back to that in a moment.

First let’s take a brief moment to address the reasons Marvel and Sony should strike a deal. We’ve already seen several comic book press sites argue why Marvel doesn’t “need” Spider-Man, either in the Marvel Cinematic Universe in general or for Captain America: Civil War specifically … and they’d be absolutely right. No studio that can turn the Guardians of the ‘f-ing Galaxy into a $730m global franchise needs Spider-Man. But this isn’t an issue of “need.” It’s an issue of “want.” And Marvel should want Spider-Man. Why?

Q: Out of this past year’s smash critical hits Guardians of the Galaxy and Captain America: The Winter Soldier, and then the tepidly received Amazing Spider-Man 2, which of these films earned the most in the increasingly important foreign market?

A: It was that last, tepidly received one.

As Marvel Studios prepares to deal with life after MCU-glue Robert Downey Jr., and perhaps to give the original Avengers trio of Captain America, Thor, and Iron Man a well-earned sabbatical from solo films, the future of the MCU is not guaranteed. Sure, they’ll be printing money through the Infinity Wars during the latter half of this decade, and while there’s every reason to believe Marvel can successfully launch Black Panther and Captain Marvel solo films, whether or not those characters can anchor the larger MCU as it begins approaching Phase 4 remains to be seen.

Spider-Man is perhaps the ideal solution to ensure an MCU anchor remains in place after RDJ. As licensing revenue demonstrates, Spider-Man is not only Marvel’s top character around the globe, he’s all of comic books’ top character around the globe.

Not to mention he’d do wonders to inject some youth into the not-exactly-spry MCU power brokers. And Marvel themselves have indicated they well know how Spidey works best. Yes, Spider-Man is a solo icon. But even icons can get tired when they have to go it alone, at least cinematically. It happened to Warner Bros.’ Batman, and, at least in the U.S., it’s happening to Spider-Man as we speak. But place Spider-Man in a larger superhero universe where he gets to play off other characters, and a chemical reaction occurs that is greater than the sum of its individual parts – a reaction Marvel right now uses to great effect in their homegrown Ultimate Spider-Man animated series, which is all about Spider-Man interacting with the rest of the Marvel Universe. In 2014 and beyond this is arguably his ideal big-screen role, particularly with him already having been the subject of five solo films since 2002.

The introduction of Spider-Man into the MCU would have untold benefits for the brand, and even Marvel knows their brand still has lots of room to grow around the world. As hardcore comic book fans we're all well versed in the delineation of the Marvel properties between Marvel Studios, Sony and 20th Century Fox. Those distinctions, however, become blurred among the broader moviegoing populace, and again, particularly in foreign territories. Striking a deal with Sony to get Spider-Man back under their creative control would be a positive that cannot be overstated for the overall Marvel brand, which does exist separately from, and in addition to, the Marvel Studios brand.

As for Sony, the franchise’s domestic receipts are absolutely headed in the wrong direction, and momentum like that is difficult to reverse. While a Sinister Six film and the crafting of their own Spidey-centric shared universe might have seemed like the logical move prior to the performance of Amazing 2, in hindsight Sony now has to be reevaluating. Heck, as it stands right now a Sinister Six film runs the risk of being perceived as the lesser, second-to-the-punch supervillain ensemble film with an alliterative two-word title starting with ‘S’ in the latter half of 2016 alone. And pushing Spider-Man 3 back two years to an unspecified date in 2018 didn’t look like a positive development even before the hack revelations.

Sony has a 'buzz' problem on their hands that two years will only exacerbate, and now that their conversations with Marvel have been unearthed in the public arena, they’re going to be living both in the shadow of renewed fan expectations of a Spider-Man/MCU dream team-up and under the public microscope of having to figure out how to get the wheels back on a tiring franchise when the obvious and best answer is now under everyone’s noses.

Which brings us back full circle. Remember, there are people behind the current and planned future incarnations of the Spider-Man movie franchise – executives, producers (lots of them), writers, potential directors. Not to mention actors. All of these people know what we all do: a clear path to a win-win scenario exists (and may have already been walked), and getting the public back on board with anything less than the dream scenario will be next to impossible. Convincing anyone involved that Sony plans a long-term, go-it-alone future for Spider-Man in which they, and not Marvel Studios, retain creative control is a stunt Sony may not be able to pull off.

The cat's out of the bag … the train has left the station … Spider-Man is probably on his way over to the MCU.

Sure, the two sides will likely play it out for a while longer, trying to maintain their leverage and get the best deal for themselves they can, but right now it's quite possibly not a matter of if, but simply a matter of how and when.
/** * Rapture {@link UiPluginDescriptor} for {@code coreui-plugin}. * * @since 3.0 */ @Named @Singleton @Priority(Integer.MAX_VALUE - 100) // after nexus-rapture public class UiPluginDescriptorImpl extends UiPluginDescriptorSupport { public UiPluginDescriptorImpl() { super("coreui-plugin"); setNamespace("NX.coreui"); setConfigClassName("NX.coreui.app.PluginConfig"); } }
/* Runs NUM_THREADS copies of WorkerThread in parallel.  The address of a
 * shared volatile AtomicInt32 counter is passed to each thread. */
static void CheckAtomicityUnderConcurrency(void) {
  volatile AtomicInt32 counter = 0;
  pthread_t threads[NUM_THREADS];
  int ii;

  workers_begin = 0;
  for (ii = 0; ii < ARRAY_SIZE(threads); ++ii)
    CHECK_OK(pthread_create_check_eagain(&threads[ii], NULL,
                                         &WorkerThread, (void*) &counter));
  ANNOTATE_HAPPENS_BEFORE(&workers_begin);
  ANNOTATE_IGNORE_WRITES_BEGIN();
  workers_begin = 1;
  ANNOTATE_IGNORE_WRITES_END();
  for (ii = 0; ii < ARRAY_SIZE(threads); ++ii)
    CHECK_OK(pthread_join(threads[ii], NULL));
  EXPECT_EQ(ATOMIC_ITERATIONS * ARRAY_SIZE(threads), counter);
}
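WorkerThread itself is not part of this excerpt. As a hedged sketch only, assuming each worker spins until workers_begin is raised and then performs ATOMIC_ITERATIONS atomic increments on the shared counter (which is what the EXPECT_EQ above implies), it could look roughly like the following; AtomicIncrement is a placeholder name for whatever atomic-add primitive the real project provides:

/* Hypothetical sketch only; the real WorkerThread is not shown here. */
static void* WorkerThread(void* arg) {
  volatile AtomicInt32* counter = (volatile AtomicInt32*) arg;
  int i;

  /* Wait for the starting gun so the increments from all threads overlap.
   * The real test may annotate or synchronize this read differently. */
  while (workers_begin == 0) {
    /* busy-wait */
  }

  for (i = 0; i < ATOMIC_ITERATIONS; ++i) {
    AtomicIncrement(counter, 1);  /* assumed atomic add on AtomicInt32 */
  }
  return NULL;
}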