Dataset schema:
repo: string (lengths 26 to 115)
file: string (lengths 54 to 212)
language: string (2 classes)
license: string (16 classes)
content: string (lengths 19 to 1.07M)
https://github.com/WinstonMDP/math
https://raw.githubusercontent.com/WinstonMDP/math/main/exers/f.typ
typst
#import "../cfg.typ": * #show: cfg $ "Find" lim_(x -> +0) x^x $ $lim_(x -> +0) x^x = lim_(x -> +0) e^(x ln x) = lim_(x -> +0) e^(e^(ln x) ln x)$ $lim_(x -> +0) e^(ln x) ln x = -lim_(x -> +0) (-ln x)/(e^(-ln x)) = -lim_(x -> +oo) x/e^x = 0$ $lim_(x -> +0) e^(-(-e^(ln x) ln x)) = lim_(x -> +0) e^(-x) = 1$
https://github.com/piepert/philodidaktik-hro-phf-ifp
https://raw.githubusercontent.com/piepert/philodidaktik-hro-phf-ifp/main/src/parts/ephid/beurteilen_bewerten/bedeutung_schulnoten.typ
typst
Other
#import "/src/template.typ": * == Bedeutung der Schulnoten #set par(justify: false) #grid(columns: 3, column-gutter: 0.5em, row-gutter: 1em, strong[Note], [#h(3cm)], strong[Bedeutung], [1], [sehr gut], [ Die Leistung #underline[entspricht den Anforderungen in besonderem Maße], d.h. sie erbringt mehr als ursprünglich erwartet wurde. ], [2], [gut], [ Die Leistung #underline[entspricht voll den Anforderungen], die Aufgaben wurde ausführlich und inhaltlich korrekt bearbeiet. ], [3], [befriedigend], [ Die Leistung #underline[entspricht allgemein den Anforderungen], jedoch treten geringe Lücken oder Fehler auf. ], [4], [ausreichend], [ Die Leistung #underline[entspricht im Ganzen noch den Anforderungen], aber wird jedoch durch Fehler und inhaltliche Lücken beeinträchtigt. ], [5], [mangelhaft], [ Die Leistung #underline[entspricht nicht den Anforderungen]. Es häufen sich inhaltliche Fehler und die Darstellung ist lückenhaft. ], [6], [ungenügend], [ Die Leistung #underline[entspricht nicht den Anforderungen]. Durch gravierende inhaltliche Fehler entsteht keine vermittelte Information. ])
https://github.com/DarrenKwonDev/resume
https://raw.githubusercontent.com/DarrenKwonDev/resume/master/suhun_kwon_cv_eng.typ
typst
//////////////////////////// // global settings //////////////////////////// #let default_font_size = 10pt #let name_size = 12pt #let personal_info_size = 10pt // https://typst.app/docs/reference/layout/page/ #set page( paper: "a4", margin: 1cm, numbering: "1 / 1", ) // english version #set text( font: "Times New Roman", size: default_font_size, cjk-latin-spacing: none, ) // korean version #set text( font: "Apple SD Gothic Neo", size: default_font_size ) #set heading(level: 1, supplement: none) #set heading(level: 2, supplement: none) #let sectionHeader = (title) => [ #align(left)[ #set text(size: section_size) == #title #v(-0.2cm) #line(length: 100%, stroke: 1pt + black) ] ] #let boxText = (txt) => [ #box( stroke: 1pt + rgb("#F3F4F6"), fill: rgb("#F3F4F6"), outset: 3pt, radius: 3pt, )[ #text(weight: "bold")[ #text(txt) ] ] ] //////////////////////////// // top of cv //////////////////////////// #align(center)[ #set text(size: name_size) = SUHUN KWON ] #v(0cm) #align(center)[ #set text(size: personal_info_size) Seoul, South Korea ] #v(0cm) #align(center)[ #set text(size: personal_info_size) #boxText("<EMAIL>") #text(" / ") #boxText("github.com/darrenkwondev") #text(" / ") #boxText("https://darrenkwondev.github.io") ] //////////////////////////// // intro (optional) //////////////////////////// #set quote(block: true) #quote[ Study and develop tech to make a business impact. #linebreak() But also focus on low-level details to avoid pitfalls of leaky abstraction. ] //////////////////////////// // sections related helpers //////////////////////////// #let section_size = 11pt #let sectionHeader = (title) => [ #align(left)[ #set text(size: section_size) == #title #v(-0.2cm) #line(length: 100%, stroke: 1pt + black) ] ] // justify-content: space-between 와 같은 기능은 없음. // 양쪽 정렬을 위해서 grid의 왼쪽은 align left로, 오른쪽은 align right로 설정하는게 최선. 
// grid : https://typst.app/docs/reference/layout/grid #let educationEntity = (title, subtitle, where, when) => [ #grid(columns: (2.5fr, 1fr), align(left)[ *#title* #linebreak() #subtitle ], align(right)[ #where #linebreak() #when ] ) ] #let careerHeader = (title, subtitle, department, when) => [ #grid(columns: (2.5fr, 1fr), align(left)[ *#title* #linebreak() #subtitle ], align(right)[ #department #linebreak() #when ] ) ] //////////////////////////// // sections //////////////////////////// #sectionHeader[Technical Skills] - PL - C/C++ : socket programming, parallelism, concurrency programming, makefile, CMake - javascript/typescript : express, react, redux, styled-components, gulp, storybook, webpack, svgr - python : ruff, poetry, pandas, numpy, scikit-learn, fastapi, uvloop, sqlalchemy, mypy, isort - Infra - Database : postgresql, mongodb, sqlite, redis - OS, virtualization: linux, containerization(mainly docker) - server components : nginx, let's encrpyt, cfssl - observability : linux performance metrics - performance : flamegraph - cloud native : k8s - aws : vpc, ec2, s3, cloudfront, route53, lambda - gcp : cloud run, bigquery - etc - shell scripts and tools : bash - mark up language: latex, typst, html - git, git hook, pre-commit, husky, lint-staged //////////////////////////// // Career Experience //////////////////////////// #sectionHeader[Career Experience] #careerHeader( "<NAME>", "Software Engineer", "development dept", "2021.09-2023.02") - DataOps - Operated a proxy server for collecting clickstream data - Architected and managed servers for data processing and computing business metrics - Web client - Removed legacy bundle code (Neutrino) and rewrote it using Webpack, thereby enhancing DX (Development Experience) - Progressively migrated to TypeScript, replacing about 80% of the code - Developed and deployed a design system using Storybook, Rollup, and SVGR (#link("https://www.npmjs.com/package/typed-design-system")[npm link]) //////////////////////////// // Education //////////////////////////// #sectionHeader[Education] #educationEntity( "Naver Connect Foundation", "boostcamp AI Tech, Recommender System", "Seoul, South Korea", "2023.03-2023.08" ) #educationEntity( "Seoul National University", "B.S in Venture Business Management & Korean literature", "Seoul, South Korea", "2014.03-2022.09" ) //////////////////////////// // Personal Projects //////////////////////////// #let projectBox = (contents) => [ #box( stroke: 1pt + rgb("#F3F4F6"), inset: 4pt, radius: 4pt, width: 95%, // 차지할 수 있는 영역의 95%만. 
100%면 상자끼리 딱 맞아 떨어져버림 )[ #text(contents) ] ] #sectionHeader[Personal Projects] #grid(columns: (1fr, 1fr), align(left)[ #projectBox()[ *2d game engine* : ECS pattern based event driven game engine core - C++, SDL2, lua(binding) - #link("https://github.com/DarrenKwonDev/simple_2d_game_engine") ] #projectBox()[ *ko-fuzzy* : korean consonant matching, and fuzzy search - korean regex, tsup, typescript - #link("https://github.com/DarrenKwonDev/ko-fuzzy") ] #projectBox()[ *style-journey* : personalize fashion recommendation service - fastapi, docker, airflow, nginx, postgresql, s3 - #link("https://github.com/Lv2-Recsys-01/styl-backend") ] ], align(left)[ #projectBox()[ *redis-like server* : redis-like server implementation - C/C++, poll multiplexing base event loop - #link("https://github.com/DarrenKwonDev/redis-like") ] #projectBox()[ *other trivial projects* - fuze : one on one english tutor matching service - react, react-spring, s3 - cineps : cinephiles web community - Next.js, nginx, express, mongodb - edu-popkorn : korean learning app by video clips - flutter ] ] ) //////////////////////////// // OSS Contributions //////////////////////////// // #sectionHeader[OSS Contributions] //////////////////////////// // Other Experiences //////////////////////////// #sectionHeader[Other Experiences] #grid(columns: (1fr, 1fr), align(left)[ - Graduated from the PSWC Accelerating Program by the Korea Venture Business Association - Successfully completed the government-supported Preliminary Startup Package program with distinction ], align(left)[ - Provided advisory support and was successfully accepted into the TIPS (Tech Incubator Program for Startup) - SQLD (SQL Developer) certification ] )
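A comment in this résumé notes (translated) that Typst has no equivalent of CSS `justify-content: space-between`, and that the best option is a grid whose left cell is left-aligned and whose right cell is right-aligned; that is the pattern behind `educationEntity` and `careerHeader`. A minimal Typst sketch of the same idea; the helper name `spaceBetween` is illustrative, not part of the original file:

```typst
// Typst has no flexbox-style "space-between"; emulate it with a two-column
// grid whose cells are aligned to opposite edges.
#let spaceBetween(left-part, right-part) = grid(
  columns: (1fr, 1fr),
  align(left)[#left-part],
  align(right)[#right-part],
)

#spaceBetween[*Company* \ Software Engineer][Seoul, South Korea \ 2021.09-2023.02]
```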
https://github.com/PhotonQuantum/UofTNotes
https://raw.githubusercontent.com/PhotonQuantum/UofTNotes/master/src/CSC2108H/LEC0101_CourseStructure.typ
typst
#import "/sty.typ": * #show: template.with( title: [Automated Reasoning with ML: Course Structure], short_title: [CSC2108H LEC0101], description: [ Notes based on lectures for CSC 2108H\ (Automated Reasoning with Machine Learning)\ at the University of Toronto by Professor <NAME>, Fall 2024 ], date: datetime(year: 2024, month: 09, day: 09), ) = Grading / Participation: $10%$ / Assignment: $15%$ (solver-aided theorem proving) / Paper + QA: $15%$ (15 mins pre) / Project: $60%$ - Proposal: $15%$ - Presentation: $20%$ - Report: $25%$ At most 3 students per project Late submission: 15% off per day = Schedule - Week 1-4 - SAT & SMT - Program Reasoning - Theorem Proving - Week 5-10 - ML for SAT/SMT - Formal methods for ML - ML for code - Auto formalization - Reasoning with LLM - Neuro-symbolic systems = Presentation $tilde.eq 8$ presentations/week == Preparation - Start 2 weeks in advance - Post on Ed to inform on the paper - Meet TA and talk about this 1 week in advance - Prepare a video in advance = Timeline / Assignment: Oct 1st - Oct 28th / Project: Nov 5th = Tasks - 60+ papers, need to read 1 carefully (presentation) - get hands dirty: playing with solvers, DS+SYS experience, debugging required
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/fletcher/0.2.0/src/main.typ
typst
Apache License 2.0
#import calc: floor, ceil, min, max #import "utils.typ": * #import "layout.typ": * #import "draw.typ": * #import "marks.typ": * /// Draw a labelled node in an arrow diagram. /// /// - pos (point): Dimensionless "elastic coordinates" `(x, y)` of the node, /// where `x` is the column and `y` is the row (increasing upwards). The /// coordinates are usually integers, but can be fractional. /// /// See the `fletcher.diagram()` options to control the physical scale of elastic /// coordinates. /// /// - label (content): Node content to display. /// - inset (length, auto): Padding between the node's content and its bounding /// box or bounding circle. If `auto`, defaults to the `node-inset` option of /// `fletcher.diagram()`. /// - outset (length, auto): Margin between the node's bounds to the anchor /// points for connecting edges. /// - shape (string, auto): Shape of the node, one of `"rect"` or `"circle"`. If /// `auto`, shape is automatically chosen depending on the aspect ratio of the /// node's label. /// - stroke (stroke): Stroke of the node. Defaults to the `node-stroke` option /// of `fletcher.diagram()`. /// - fill (paint): Fill of the node. Defaults to the `node-fill` option of /// `fletcher.diagram()`. /// - defocus (number): Strength of the "defocus" adjustment for connectors /// incident with this node. If `auto`, defaults to the `node-defocus` option /// of `fletcher.diagram()` . #let node( pos, label, inset: auto, outset: auto, shape: auto, stroke: auto, fill: auto, defocus: auto, ) = { assert(type(pos) == array and pos.len() == 2) if type(label) == content and label.func() == circle { panic(label) } (( class: "node", pos: pos, label: label, inset: inset, outset: outset, shape: shape, stroke: stroke, fill: fill, defocus: defocus, ),) } #let CONN_ARGUMENT_SHORTHANDS = ( "dashed": (dash: "dashed"), "dotted": (dash: "dotted"), "double": (extrude: (-1.3, +1.3), mark-scale: 120%), "triple": (extrude: (-2.5, 0, +2.5), mark-scale: 150%), "crossing": (crossing: true), ) #let interpret-edge-args(args) = { let named-args = (:) if args.named().len() > 0 { panic("Unexpected named argument(s):", ..args.named().keys()) } let pos = args.pos() if pos.len() >= 1 and type(pos.at(0)) != str { named-args.label = pos.remove(0) } if (pos.len() >= 1 and type(pos.at(0)) == str and pos.at(0) not in CONN_ARGUMENT_SHORTHANDS) { named-args.marks = pos.remove(0) } for arg in pos { if type(arg) == str and arg in CONN_ARGUMENT_SHORTHANDS { named-args += CONN_ARGUMENT_SHORTHANDS.at(arg) } else { panic( "Unrecognised argument " + repr(arg) + ". Must be one of:", CONN_ARGUMENT_SHORTHANDS.keys(), ) } } named-args } #let parse-arrow-shorthand(str) = { let caps = ( "": (none, none), ">": ("tail", "head"), ">>": ("twotail", "twohead"), "<": ("head", "tail"), "<<": ("twohead", "twotail"), "|": ("bar", "bar"), "o": ("circle", "circle"), "O": ("bigcircle", "bigcircle"), ) let lines = ( "-": (:), "=": CONN_ARGUMENT_SHORTHANDS.double, "==": CONN_ARGUMENT_SHORTHANDS.triple, "--": CONN_ARGUMENT_SHORTHANDS.dashed, "..": CONN_ARGUMENT_SHORTHANDS.dotted, ) let cap-selector = "(|<|>|<<|>>|hook[s']?|harpoon'?|\||o|O)" let line-selector = "(-|=|--|==|::|\.\.)" let match = str.match(regex("^" + cap-selector + line-selector + cap-selector + "$")) if match == none { panic("Failed to parse", str) } let (from, line, to) = match.captures ( marks: ( if from in caps { caps.at(from).at(0) } else { from }, if to in caps { caps.at(to).at(1) } else { to }, ), ..lines.at(line), ) } /// Draw a connecting line or arc in an arrow diagram. 
/// /// - from (elastic coord): Start coordinate `(x, y)` of connector. If there is /// a node at that point, the connector is adjusted to begin at the node's /// bounding rectangle/circle. /// - to (elastic coord): End coordinate `(x, y)` of connector. If there is a /// node at that point, the connector is adjusted to end at the node's bounding /// rectangle/circle. /// /// - ..args (any): The connector's `label` and `marks` named arguments can also /// be specified as positional arguments. For example, the following are equivalent: /// ```typc /// edge((0,0), (1,0), $f$, "->") /// edge((0,0), (1,0), $f$, marks: "->") /// edge((0,0), (1,0), "->", label: $f$) /// edge((0,0), (1,0), label: $f$, marks: "->") /// ``` /// /// - label-pos (number): Position of the label along the connector, from the /// start to end (from `0` to `1`). /// /// #stack( /// dir: ltr, /// spacing: 1fr, /// ..(0, 0.25, 0.5, 0.75, 1).map(p => fletcher.diagram( /// cell-size: 1cm, /// edge((0,0), (1,0), p, "->", label-pos: p)) /// ), /// ) /// - label-sep (number): Separation between the connector and the label anchor. /// /// With the default anchor (`"bottom"`): /// #fletcher.diagram( /// debug: 2, /// cell-size: 8mm, /// { /// for (i, s) in (-5pt, 0pt, .4em, .8em).enumerate() { /// edge((2*i,0), (2*i + 1,0), s, "->", label-sep: s) /// } /// }) /// /// With `label-anchor: "center"`: /// #fletcher.diagram( /// debug: 2, /// cell-size: 8mm, /// { /// for (i, s) in (-5pt, 0pt, .4em, .8em).enumerate() { /// edge((2*i,0), (2*i + 1,0), s, "->", label-sep: s, label-anchor: "center") /// } /// }) /// /// - label (content): Content for connector label. See `label-side` to control /// the position (and `label-sep`, `label-pos` and `label-anchor` for finer /// control). /// /// - label-side (left, right, center): Which side of the connector to place the /// label on, viewed as you walk along it. If `center`, then the label is place /// over the connector. When `auto`, a value of `left` or `right` is chosen to /// automatically so that the label is /// - roughly above the connector, in the case of straight lines; or /// - on the outside of the curve, in the case of arcs. /// /// - label-anchor (anchor): The anchor point to place the label at, such as /// `"top-right"`, `"center"`, `"bottom"`, etc. If `auto`, the anchor is /// automatically chosen based on `label-side` and the angle of the connector. /// /// - paint (paint): Paint (color or gradient) of the connector stroke. /// - thickness (length): Thickness the connector stroke. Marks (arrow heads) /// scale with this thickness. /// - dash (dash type): Dash style for the connector stroke. /// - bend (angle): Curvature of the connector. If `0deg`, the connector is a /// straight line; positive angles bend clockwise. /// /// #fletcher.diagram(debug: 0, { /// node((0,0), $A$) /// node((1,1), $B$) /// let N = 4 /// range(N + 1) /// .map(x => (x/N - 0.5)*2*100deg) /// .map(θ => edge((0,0), (1,1), θ, bend: θ, ">->", label-side: center)) /// .join() /// }) /// /// - marks (pair of strings): /// The start and end marks or arrow heads of the connector. A shorthand such as /// `"->"` can used instead. For example, /// `edge(p1, p2, "->")` is short for `edge(p1, p2, marks: (none, "head"))`. 
/// /// #table( /// columns: 3, /// align: horizon, /// [Arrow], [Shorthand], [Arguments], /// ..( /// "-", /// "--", /// "..", /// "->", /// "<=>", /// ">>-->", /// "|..|", /// "hook->>", /// "hook'->>", /// ">-harpoon", /// ">-harpoon'", /// ).map(str => ( /// fletcher.diagram(edge((0,0), (1,0), str)), /// raw(str, lang: none), /// raw(repr(parse-arrow-shorthand(str))), /// )).join() /// ) /// /// - mark-scale (percent): /// Scale factor for connector marks or arrow heads. This defaults to `100%` for /// single lines, `120%` for double lines and `150%` for triple lines. Does not /// affect the stroke thickness of the mark. /// /// #{ /// set raw(lang: none) /// fletcher.diagram( /// edge-thickness: 1pt, /// edge((0,0), (1,0), `->`, "->"), /// edge((2,0), (3,0), `=>`, "=>"), /// edge((4,0), (5,0), `==>`, "==>"), /// ) /// } /// /// - extrude (array of numbers): Draw copies of the stroke extruded by the /// given multiple of the stroke thickness. Used to obtain doubling effect. /// Best explained by example: /// /// #fletcher.diagram({ /// ( /// (0,), /// (-1.5,+1.5), /// (-2,0,+2), /// (-4.5,), /// (4.5,), /// ).enumerate().map(((i, e)) => { /// edge( /// (2*i, 0), (2*i + 1, 0), [#e], "|->", /// extrude: e, thickness: 1pt, label-sep: 1em) /// }).join() /// }) /// /// Notice how the ends of the line need to shift a little depending on the /// mark. For basic arrow heads, this offset is computed with /// `round-arrow-cap-offset()`. /// /// - crossing (bool): If `true`, draws a white backdrop to give the illusion of /// lines crossing each other. /// #fletcher.diagram({ /// edge((0,1), (1,0), thickness: 1pt) /// edge((0,0), (1,1), thickness: 1pt) /// edge((2,1), (3,0), thickness: 1pt) /// edge((2,0), (3,1), thickness: 1pt, crossing: true) /// }) /// /// - crossing-thickness (number): Thickness of the white "crossing" background /// stroke, if `crossing: true`, in multiples of the normal stroke's thickness. /// /// #fletcher.diagram({ /// (1, 2, 5, 8, 12).enumerate().map(((i, x)) => { /// edge((2*i, 1), (2*i + 1, 0), thickness: 1pt, label-sep: 1em) /// edge((2*i, 0), (2*i + 1, 1), raw(str(x)), thickness: 1pt, label-sep: /// 1em, crossing: true, crossing-thickness: x) /// }).join() /// }) /// /// - crossing-fill (paint): Color to use behind connectors or labels to give the illusion of crossing over other objects. Defaults to the `crossing-fill` option of /// `fletcher.diagram()`. 
/// /// #let cross(x, fill) = { /// edge((2*x + 0,1), (2*x + 1,0), thickness: 1pt) /// edge((2*x + 0,0), (2*x + 1,1), $f$, thickness: 1pt, crossing: true, crossing-fill: fill) /// } /// #fletcher.diagram(crossing-thickness: 5, { /// cross(0, white) /// cross(1, blue.lighten(50%)) /// cross(2, luma(98%)) /// }) /// #let edge( from, to, ..args, label: none, label-side: auto, label-pos: 0.5, label-sep: auto, label-anchor: auto, paint: black, thickness: auto, dash: none, kind: auto, bend: 0deg, corner: none, marks: (none, none), mark-scale: 100%, extrude: (0,), crossing: false, crossing-thickness: auto, crossing-fill: auto, ) = { let options = ( label: label, label-pos: label-pos, label-sep: label-sep, label-anchor: label-anchor, label-side: label-side, paint: paint, thickness: thickness, dash: dash, kind: kind, bend: bend, corner: corner, marks: marks, mark-scale: mark-scale, extrude: extrude, crossing: crossing, crossing-thickness: crossing-thickness, crossing-fill: crossing-fill, ) options += interpret-edge-args(args) if type(options.marks) == str { options += parse-arrow-shorthand(options.marks) } options.marks = options.marks.map(interpret-mark) let stroke = ( paint: options.paint, cap: "round", thickness: options.thickness, dash: options.dash, ) if options.label-side == center { options.label-anchor = "center" options.label-sep = 0pt } let obj = ( class: "edge", points: (from, to), label: options.label, label-pos: options.label-pos, label-sep: options.label-sep, label-anchor: options.label-anchor, label-side: options.label-side, paint: options.paint, kind: options.kind, bend: options.bend, corner: options.corner, stroke: stroke, marks: options.marks, mark-scale: options.mark-scale, extrude: options.extrude, is-crossing-background: false, crossing-thickness: crossing-thickness, crossing-fill: crossing-fill, ) // add empty nodes at terminal points node(from, none) node(to, none) if options.crossing { (( ..obj, is-crossing-background: true ),) } (obj,) } #let apply-defaults(nodes, edges, options) = ( nodes: nodes.map(node => { if node.stroke == auto {node.stroke = options.node-stroke } if node.fill == auto { node.fill = options.node-fill } if node.inset == auto { node.inset = options.node-inset } if node.outset == auto { node.outset = options.node-outset } if node.defocus == auto { node.defocus = options.node-defocus } node }), edges: edges.map(edge => { if edge.stroke.thickness == auto { edge.stroke.thickness = options.edge-thickness } if edge.crossing-fill == auto { edge.crossing-fill = options.crossing-fill } if edge.crossing-thickness == auto { edge.crossing-thickness = options.crossing-thickness } if edge.label-sep == auto { edge.label-sep = options.label-sep } if edge.is-crossing-background { edge.stroke = ( thickness: edge.crossing-thickness*edge.stroke.thickness, paint: edge.crossing-fill, cap: "round", ) edge.marks = (none, none) edge.extrude = edge.extrude.map(e => e/edge.crossing-thickness) } if edge.kind == auto { if edge.corner != none { edge.kind = "corner" } else if edge.bend != 0deg { edge.kind = "arc" } else { edge.kind = "line" } } edge.mark-scale *= options.mark-scale edge.marks = edge.marks.map(mark => { if mark != none { mark.size *= edge.mark-scale/100% } mark }) edge }), ) /// Draw an arrow diagram. /// /// - ..objects (array): An array of dictionaries specifying the diagram's /// nodes and connections. /// /// - debug (bool, 1, 2, 3): Level of detail for drawing debug information. 
/// Level `1` shows a coordinate grid; higher levels show bounding boxes and /// anchors, etc. /// /// - spacing (length, pair of lengths): Gaps between rows and columns. Ensures /// that nodes at adjacent grid points are at least this far apart (measured as /// the space between their bounding boxes). /// /// Separate horizontal/vertical gutters can be specified with `(x, y)`. A /// single length `d` is short for `(d, d)`. /// /// - cell-size (length, pair of lengths): Minimum size of all rows and columns. /// /// - node-inset (length, pair of lengths): Default padding between a node's /// content and its bounding box. /// - node-stroke (stroke): Default stroke for all nodes in diagram. Overridden /// by individual node options. /// - node-fill (paint): Default fill for all nodes in diagram. Overridden by /// individual node options. /// /// - node-defocus (number): Default strength of the "defocus" adjustment for /// nodes. This affects how connectors attach to non-square nodes. If /// `0`, the adjustment is disabled and connectors are always directed at the /// node's exact center. /// /// #stack( /// dir: ltr, /// spacing: 1fr, /// ..(0.2, 0, -1).enumerate().map(((i, defocus)) => { /// fletcher.diagram(spacing: 8mm, { /// node((i, 0), raw("defocus: "+str(defocus)), stroke: black, defocus: defocus) /// for y in (-1, +1) { /// edge((i - 1, y), (i, 0)) /// edge((i, y), (i, 0)) /// edge((i + 1, y), (i, 0)) /// } /// }) /// }) /// ) /// /// - crossing-fill (paint): Color to use behind connectors or labels to give /// the illusion of crossing over other objects. See the `crossing-fill` option /// of `edge()`. /// /// - crossing-thickness (number): Default thickness of the occlusion made by /// crossing connectors. See the `crossing-thickness` option of `edge()`. /// /// /// - render (function): After the node sizes and grid layout have been /// determined, the `render` function is called with the following arguments: /// - `grid`: a dictionary of the row and column widths and positions; /// - `nodes`: an array of nodes (dictionaries) with computed attributes /// (including size and physical coordinates); /// - `edges`: an array of connectors (dictionaries) in the diagram; and /// - `options`: other diagram attributes. /// /// This callback is exposed so you can access the above data and draw things /// directly with CeTZ. 
#let diagram( ..objects, debug: false, spacing: 3em, cell-size: 0pt, node-inset: 12pt, node-outset: 0pt, node-stroke: none, node-fill: none, node-defocus: 0.2, label-sep: 0.2em, edge-thickness: 0.6pt, mark-scale: 100%, crossing-fill: white, crossing-thickness: 3, render: (grid, nodes, edges, options) => { cetz.canvas(draw-diagram(grid, nodes, edges, options)) }, ) = { if type(spacing) != array { spacing = (spacing, spacing) } if type(cell-size) != array { cell-size = (cell-size, cell-size) } if objects.named().len() > 0 { let args = objects.named().keys().join(", ") panic("Unexpected named argument(s): " + args) } let options = ( spacing: spacing, debug: int(debug), node-inset: node-inset, node-outset: node-outset, node-stroke: node-stroke, node-fill: node-fill, node-defocus: node-defocus, label-sep: label-sep, cell-size: cell-size, edge-thickness: edge-thickness, mark-scale: mark-scale, crossing-fill: crossing-fill, crossing-thickness: crossing-thickness, ) let positional-args = objects.pos().join() let nodes = positional-args.filter(e => e.class == "node") let edges = positional-args.filter(e => e.class == "edge") box(style(styles => { let options = options options.em-size = measure(box(width: 1em), styles).width let to-pt(len) = len.abs + len.em*options.em-size options.spacing = options.spacing.map(to-pt) options.node-inset = to-pt(options.node-inset) options.label-sep = to-pt(options.label-sep) let (nodes, edges) = apply-defaults(nodes, edges, options) let nodes = compute-node-sizes(nodes, styles) let grid = compute-grid(nodes, options) let nodes = compute-node-positions(nodes, grid, options) render(grid, nodes, edges, options) })) }
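The doc comments above spell out the `node`, `edge`, and `diagram` arguments and the `"->"`-style mark shorthands. A minimal usage sketch against this 0.2.0 source, assuming the package entry point re-exports these three functions as the embedded doc examples suggest:

```typst
#import "@preview/fletcher:0.2.0" as fletcher: node, edge

// Two labelled nodes joined by a slightly bent arrow; "$f$" and "->" are the
// positional label/marks shorthands handled by interpret-edge-args().
#fletcher.diagram(
  node-stroke: 0.6pt,
  node((0, 0), $A$),
  node((1, 0), $B$),
  edge((0, 0), (1, 0), $f$, "->", bend: 20deg),
)
```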
https://github.com/ericthomasca/resume-v1
https://raw.githubusercontent.com/ericthomasca/resume-v1/main/modules/education.typ
typst
Apache License 2.0
#import "../brilliant-CV/template.typ": * #cvSection("Education") #cvEntry( title: [Accelerated Software Development Post Diploma], society: [College of the North Atlantic], date: [Sep 2021 - Aug 2022], location: [St. John's, NL], description: list( [Studied a variety of software development topics, including Java, Python, JavaScript, SQL, security, and systems analysis.], [Completed a work term with Genoa Design International, gaining hands-on experience with modern technologies.] ) ) #cvEntry( title: [Bachelor of Business Administration], society: [Memorial University of Newfoundland], date: [Sep 2015 - Jun 2016], location: [Corner Brook, NL], description: list( [Participated in a partnership program with the College of the North Atlantic to complete the BBA program.], [Focused on electives in finance and economics to complement skills in the accounting field.] ) ) #cvEntry( title: [Business Management (Accounting) Diploma], society: [College of the North Atlantic], date: [Sep 2011 - Jun 2014], location: [Grand Falls-Windsor, NL], description: list( [Completed electives in tax, payroll, and the oil and gas industry to gain specialized knowledge for competitive industries.] ) )
https://github.com/fenjalien/metro
https://raw.githubusercontent.com/fenjalien/metro/main/tests/angle/test.typ
typst
Apache License 2.0
#import "/src/lib.typ": * #set page(width: auto, height: auto, margin: 1cm) #ang(2.67) #ang(2, 3, 4) #ang(2.67, angle-mode: "arc") #ang(2, 3, 4, angle-mode: "arc") #ang(2.67, angle-mode: "decimal") #ang(2, 3, 4, angle-mode: "decimal")
https://github.com/gerome-andry/typst-CV
https://raw.githubusercontent.com/gerome-andry/typst-CV/main/README.md
markdown
# typst-CV A simple academic CV written in Typst
https://github.com/1STEP621/typst-anshere
https://raw.githubusercontent.com/1STEP621/typst-anshere/main/example/example.typ
typst
#import "../src/anshere.typ": * #set text(font: ("Noto Serif CJK JP")) #anshere(( q(), // q()で質問を追加 q(), q-blank, // q-blankで空白 q-break, // q-breakで改行 ..(q(),) * 3, // 繰り返しを使うと便利 q-break, q(content: [#h(1fr)m / s]), // contentで解答欄に書き込み q(content: [#h(1fr)T#h(10pt)/#h(10pt)F#h(1fr)]), q(label: [$2x + 1$]), // labelで質問番号を自由に変更 q-break, q(counter-reset: 1), // counter-resetで質問番号をリセット q(), q-blank, q-break, q(numbering-width: 50pt), // numbering-widthで質問番号の幅を変更 q(hide-numbering: true) // hide-numberingで質問番号を非表示 )) #anshere(( ..(q(numbering: "a"),) * 4, // numberingで質問番号の形式を変更 ..(q-blank,) * 2, q-break, q(counter-reset: 1), q(), q(counter-skip: 1), // counter-skipで質問番号をスキップ q(), q(counter-skip: 1), q(), q-break, q(counter-reset: 1), q(), q(counter-skip: -1), q(), q(counter-skip: -1), q(), q-break, )) #anshere(( q(children: ( // childrenでネストされた質問を追加 q(), q(), q(children: ( q(), q(), )), )), )) #anshere( ( q(children: ( q(), q(children: ( q(), q(children: ( q(), q(), )), )), )), ), numberings: ("1", "1", "(1)", "あ", "a"), // 質問番号の形式を一括指定 ) #anshere( ( q(children: ( q(), q( hide-numbering: true, // children内で質問番号を非表示 children: ( q(), q(children: ( q(), q(), )), ), ), )), ), numberings: ("1", "1", "(1)", "あ", "a"), )
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/cetz/0.2.0/src/draw/transformations.typ
typst
Apache License 2.0
#import "/src/coordinate.typ" #import "/src/matrix.typ" #import "/src/vector.typ" #import "/src/util.typ" // Utility for applying translation to and from // the origin to apply a transformation matrix to. // // - ctx (context): Context // - transform (matrix): Transformation matrix // - origin (coordinate): Origin coordinate or none #let _transform-around-origin(ctx, transform, origin) = { if origin != none { let (_, origin) = coordinate.resolve(ctx, origin, update: false) let a = matrix.transform-translate(..origin) let b = matrix.transform-translate(..vector.scale(origin, -1)) matrix.mul-mat(a, matrix.mul-mat(transform, b)) } else { transform } } /// Sets the transformation matrix. /// /// - mat (none, matrix): The 4x4 transformation matrix to set. If `none` is passed, the transformation matrix is set to the identity matrix (`matrix.ident()`). #let set-transform(mat) = { let mat = if mat == none { matrix.ident() } else { mat } assert( type(mat) == array, message: "Transformtion matrix must be of type array, got: " + repr(mat) ) assert.eq( mat.len(), 4, message: "Transformation matrix must be of size 4x4, got: " + repr(mat) ) (ctx => { ctx.transform = mat return (ctx: ctx) },) } /// Rotates the transformation matrix on the z-axis by a given angle or other axes when specified. /// /// #example(``` /// // Rotate on z-axis /// rotate(z: 45deg) /// rect((-1,-1), (1,1)) /// // Rotate on y-axis /// rotate(y: 80deg) /// circle((0,0)) /// ```) /// /// - ..angles (angle): A single angle as a positional argument to rotate on the z-axis by. /// Named arguments of `x`, `y` or `z` can be given to rotate on their respective axis. /// You can give named arguments of `yaw`, `pitch` or `roll`, too. /// - origin (none,coordinate): Origin to rotate around, or (0, 0, 0) if set to `none`. #let rotate(..angles, origin: none) = { assert(angles.pos().len() == 1 or angles.named().len() > 0, message: "Rotate takes a single z-angle or angles " + "(x, y, z or yaw, pitch, roll) as named arguments, got: " + repr(angles)) let named = angles.named() let names = named.keys() let mat = if angles.pos().len() == 1 { matrix.transform-rotate-z(angles.pos().at(0)) } else if names.all(n => n in ("x", "y", "z")) { matrix.transform-rotate-xyz(named.at("x", default: 0deg), named.at("y", default: 0deg), named.at("z", default: 0deg)) } else if names.all(n => n in ("yaw", "pitch", "roll")) { matrix.transform-rotate-ypr(named.at("yaw", default: 0deg), named.at("pitch", default: 0deg), named.at("roll", default: 0deg)) } else { panic("Invalid rotate arguments." + "Rotate expects: A single (z-axis) angle or any combination of x, y,z or any combination of yaw, pitch, roll. " + "Got: " + repr(named)) } (ctx => { ctx.transform = matrix.mul-mat(ctx.transform, _transform-around-origin(ctx, mat, origin)) return (ctx: ctx) },) } /// Translates the transformation matrix by the given vector or dictionary. /// /// #example(``` /// // Outer rect /// rect((0, 0), (2, 2)) /// // Inner rect /// translate(x: .5, y: .5) /// rect((0, 0), (1, 1)) /// ```) /// /// - ..args (vector, float, length): A single vector or any combination of the named arguments `x`, `y` and `z` to translate by. /// A translation matrix with the given offsets gets multiplied with the current transformation depending on the value of `pre`. 
/// - pre (bool): Specify matrix multiplication order /// - false: `World = World * Translate` /// - true: `World = Translate * World` #let translate(..args, pre: false) = { assert((args.pos().len() == 1 and args.named() == (:)) or (args.pos() == () and args.named() != (:)), message: "Expected a single positional argument or one or more named arguments, got: " + repr(args)) let pos = args.pos() let named = args.named() let vec = if named != (:) { (named.at("x", default: 0), named.at("y", default: 0), named.at("z", default: 0)) } else { vector.as-vec(pos.at(0), init: (0, 0, 0)) } (ctx => { // Allow translating by length values let vec = vec.map(v => if type(v) == length { util.resolve-number(ctx, v) } else { v }) let t = matrix.transform-translate(..vec) if pre { ctx.transform = matrix.mul-mat(t, ctx.transform) } else { ctx.transform = matrix.mul-mat(ctx.transform, t) } return (ctx: ctx) },) } /// Scales the transformation matrix by the given factor(s). /// /// #example(``` /// // Scale the y-axis /// scale(y: 50%) /// circle((0,0)) /// ```) /// /// - ..args (float, ratio): A single value to scale the transformation matrix by or per axis /// scaling factors. Accepts a single float or ratio value or any combination of the named arguments /// `x`, `y` and `z` to set per axis scaling factors. A ratio of 100% is the same as the value $1$. /// - origin (none,coordinate): Origin to rotate around, or (0, 0, 0) if set to `none`. #let scale(..args, origin: none) = { assert((args.pos().len() == 1 and args.named() == (:)) or (args.pos() == () and args.named() != (:)), message: "Expected a single positional argument or one or more named arguments, got: " + repr(args)) let pos = args.pos() let named = args.named() let vec = if args.named() != (:) { (named.at("x", default: 1), named.at("y", default: 1), named.at("z", default: 1)) } else if type(pos.at(0)) == array { vector.as-vec(pos, init: (1, 1, 1)) } else { let factor = pos.at(0) (factor, factor, factor) } // Allow scaling using ratio values vec = vec.map(v => if type(v) == ratio { v / 100% } else { v }) (ctx => { let mat = matrix.transform-scale(vec) ctx.transform = matrix.mul-mat(ctx.transform, _transform-around-origin(ctx, mat, origin)) return (ctx: ctx) },) } /// Sets the given position as the new origin `(0, 0, 0)` /// /// #example(``` /// // Outer rect /// rect((0,0), (2,2), name: "r") /// // Move origin to top edge /// set-origin("r.north") /// circle((0, 0), radius: .1) /// ```) /// /// - origin (coordinate): Coordinate to set as new origin `(0,0,0)` #let set-origin(origin) = { ( ctx => { let (ctx, c) = coordinate.resolve(ctx, origin) let (x, y, z) = vector.sub( util.apply-transform(ctx.transform, c), util.apply-transform(ctx.transform, (0, 0, 0)), ) ctx.transform = matrix.mul-mat(matrix.transform-translate(x, y, z), ctx.transform) return (ctx: ctx) }, ) } /// Sets the previous coordinate. /// /// The previous coordinate can be used via `()` (empty coordinate). /// It is also used as base for relative coordinates if not specified /// otherwise. /// /// #example(``` /// circle((), radius: .25) /// move-to((1,0)) /// circle((), radius: .15) /// ```) /// /// - pt (coordinate): The coordinate to move to. 
#let move-to(pt) = { let t = coordinate.resolve-system(pt) return (ctx => { let (ctx, pt) = coordinate.resolve(ctx, pt) return (ctx: ctx) },) } /// Span viewport between two coordinates and set-up scaling and translation /// /// #example(``` /// rect((0,0), (2,2)) /// set-viewport((0,0), (2,2), bounds: (10, 10)) /// circle((5,5)) /// ```) /// /// - from (coordinate): Bottom-Left corner coordinate /// - to (coordinate): Top right corner coordinate /// - bounds (vector): Viewport bounds vector that describes the inner width, /// height and depth of the viewport #let set-viewport(from, to, bounds: (1, 1, 1)) = { (from, to).map(coordinate.resolve-system) return (ctx => { let bounds = vector.as-vec(bounds, init: (1, 1, 1)) let (ctx, from, to) = coordinate.resolve(ctx, from, to) let (fx, fy, fz) = from let (tx, ty, tz) = to // Compute scaling let (sx, sy, sz) = vector.sub((tx, ty, tz), (fx, fy, fz)).enumerate().map(((i, v)) => if bounds.at(i) == 0 { 0 } else { v / bounds.at(i) }) ctx.transform = matrix.mul-mat(ctx.transform, matrix.transform-translate(fx, fy, fz)) ctx.transform = matrix.mul-mat(ctx.transform, matrix.transform-scale((sx, sy, sz))) return (ctx: ctx) },) }
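The transformation helpers above all multiply a matrix onto `ctx.transform`, so they compose in the order they appear. A short sketch combining several of them inside a canvas, with argument forms taken from the doc comments; the top-level import of `canvas` and `draw` is assumed to match the package's public interface:

```typst
#import "@preview/cetz:0.2.0": canvas, draw

#canvas({
  import draw: *
  rect((0, 0), (2, 2))     // outer rectangle, drawn before any transform
  translate(x: 1, y: 1)    // shift the origin to the rectangle's centre
  rotate(z: 30deg)         // rotate later shapes about the new origin
  scale(50%)               // a ratio of 100% is the identity scale
  rect((-1, -1), (1, 1))   // inner square: translated, rotated, and scaled
})
```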
https://github.com/dark-flames/resume
https://raw.githubusercontent.com/dark-flames/resume/main/data/work.typ
typst
MIT License
#import "../libs.typ": * #import "../chicv.typ": * #let workList(env) = ( ( company: "Luogu", link: iconlink("https://luogu.com", text: "luogu.com"), role: "Backend Team Leader, Remote, Part-Time", location: "Shanghai, China", time: "Jul, 2017 - Apr, 2023", content: cv-and-others(env, [ - Led and participated in the backend development of the biggest online-judge platform in China. - Designed and developed a backend framework in PHP with dependency injection and container compilation. - Optimized and refactored the existing codebase progressively while continuously introducing new features, enabling scalability from tens of thousands to over a million users, and supporting an annual judgment volume of fifty million. ], [ - Led and participated in the backend development of the biggest online-judge platform in China. - Designed and developed a backend framework in PHP with dependency injection and container compilation. - Designed and led the development of several curial middlewares, including a distributed asynchronous task worker in Rust, a WebSocket server in Python. - Optimized and refactored the existing codebase progressively while continuously introducing new features, enabling scalability from tens of thousands to over a million users, and supporting an annual judgment volume of fifty million. ]) ), ) #let work(env) = { multiLang(env,en: [== Work Experience], cn: [== 工作经历]) let s = workList(env).map(w => { cventry( tl: [*#w.company*, #w.location], tr: w.link, bl: w.role, br: w.time )[#w.content] }).join() [#s] }
https://github.com/mariunaise/HDA-Thesis
https://raw.githubusercontent.com/mariunaise/HDA-Thesis/master/graphics/quantizers/bach/sign-based-overlay.typ
typst
#import "@preview/cetz:0.2.2": * #let ymax = 1/calc.sqrt(2*calc.pi) #let line_style = (stroke: (paint: black, thickness: 2pt)) #let dashed = (stroke: (dash: "dashed")) #canvas({ plot.plot(size: (8,4), legend: "legend.north", legend-style: (orientation: ltr, item: (spacing: 0.5)), x-tick-step: none, x-ticks: ((0, [0]), (100, [0])), y-label: $cal(Q)(1, x)$, x-label: $x$, y-tick-step: none, y-ticks: ((0, [0]), (ymax, [1])), axis-style: "left", x-min: -3, x-max: 3, y-min: 0, y-max: ymax,{ plot.add( plot.sample-fn( (x) => 1/calc.sqrt(2*calc.pi)*calc.exp(-(calc.pow(x,2)/2)), (-3, 3), 300), style: (stroke: (paint: red, thickness: 2pt)), label: [PDF of a normal distribution] ) plot.add(((-3,0), (0,0), (0,ymax), (3,ymax)), style: line_style, label: [$cal(Q)(1,x)$]) }) })
https://github.com/EstebanMunoz/typst-template-informe
https://raw.githubusercontent.com/EstebanMunoz/typst-template-informe/main/title-page.typ
typst
MIT No Attribution
#let place-people-in-grid(people, single-people-str, multiple-people-str) = { let num-people = people.len() if num-people == 0 {()} else if num-people == 1 {(single-people-str,)} else {(grid.cell(rowspan: num-people, multiple-people-str),)} } #let title-page( title: none, subject: none, students: (), teachers: (), auxiliaries: (), assistants: (), lab-assistants: (), semester: none, due-date: none, place: none, university: none, faculty: none, department: none, logo: none ) = { let first-page-header = [ #set align(left) #set text(11pt) #stack( dir: ttb, spacing: 4pt, [#stack( dir: ltr, spacing: 1fr, [#stack( dir: ttb, spacing: 6pt, [#university], [#faculty], [#department] )], logo )], [#line(length: 100%, stroke: 0.4pt)] ) ] set page(margin: (top: 4.46cm, bottom: 2.7cm), header: first-page-header, footer: "") align(center + horizon, text(24.24pt)[ #title \ #text(12pt)[#subject] ]) align(right + bottom)[ #block(width: 270pt)[ #grid( columns: (75pt, auto), align: top + left, row-gutter: 7pt, ..place-people-in-grid(students, "Integrante:", "Integrantes:"), ..students, ..place-people-in-grid(teachers, "Profesor:", "Profesores:"), ..teachers, ..place-people-in-grid(auxiliaries, "Auxiliar:", "Auxiliares:"), ..auxiliaries, ..place-people-in-grid(assistants, "Ayudante:", "Ayudantes:"), ..assistants, ..place-people-in-grid(lab-assistants, [Ayudante de \ laboratorio:], [Ayudantes de \ laboratorio:]), ..lab-assistants, ..if semester != none {( grid.cell("Semestre:"), grid.cell(semester) )}, grid.cell(colspan: 2, v(5pt)), ..if due-date != none {( grid.cell(colspan: 2)[Fecha de entrega: #due-date], )}, ..if place != none {( grid.cell(colspan: 2, place), )} ) ] ] counter(page).update(0) }
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/let-18.typ
typst
Other
// Ref: false // Destructuring with a sink in the middle. #let (a: _, ..b, c: _) = (a: 1, b: 2, c: 3) #test(b, (b: 2))
https://github.com/Enter-tainer/typst-preview
https://raw.githubusercontent.com/Enter-tainer/typst-preview/main/assets/demo/polylux.typ
typst
MIT License
#import "@preview/polylux:0.2.0": * #import themes.clean: * #set text(font: "Source Sans 3") #show math.equation: set text(font: "GFS Neohellenic Math") #show: clean-theme.with( footer: [Sum of natural numbers, CF Gauß], ) #title-slide( authors: "<NAME>", title: [On a revolutionary way to \ sum up natural numbers], subtitle: "What they won't teach you in school", date: "1784", ) #new-section-slide("Introduction") #slide(title: "Problem statement")[ Let $n in NN$. We are interested in sums of the form $ 1 + ... + n = sum_(i=1)^n i $ ] #slide(title: "The theorem")[ I discovered that $ sum_(i=1)^n i = n(n+1)/2 $ Let's prove that! ] #new-section-slide("Proof") #slide(title: "Method of proof")[ We will prove the theorem by induction, following these steps: + base case + induction hypothesis + induction step ] #slide(title: "Proof")[ #set text(.7em) #one-by-one[ *base case:* Let $n = 1$. Then $sum_(i=1)^1 i = (1 dot.c 2)/2 = 1$ $checkmark$ ][ *ind. hypothesis:* Let $sum_(i=1)^k i = k(k+1)/2$ for some $k >= 1$. ][ *ind. step:* Show that $sum_(i=1)^(k+1) i = ((k+1)(k+2))/2$ $sum_(i=1)^(k+1) i = sum_(i=1)^k i quad + quad (k+1)$ ][ $= k(k+1)/2 + (k+1)$ ][ $= (k+1) dot.c (k/2 + 1) = (k+1) dot.c (k/2 + 2/2) = ((k+1)(k+2))/2 #h(1em) checkmark$ ] ] #focus-slide[ Proof is over, wake up! ] #new-section-slide("Conclusion") #slide(title: "That's it!")[ Now you know how to calculate those sums more quickly. Nice! ]
https://github.com/MALossov/YunMo_Doc
https://raw.githubusercontent.com/MALossov/YunMo_Doc/main/README.md
markdown
Apache License 2.0
# 云MO监控 - 完赛文档 > 模板格式源自: 上海大学本科毕业论文[_typst_](https://typst.app/)模板 1. 使用typst进行了完赛文档的撰写 2. 进行了部分的格式调整,来满足比赛要求 ~~就是个FPGA创新赛的完赛文档,没啥好看的~~ *主要是用typst写的*
https://github.com/jw2476/cslog
https://raw.githubusercontent.com/jw2476/cslog/master/AutoGen.typ
typst
= Appendix: Code #[ #set align(center) The following is every single `.rs` (Rust) and `.toml` (configuration) file in my project. The total length is 7939 loc and 224 548 characters across 68 files. ] === arbiter::main.rs ```pretty-rs // main.rs #![warn(clippy::pedantic)] #![warn(clippy::nursery)] #![deny(clippy::unwrap_used)] #![warn(clippy::expect_used)] use anyhow::Result; use async_std::net::UdpSocket; use common::{ item::{Item, ItemStack}, net, }; use glam::Vec3; use num_traits::{FromPrimitive, ToPrimitive}; use sqlx::SqlitePool; use std::{ collections::{ hash_map::{Keys, Values}, HashMap, }, hash::Hash, net::SocketAddr, ops::Deref, time::Instant, }; use tracing::{error, info, warn}; #[derive(Clone, PartialEq, Eq)] struct Connection { last_heartbeat: Instant, addr: SocketAddr, user_id: i64, character_id: i64, } trait Unique { type Key: Eq + PartialEq + Hash; fn get_unique_key(&self) -> Self::Key; } impl Unique for Connection { type Key = SocketAddr; fn get_unique_key(&self) -> Self::Key { self.addr } } impl Deref for Connection { type Target = SocketAddr; fn deref(&self) -> &Self::Target { &self.addr } } struct IndexedMap<T> where T: Unique, { inner: HashMap<T::Key, T>, } impl<T> IndexedMap<T> where T: Unique + Clone, { pub fn new() -> Self { Self::default() } pub fn get(&self, key: &T::Key) -> Option<&T> { self.inner.get(key) } pub fn get_mut(&mut self, key: &T::Key) -> Option<&mut T> { self.inner.get_mut(key) } pub fn insert(&mut self, value: T) { self.inner.insert(value.get_unique_key(), value); } pub fn remove(&mut self, key: &T::Key) { self.inner.remove(key); } pub fn values<'a>(&'a self) -> Values<'a, T::Key, T> { self.inner.values() } pub fn keys<'a>(&'a self) -> Keys<'a, T::Key, T> { self.inner.keys() } pub fn take(&mut self, key: &T::Key) -> Option<T> { let value = self.get(key).cloned(); self.remove(key); value } } impl<T> Default for IndexedMap<T> where T: Unique, { fn default() -> Self { Self { inner: HashMap::new(), } } } struct Server { socket: UdpSocket, online: IndexedMap<Connection>, pool: SqlitePool, } #[derive(thiserror::Error, Debug)] enum SendError { #[error("IO error")] IOError(#[from] std::io::Error), #[error("Encode error")] EncodeError(#[from] postcard::Error), } impl Server { pub fn new(socket: UdpSocket, pool: SqlitePool) -> Self { Self { socket, online: IndexedMap::new(), pool, } } pub async fn send( &self, addr: &SocketAddr, packet: &net::client::Packet, ) -> Result<(), SendError> { let bytes = postcard::to_stdvec(packet)?; self.socket.send_to(&bytes, addr).await?; Ok(()) } } #[async_std::main] async fn main() -> Result<()> { tracing_subscriber::fmt::init(); let socket = UdpSocket::bind("0.0.0.0:8000").await?; let pool = SqlitePool::connect(&std::env::var("DATABASE_URL")?).await?; sqlx::migrate!().run(&mut pool.acquire().await?).await?; let mut server = Server::new(socket, pool); info!("Listening on 0.0.0.0:8000"); let mut last_heartbeat_check = Instant::now(); loop { let mut buf = [0; 4096]; match server.socket.recv_from(&mut buf).await { Err(e) => panic!("{e}"), Ok((_, addr)) => { let packet = match postcard::from_bytes(&buf) { Ok(packet) => packet, Err(e) => { warn!("Failed to decode packet due to {}", e); continue; } }; if let Err(e) = handle_packet(&mut server, &packet, addr).await { warn!("Handling packet failed with {e}"); continue; } } } if last_heartbeat_check.elapsed().as_secs_f32() > 1.0 { info!("Checking heartbeats"); check_heartbeats(&mut server).await?; last_heartbeat_check = Instant::now(); } } } async fn check_heartbeats(server: &mut 
Server) -> Result<()> { let dead = server .online .values() .filter(|connection| connection.last_heartbeat.elapsed().as_secs_f32() > 20.0) .map(|connection| connection.addr) .collect::<Vec<SocketAddr>>(); for addr in dead { if let Err(e) = disconnect(server, addr, Some("Heartbeat timeout".to_owned())).await { warn!("Failed to disconnect {} due to {}", addr, e); continue; } } Ok(()) } async fn handle_login(server: &mut Server, packet: &net::server::Login, addr: SocketAddr) { let Ok(user) = sqlx::query!( "SELECT id, password FROM users WHERE username = ?", packet.username ) .fetch_optional(&server.pool) .await else { error!("Fetching user {} failed", packet.username); return; }; let Some(user) = user else { let _ = send_error(server, addr, "No account found for that username", true).await; return; }; if user.password != packet.password { let _ = send_error(server, addr, "Username or password is incorrect", true).await; return; } let Ok(character) = sqlx::query!( "SELECT id, name, position_x, position_y, position_z FROM characters WHERE owner = ?", user.id ) .fetch_one(&server.pool) .await else { error!("Fetching character for user {} failed", packet.username); return; }; let position = Vec3::new( character.position_x as f32, character.position_y as f32, character.position_z as f32, ); server.online.insert(Connection { last_heartbeat: Instant::now(), addr, user_id: user.id, character_id: character.id, }); let connection = server .online .get(&addr) .expect("Failed to get connection that was just inserted, this is very bad"); for peer in server.online.values() { // Notify peers about new client let packet = net::client::Packet::SpawnPlayer(net::client::SpawnPlayer { username: packet.username.clone(), position, }); if let Err(e) = server.send(peer, &packet).await { warn!("Failed to notify {} of new player due to {}", peer.addr, e); } let Ok(peer_user) = sqlx::query!("SELECT username FROM users WHERE id = ?", peer.user_id) .fetch_one(&server.pool) .await else { error!("Fetching peer user {} failed", peer.user_id); continue; }; let Ok(peer_character) = sqlx::query!( "SELECT position_x, position_y, position_z FROM characters WHERE id = ?", peer.character_id ) .fetch_one(&server.pool) .await else { error!("Fetching peer character {} failed", peer.character_id); continue; }; let peer_position = Vec3::new( peer_character.position_x as f32, peer_character.position_y as f32, peer_character.position_z as f32, ); // Notify new client about peers let packet = net::client::Packet::SpawnPlayer(net::client::SpawnPlayer { username: peer_user.username.clone(), position: peer_position, }); if let Err(e) = server.send(connection, &packet).await { warn!( "Failed to notify new player {} of player {} due to {}", addr, peer.addr, e ); } } let Ok(items) = sqlx::query!( "SELECT item, quantity FROM items WHERE owner = ?", character.id ) .fetch_all(&server.pool) .await else { error!( "Fetching items for character {} user {} failed", character.name, packet.username ); return; }; // Set clients inventory for stack in items { let Some(item) = Item::from_i64(stack.item) else { error!("Invalid item ID in database {}", stack.item); continue; }; let inventory_packet = net::client::Packet::ModifyInventory(net::client::ModifyInventory { stack: ItemStack { item, amount: stack.quantity as u32, }, }); if let Err(e) = server.send(connection, &inventory_packet).await { warn!( "Failed to update player {}'s inventory stack {:?} due to {}", packet.username, stack, e ); continue; } info!("Updating player {}'s stack {:?}", 
packet.username, stack); } info!("Added {} to connection list", packet.username); } async fn handle_move(server: &mut Server, packet: &net::server::Move, addr: SocketAddr) { let Some(connection) = server.online.get_mut(&addr) else { warn!("Cannot find client for addr {}", addr); return; }; if let Err(e) = sqlx::query!( "UPDATE characters SET position_x = ?, position_y = ?, position_z = ? WHERE id = ?", packet.position.x, packet.position.y, packet.position.z, connection.character_id ) .execute(&server.pool) .await { error!( "Updating position for character {} failed due to {}", connection.character_id, e ); return; } info!( "Updated position for {} to {:?}", connection.character_id, packet.position ); let Some(connection) = server.online.get(&addr) else { warn!("Cannot find client for addr {}", addr); return; }; let Ok(user) = sqlx::query!( "SELECT username FROM users WHERE id = ?", connection.user_id ) .fetch_one(&server.pool) .await else { error!("Failed to fetch user with id {}", connection.user_id); return; }; for peer in server.online.values().filter(|peer| peer != &connection) { let packet = net::client::Packet::Move(net::client::Move { username: user.username.clone(), position: packet.position, }); if let Err(e) = server.send(peer, &packet).await { warn!( "Failed to notify {} of {} moving due to {}", peer.addr, user.username, e ); continue; } } } fn handle_heartbeat(server: &mut Server, addr: SocketAddr) { let Some(connection) = server.online.get_mut(&addr) else { warn!("Cannot find client for addr {}", addr); return; }; connection.last_heartbeat = Instant::now(); info!("{} heartbeat", connection.user_id); } async fn handle_packet( server: &mut Server, packet: &net::server::Packet, addr: SocketAddr, ) -> Result<()> { match packet { net::server::Packet::Login(packet) => handle_login(server, packet, addr).await, net::server::Packet::Move(packet) => handle_move(server, packet, addr).await, net::server::Packet::Heartbeat => handle_heartbeat(server, addr), net::server::Packet::Disconnect => disconnect(server, addr, None).await?, net::server::Packet::ModifyInventory(packet) => { handle_modify_inventory(server, packet, addr).await } net::server::Packet::Signup(packet) => handle_signup(server, packet, addr).await, }; Ok(()) } async fn disconnect(server: &mut Server, addr: SocketAddr, reason: Option<String>) -> Result<()> { let Some(connection) = server.online.get(&addr) else { warn!("Cannot find client for addr {}", addr); return Err(std::io::Error::new(std::io::ErrorKind::NotFound, "Client not found").into()); }; info!("{} is disconnecting", connection.user_id); let user = sqlx::query!( "SELECT username FROM users WHERE id = ?", connection.user_id ) .fetch_one(&server.pool) .await?; for peer in server.online.values().filter(|peer| peer != &connection) { let packet = net::client::Packet::DespawnPlayer(net::client::DespawnPlayer { username: user.username.clone(), }); server.send(peer, &packet).await?; } if let Some(reason) = reason { let packet = net::client::Packet::NotifyDisconnection(net::client::NotifyDisconnection { reason }); server.send(&connection, &packet).await?; } server.online.remove(&connection.get_unique_key()); Ok(()) } async fn send_error(server: &Server, addr: SocketAddr, message: &str, fatal: bool) { let packet = net::client::Packet::DisplayError(net::client::DisplayError { message: message.to_owned(), fatal, }); let _ = server.send(&addr, &packet).await; } async fn handle_modify_inventory( server: &mut Server, packet: &net::server::ModifyInventory, addr: SocketAddr, ) { let 
Some(connection) = server.online.get_mut(&addr) else { warn!("Cannot find client for addr {}", addr); return; }; let item = packet.stack.item.to_i64(); let Ok(existing) = sqlx::query!( "SELECT id FROM items WHERE owner = ? AND item = ?", connection.character_id, item ) .fetch_optional(&server.pool) .await else { error!( "Failed to fetch existing item stack character {} item {}", connection.character_id, packet.stack.item ); return; }; let result = if let Some(_) = existing { sqlx::query!("UPDATE items SET quantity = ?", packet.stack.amount) .execute(&server.pool) .await } else { sqlx::query!( "INSERT INTO items (item, quantity, owner) VALUES (?, ?, ?)", item, packet.stack.amount, connection.character_id ) .execute(&server.pool) .await }; if let Err(e) = result { error!( "Failed to set item stack {:?} for character {} due to {}", packet.stack, connection.character_id, e ); } } async fn handle_signup(server: &Server, packet: &net::server::Signup, addr: SocketAddr) { let Ok(existing) = sqlx::query!("SELECT id FROM users WHERE username = ?", packet.username) .fetch_optional(&server.pool) .await else { error!("Failed to fetch existing user for signup"); send_error(server, addr, "Server error", true).await; return; }; if let Some(_) = existing { send_error(server, addr, "User exists with that username", true).await; return; } if let Err(e) = sqlx::query!( "INSERT INTO users (username, password) VALUES (?, ?)", packet.username, packet.password ) .execute(&server.pool) .await { error!("Failed to create new user due to {}", e); send_error(server, addr, "Server error", true).await; return; } let Ok(user) = sqlx::query!("SELECT id FROM users WHERE username = ?", packet.username) .fetch_one(&server.pool) .await else { error!("Newly created user cannot be found"); send_error(server, addr, "Server error", true).await; return; }; if let Err(e) = sqlx::query!("INSERT INTO characters (name, position_x, position_y, position_z, owner) VALUES (?, ?, ?, ?, ?)", packet.username, 0.0, 0.0, 0.0, user.id).execute(&server.pool).await { error!("Failed to insert new character for {} due to {}", packet.username, e); send_error(server, addr, "Server error", true).await; return; } } ``` === components::components.rs ```pretty-rs // components.rs use crate::{ input::Mouse, ui::{self, Element, Rectangle, Region, SizeConstraints}, }; use glam::{UVec2, Vec4}; #[derive(Clone, Debug)] pub struct Container<T: Element> { pub child: T, pub color: Vec4, pub border_radius: u32, pub border_color: Vec4, } impl<T: Element> Element for Container<T> { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { let max = constraint.max - UVec2::new(self.border_radius, self.border_radius); let child_size = self.child.layout(SizeConstraints { min: constraint.min, max, }); child_size + UVec2::new(self.border_radius * 2, self.border_radius * 2) } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { scene.push(Rectangle { color: self.border_color, origin: region.origin, extent: region.size, radius: self.border_radius, ..Default::default() }); scene.push(Rectangle { color: self.color, origin: region.origin + UVec2::new(self.border_radius, self.border_radius), extent: region.size - UVec2::new(self.border_radius * 2, self.border_radius * 2), ..Default::default() }); self.child.paint( Region { origin: region.origin + UVec2::new(self.border_radius, self.border_radius), size: region.size - UVec2::new(self.border_radius * 2, self.border_radius * 2), }, scene, ) } } #[derive(Clone, Debug)] pub struct Padding<T: Element> { pub child: T, 
pub top: u32, pub bottom: u32, pub left: u32, pub right: u32, } impl<T: Element> Padding<T> { pub fn new_uniform(child: T, padding: u32) -> Self { Self { child, top: padding, bottom: padding, left: padding, right: padding, } } } impl<T: Element> Element for Padding<T> { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { let max = constraint.max - UVec2::new(self.left + self.right, self.top + self.bottom); let child_size = self.child.layout(SizeConstraints { min: constraint.min, max, }); child_size + UVec2::new(self.left + self.right, self.top + self.bottom) } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { self.child.paint( Region { origin: region.origin + UVec2::new(self.left, self.top), size: region.size - UVec2::new(self.left + self.right, self.top + self.bottom), }, scene, ); } } #[derive(Debug)] pub struct PaddingRef<'a, T: Element> { pub child: &'a mut T, pub top: u32, pub bottom: u32, pub left: u32, pub right: u32, } impl<'a, T: Element> PaddingRef<'a, T> { pub fn new_uniform(child: &'a mut T, padding: u32) -> Self { Self { child, top: padding, bottom: padding, left: padding, right: padding, } } } impl<T: Element> Element for PaddingRef<'_, T> { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { let max = constraint.max - UVec2::new(self.left + self.right, self.top + self.bottom); let child_size = self.child.layout(SizeConstraints { min: constraint.min, max, }); child_size + UVec2::new(self.left + self.right, self.top + self.bottom) } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { self.child.paint( Region { origin: region.origin + UVec2::new(self.left, self.top), size: region.size - UVec2::new(self.left + self.right, self.top + self.bottom), }, scene, ); } } #[derive(Clone, Copy, Debug)] pub enum VAlign { Top, Bottom, Center, } #[derive(Clone, Debug)] pub struct HPair<L: Element, R: Element> { pub left: L, pub right: R, pub align: VAlign, pub separation: u32, left_size: UVec2, right_size: UVec2, } impl<L: Element, R: Element> HPair<L, R> { pub fn new(left: L, right: R, align: VAlign, separation: u32) -> Self { Self { left, right, align, separation, left_size: UVec2::ZERO, right_size: UVec2::ZERO, } } fn get_top_padding(&self, wanted: u32, actual: u32) -> u32 { println!("Wanted: {}, Actual: {}", wanted, actual); match self.align { VAlign::Top => 0, VAlign::Bottom => wanted - actual, VAlign::Center => (wanted - actual) / 2, } } fn get_bottom_padding(&self, wanted: u32, actual: u32) -> u32 { (wanted - actual) - self.get_top_padding(wanted, actual) } } impl<L: Element, R: Element> Element for HPair<L, R> { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { self.left_size = self.left.layout(constraint.clone()); self.right_size = self.right.layout(SizeConstraints { min: constraint.min, max: constraint.max - UVec2::new(self.left_size.x + self.separation, 0), }); UVec2::new( self.left_size.x + self.right_size.x + self.separation, *[self.left_size.y, self.right_size.y].iter().max().unwrap(), ) } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { { let top = self.get_top_padding(region.size.y, self.left_size.y); let bottom = self.get_bottom_padding(region.size.y, self.left_size.y); let mut left = PaddingRef { child: &mut self.left, top, bottom, left: 0, right: 0, }; left.paint( Region { origin: region.origin, size: UVec2::new(self.left_size.x, region.size.y), }, scene, ); } { let top = self.get_top_padding(region.size.y, self.right_size.y); let bottom = self.get_bottom_padding(region.size.y, self.right_size.y); 
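            // Pad the right child vertically so it honours the pair's `VAlign` setting.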
let mut right = PaddingRef { child: &mut self.right, top, bottom, left: 0, right: 0, }; right.paint( Region { origin: region.origin + UVec2::new(self.left_size.x + self.separation, 0), size: UVec2::new(self.right_size.x, region.size.y), }, scene, ); } } } pub const CHAR_HEIGHT: u32 = 5; static CHARACTER_MAP: [(char, u32); 38] = [ ('A', 5), ('B', 5), ('C', 5), ('D', 5), ('E', 5), ('F', 5), ('G', 5), ('H', 5), ('I', 5), ('J', 5), ('K', 5), ('L', 5), ('M', 5), ('N', 5), ('O', 5), ('P', 5), ('Q', 5), ('R', 5), ('S', 5), ('T', 5), ('U', 5), ('V', 5), ('W', 5), ('X', 5), ('Y', 5), ('Z', 5), (' ', 5), ('0', 5), ('1', 3), ('2', 4), ('3', 4), ('4', 4), ('5', 4), ('6', 4), ('7', 4), ('8', 4), ('9', 4), ('/', 5), ]; #[derive(Clone, Debug)] pub struct Text { pub color: Vec4, pub content: String, } impl Element for Text { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { let width = self .content .to_uppercase() .chars() .map(|c| { CHARACTER_MAP .iter() .find(|a| a.0 == c) .expect(&format!("Character {} not in font", c)) .1 }) .fold(0, |acc, w| acc + w + 1); UVec2::new( width.max(constraint.min.x), CHAR_HEIGHT.max(constraint.min.y), ) } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { let mut offset = 0; for c in self.content.to_uppercase().chars() { let (atlas_id, (_, width)) = CHARACTER_MAP .iter() .enumerate() .find(|(_, a)| a.0 == c) .expect(&format!("Character {} not in font", c)); scene.push(Rectangle { color: self.color, origin: region.origin + UVec2::new(offset, 0), extent: UVec2::new(*width, 5), atlas_id: atlas_id as i32, ..Default::default() }); offset += width + 1; } } } #[derive(Clone, Debug)] pub enum HAlign { Left, Right, Center, } #[derive(Clone, Debug)] pub struct VList<T: Element> { pub children: Vec<T>, pub separation: u32, pub align: HAlign, } // TODO: Alignment impl<T: Element> Element for VList<T> { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { if self.children.len() == 0 { return UVec2::new(0, 0); } let children_sizes = self .children .iter_mut() .map(|child| child.layout(constraint.clone())) .collect::<Vec<UVec2>>(); let width = children_sizes.iter().map(|size| size.x).max().unwrap(); let height = children_sizes.first().unwrap().y * self.children.len() as u32 + self.separation * (self.children.len() as u32 - 1); UVec2 { x: width, y: height, } } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { if self.children.len() == 0 { return; } let height_per_child = (region.size.y + self.separation - (self.children.len() as u32 * self.separation)) / (self.children.len() as u32); for (i, child) in self.children.iter_mut().enumerate() { child.paint( Region { origin: region.origin + UVec2::new(0, (height_per_child + self.separation) * i as u32), size: UVec2::new(region.size.x, height_per_child), }, scene, ); } } } #[derive(Clone, Debug)] pub struct VPair<T: Element, B: Element> { pub top: T, pub bottom: B, pub align: HAlign, pub separation: u32, top_size: UVec2, bottom_size: UVec2, } impl<T: Element, B: Element> VPair<T, B> { pub fn new(top: T, bottom: B, align: HAlign, separation: u32) -> Self { Self { top, bottom, align, separation, top_size: UVec2::ZERO, bottom_size: UVec2::ZERO, } } fn get_left_padding(&self, wanted: u32, actual: u32) -> u32 { println!("Wanted: {}, Actual: {}", wanted, actual); match self.align { HAlign::Left => 0, HAlign::Right => wanted - actual, HAlign::Center => (wanted - actual) / 2, } } fn get_right_padding(&self, wanted: u32, actual: u32) -> u32 { (wanted - actual) - self.get_left_padding(wanted, actual) } } 
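// VPair stacks `top` above `bottom` with `separation` pixels between them,
// padding each child horizontally according to `align`.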
impl<T: Element, B: Element> Element for VPair<T, B> { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { self.top_size = self.top.layout(constraint.clone()); self.bottom_size = self.bottom.layout(SizeConstraints { min: constraint.min, max: constraint.max - UVec2::new(0, self.top_size.y + self.separation), }); UVec2::new( *[self.top_size.x, self.bottom_size.x].iter().max().unwrap(), self.top_size.y + self.bottom_size.y + self.separation, ) } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { { let left = self.get_left_padding(region.size.x, self.top_size.x); let right = self.get_right_padding(region.size.x, self.top_size.x); let mut top = PaddingRef { child: &mut self.top, left, right, top: 0, bottom: 0, }; top.paint( Region { origin: region.origin, size: UVec2::new(region.size.x, self.top_size.y), }, scene, ); } { let left = self.get_left_padding(region.size.x, self.bottom_size.x); let right = self.get_right_padding(region.size.x, self.bottom_size.x); let mut bottom = PaddingRef { child: &mut self.bottom, left, right, top: 0, bottom: 0, }; bottom.paint( Region { origin: region.origin + UVec2::new(0, self.top_size.y + self.separation), size: UVec2::new(region.size.x, self.bottom_size.y), }, scene, ); } } } pub struct Button<'a, H: Handler> { component: Container<Padding<Text>>, mouse: &'a Mouse, on_click: H, } pub trait Handler { fn handle(&mut self); } impl<'a, H: Handler> Button<'a, H> { pub fn new(mouse: &'a Mouse, text: &str, on_click: H) -> Self { Self { component: Container { child: Padding::new_uniform( Text { color: ui::color::get_highlight(), content: text.to_owned(), }, 3, ), color: ui::color::get_background(), border_color: ui::color::get_highlight(), border_radius: 1, }, mouse, on_click, } } } impl<H: Handler> Element for Button<'_, H> { fn layout(&mut self, constraint: ui::SizeConstraints) -> glam::UVec2 { self.component.layout(constraint) } fn paint(&mut self, region: ui::Region, scene: &mut Vec<ui::Rectangle>) { if ui::input::hovering(self.mouse, &region) { self.component.color = Vec4::ONE; } else { self.component.color = ui::color::get_background(); } if ui::input::clicked(self.mouse, &region, winit::event::MouseButton::Left) { self.on_click.handle() } self.component.paint(region, scene) } } ``` === aetheria::main.rs ```pretty-rs // main.rs #![feature(let_chains)] #![feature(trivial_bounds)] #![feature(associated_type_defaults)] #![warn(clippy::pedantic)] #![warn(clippy::nursery)] extern crate core; mod camera; mod components; mod data; mod entities; mod input; mod macros; mod renderer; mod scenes; mod socket; mod systems; mod time; mod ui; use anyhow::Result; use ash::vk; use assets::{ModelRegistry, ShaderRegistry, TextureRegistry, Transform}; use bytemuck::cast_slice; use camera::Camera; use common::{ item::{Item, ItemStack}, net, Observable, Observer, }; use glam::{IVec2, Quat, UVec2, Vec2, Vec3, Vec4}; use input::{Keyboard, Mouse}; use num_traits::FromPrimitive; use std::{ collections::HashMap, f32::consts::PI, io, net::{IpAddr, SocketAddr, UdpSocket}, ops::DerefMut, sync::{Arc, Mutex}, time::Instant, }; use time::Time; use tracing::info; use vulkan::Context; use winit::{ event::{MouseButton, VirtualKeyCode}, event_loop::ControlFlow, }; use crate::{ components::{craft, recipe_selector}, data::{inventory::Inventory, Data}, entities::{Player, Tree}, renderer::{Renderer, RENDER_HEIGHT, RENDER_WIDTH}, scenes::RootScene, socket::Socket, systems::{interact, render, Systems}, ui::{Element, Rectangle, Region, SizeConstraints, UIPass}, }; use 
dialog::DialogBox; struct Indices(Vec<u32>); impl From<Indices> for Vec<u8> { fn from(indices: Indices) -> Self { cast_slice::<u32, u8>(&indices.0).to_vec() } } fn create_window() -> (winit::event_loop::EventLoop<()>, winit::window::Window) { let event_loop = winit::event_loop::EventLoop::new(); let window = winit::window::WindowBuilder::new() .build(&event_loop) .unwrap(); (event_loop, window) } const CAMERA_SENSITIVITY: f32 = 250.0; fn main() { tracing_subscriber::fmt::init(); let mut ip = dialog::Input::new("Enter Server IP:") .title("IP") .show() .expect("Failed to show IP dialog box") .unwrap_or("".to_owned()); if ip.trim().is_empty() { ip = "127.0.0.1".to_owned(); } let remote = SocketAddr::new(IpAddr::V4(ip.trim().parse().unwrap()), 8000); let socket: Arc<Socket> = Arc::new(UdpSocket::bind("[::]:0").unwrap().into()); socket.connect(remote).unwrap(); socket.set_nonblocking(true).unwrap(); let username = dialog::Input::new("Enter username:") .title("Username") .show() .expect("Failed to show username dialog box"); if username.is_none() || username.as_ref().unwrap().trim().is_empty() { dialog::Message::new("Username cannot be empty") .title("Username error") .show() .expect("Failed to show error dialog box"); return; } let username = username.unwrap(); let password = dialog::Password::new("Enter password:") .title("Password") .show() .expect("Failed to show password dialog box"); if password.is_none() || password.as_ref().unwrap().trim().is_empty() { dialog::Message::new("Password cannot be empty") .title("Password error") .show() .expect("Failed to show error dialog box"); return; } let password = <PASSWORD>.<PASSWORD>(); match dialog::Question::new("Do you have an existing account") .title("Existing account") .show() .unwrap() { dialog::Choice::Yes => { let login = net::server::Packet::Login(net::server::Login { username: username.trim().to_owned(), password: <PASSWORD>.trim().to_owned(), }); socket.send(&login).unwrap(); } dialog::Choice::No => { let signup = net::server::Packet::Signup(net::server::Signup { username: username.trim().to_owned(), password: <PASSWORD>.trim().to_owned(), }); socket.send(&signup).unwrap(); } dialog::Choice::Cancel => return, }; let (event_loop, window) = create_window(); let window = Arc::new(window); let ctx = Context::new(&window); let mut model_registry = ModelRegistry::new(); let mut shader_registry = ShaderRegistry::new(); let mut texture_registry = TextureRegistry::new(); let mut renderer = Renderer::new(ctx, window.clone()).unwrap(); let mut camera = Camera::new(480.0, 270.0, &renderer).unwrap(); let mut time = Time::new(&renderer).unwrap(); let render_system = Arc::new(Mutex::new( render::System::new(&renderer, &mut shader_registry, &camera, &time).unwrap(), )); let interact_system = Arc::new(Mutex::new(interact::System::new())); let mut data = Data { inventory: Inventory::new(socket.clone()), current_recipe: None, recipe_selections: None, }; let ui_pass = Arc::new(Mutex::new( UIPass::new( &mut renderer, &mut shader_registry, &mut texture_registry, render_system.lock().unwrap().get_texture(), ) .unwrap(), )); renderer.add_pass(render_system.clone()); renderer.add_pass(ui_pass.clone()); renderer.set_output_image( ui_pass.lock().unwrap().get_texture().image.clone(), vk::ImageLayout::GENERAL, ); let mut keyboard = Keyboard::new(); let mut mouse = Mouse::new(); let mut root = RootScene::new( &mut renderer, &mut Systems { render: &mut render_system.lock().unwrap(), interact: &mut interact_system.lock().unwrap(), }, &mut model_registry, ) 
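    // Abort startup if the initial scene cannot be built.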
.expect("Failed to load scene"); interact_system .lock() .unwrap() .set_player(root.player.clone()); let mut players: HashMap<String, Arc<Mutex<Player>>> = HashMap::new(); let mut last_heartbeat: Instant = Instant::now(); let mut inventory_open = false; event_loop.run(move |event, _, control_flow| { if let ControlFlow::ExitWithCode(_) = *control_flow { return; } control_flow.set_poll(); keyboard.on_event(&event); mouse.on_event(&event); let mut buf = [0; 4096]; match socket.recv(&mut buf) { Err(e) if e.kind() == io::ErrorKind::WouldBlock => {} Err(e) => panic!("{e}"), Ok(_) => { let packet: net::client::Packet = postcard::from_bytes(&buf).unwrap(); match packet { net::client::Packet::SpawnPlayer(packet) => { info!("Spawning player"); players.insert( packet.username, Player::new( &mut renderer, &mut Systems { render: &mut render_system.lock().unwrap(), interact: &mut interact_system.lock().unwrap(), }, &mut model_registry, Transform { translation: packet.position, rotation: Quat::IDENTITY, scale: Vec3::ONE, }, ) .unwrap(), ); } net::client::Packet::Move(packet) => { info!("Moving peer player"); players .get_mut(&packet.username) .expect("Peer not found") .lock() .unwrap() .player .transform .translation = packet.position; } net::client::Packet::DespawnPlayer(packet) => { info!("Deleting peer player"); players.remove(&packet.username); } net::client::Packet::NotifyDisconnection(packet) => { info!("Disconnecting due to {}", packet.reason); control_flow.set_exit(); return; } net::client::Packet::ModifyInventory(packet) => { info!("Setting {:?} to {}", packet.stack.item, packet.stack.amount); data.inventory.set(packet.stack); } net::client::Packet::DisplayError(packet) => { dialog::Message::new(packet.message) .title("Error") .show() .unwrap(); if packet.fatal { control_flow.set_exit(); return; } } } } }; if last_heartbeat.elapsed().as_secs_f32() > 10.0 { heartbeat(&socket).unwrap(); last_heartbeat = Instant::now(); } match event { winit::event::Event::WindowEvent { event, .. 
} => match event { winit::event::WindowEvent::Resized(size) => { renderer.recreate_swapchain().unwrap(); camera.width = size.width as f32; camera.height = size.height as f32; } winit::event::WindowEvent::CloseRequested => { disconnect(&socket).unwrap(); control_flow.set_exit() } _ => (), }, winit::event::Event::MainEventsCleared => { if keyboard.is_key_down(VirtualKeyCode::Escape) { disconnect(&socket).unwrap(); control_flow.set_exit() } if mouse.is_button_down(MouseButton::Right) { camera.theta -= mouse.delta.x / CAMERA_SENSITIVITY } if keyboard.is_key_down(VirtualKeyCode::Left) { let mut sun = root.sun.lock().unwrap(); let theta = sun.get_theta() + PI / 60.0; sun.update_theta(theta); } if keyboard.is_key_down(VirtualKeyCode::Right) { let mut sun = root.sun.lock().unwrap(); let theta = sun.get_theta() - PI / 60.0; sun.update_theta(theta); } if keyboard.is_key_pressed(VirtualKeyCode::I) { inventory_open = !inventory_open; } renderer.wait_for_frame(); render_system .lock() .unwrap() .set_geometry(&data, &renderer, &model_registry); let mut scene = Vec::new(); interact_system .lock() .unwrap() .frame_finished(&camera, &keyboard, &mut scene, &mut data); if let Some(mut component) = craft::Component::new(&mut data, &mouse) { let size = component.layout(SizeConstraints { min: UVec2::new(0, 0), max: UVec2::new(480, 270), }); component.paint( Region { origin: UVec2::new(0, 0), size, }, &mut scene, ) } if let Some(mut component) = recipe_selector::Component::new(&mut data, &mouse) { let size = component.layout(SizeConstraints { min: UVec2::new(0, 0), max: UVec2::new(480, 270), }); component.paint( Region { origin: UVec2::new(0, 0), size, }, &mut scene, ) } if inventory_open { let mut inventory_window = components::inventory::Component::new(&data.inventory); let size = inventory_window.layout(SizeConstraints { min: UVec2::new(0, 0), max: UVec2::new(RENDER_WIDTH, RENDER_HEIGHT), }); inventory_window.paint( Region { origin: UVec2::new(480 - (size.x + 2), 270 - (size.y + 2)), size, }, &mut scene, ); } ui_pass .lock() .unwrap() .set_geometry(&renderer, &scene) .expect("Failed to set UI geometry"); renderer.render(); let viewport = Vec2::new( window.inner_size().width as f32, window.inner_size().height as f32, ); root.frame_finished(&keyboard, &mouse, &camera, &time, viewport, &socket); time.frame_finished(); keyboard.frame_finished(); camera.frame_finished(); mouse.frame_finished(); camera.target = root.player.lock().unwrap().player.transform.translation; println!("{}", mouse.position); } _ => (), }; if let ControlFlow::ExitWithCode(_) = *control_flow { println!("Waiting for GPU to finish"); unsafe { renderer.device.device_wait_idle().unwrap() }; } }); } fn heartbeat(socket: &Socket) -> Result<()> { let packet = net::server::Packet::Heartbeat; socket.send(&packet)?; Ok(()) } fn disconnect(socket: &Socket) -> Result<()> { let packet = net::server::Packet::Disconnect; socket.send(&packet)?; Ok(()) } ``` === vulkan::command.rs ```pretty-rs // command.rs use super::{compute, graphics, Device, Image, Renderpass, Set}; use ash::vk; use std::{ops::Deref, result::Result, sync::Arc}; #[derive(Clone, Copy, Debug, Default)] pub struct DrawOptions { pub vertex_count: u32, pub instance_count: u32, pub first_vertex: i32, pub first_instance: u32, } #[derive(Clone, Debug)] pub struct Buffer { pub(crate) buffer: vk::CommandBuffer, } enum Pipeline { Graphics(graphics::Pipeline), Compute(compute::Pipeline), } impl Pipeline { pub fn get_layout(&self) -> vk::PipelineLayout { match self { Pipeline::Graphics(graphics) => 
graphics.layout, Pipeline::Compute(compute) => compute.layout, } } pub fn get_bind_point(&self) -> vk::PipelineBindPoint { match self { Pipeline::Compute(_) => vk::PipelineBindPoint::COMPUTE, Pipeline::Graphics(_) => vk::PipelineBindPoint::GRAPHICS, } } } pub struct BufferBuilder { buffer: Buffer, device: Arc<Device>, pipeline: Option<Pipeline>, } #[derive(Clone, Debug)] pub struct TransitionLayoutOptions { pub old: vk::ImageLayout, pub new: vk::ImageLayout, pub source_access: vk::AccessFlags, pub destination_access: vk::AccessFlags, pub source_stage: vk::PipelineStageFlags, pub destination_stage: vk::PipelineStageFlags, } impl BufferBuilder { pub fn begin(self) -> Result<Self, vk::Result> { let begin_info = vk::CommandBufferBeginInfo::builder(); unsafe { self.device.begin_command_buffer(**self, &begin_info)? }; Ok(self) } pub fn begin_renderpass( self, renderpass: &Renderpass, framebuffer: vk::Framebuffer, extent: vk::Extent2D, ) -> Self { let render_area = vk::Rect2D::builder() .offset(vk::Offset2D::default()) .extent(extent); let color_clear_value = vk::ClearValue { color: vk::ClearColorValue { float32: [0.0, 0.0, 0.0, 1.0], }, }; let depth_clear_value = vk::ClearValue { depth_stencil: vk::ClearDepthStencilValue { depth: 1.0, stencil: 0, }, }; let clear_values = &[color_clear_value, depth_clear_value]; let begin_info = vk::RenderPassBeginInfo::builder() .render_pass(**renderpass) .framebuffer(framebuffer) .render_area(*render_area) .clear_values(clear_values); unsafe { self.device .cmd_begin_render_pass(**self, &begin_info, vk::SubpassContents::INLINE) }; self } pub fn bind_graphics_pipeline(mut self, pipeline: graphics::Pipeline) -> Self { unsafe { self.device .cmd_bind_pipeline(**self, vk::PipelineBindPoint::GRAPHICS, *pipeline) }; self.pipeline = Some(Pipeline::Graphics(pipeline)); self } pub fn bind_compute_pipeline(mut self, pipeline: compute::Pipeline) -> Self { unsafe { self.device .cmd_bind_pipeline(**self, vk::PipelineBindPoint::COMPUTE, *pipeline) }; self.pipeline = Some(Pipeline::Compute(pipeline)); self } pub fn bind_descriptor_set(self, binding: u32, descriptor_set: &Set) -> Self { let descriptor_sets = &[**descriptor_set]; unsafe { self.device.cmd_bind_descriptor_sets( **self, self.pipeline .as_ref() .expect("Binding descriptor set without pipeline bound") .get_bind_point(), self.pipeline.as_ref().unwrap().get_layout(), binding, descriptor_sets, &[], ); } self } pub fn bind_index_buffer(self, index_buffer: &super::Buffer) -> Self { unsafe { self.device .cmd_bind_index_buffer(**self, **index_buffer, 0, vk::IndexType::UINT32) }; self } pub fn bind_vertex_buffer(self, vertex_buffer: &super::Buffer) -> Self { unsafe { self.device .cmd_bind_vertex_buffers(**self, 0, &[**vertex_buffer], &[0]) }; self } pub fn next_subpass(self) -> Self { unsafe { self.device .cmd_next_subpass(**self, vk::SubpassContents::INLINE) }; self } pub fn draw(self, options: DrawOptions) -> Self { unsafe { self.device.cmd_draw_indexed( **self, options.vertex_count, options.instance_count, 0, options.first_vertex, options.first_instance, ); }; self } pub fn dispatch(self, x: u32, y: u32, z: u32) -> Self { unsafe { self.device.cmd_dispatch(**self, x, y, z); } self } pub fn blit_image( self, from: &Image, to: &Image, from_layout: vk::ImageLayout, to_layout: vk::ImageLayout, aspect: vk::ImageAspectFlags, filter: vk::Filter, ) -> Self { unsafe { let subresource = vk::ImageSubresourceLayers::builder() .aspect_mask(aspect) .mip_level(0) .base_array_layer(0) .layer_count(1); let copy_info = 
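                // The blit covers the full extent of both images, from the origin to each width/height.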
vk::ImageBlit::builder() .src_subresource(*subresource) .src_offsets([ vk::Offset3D::default(), vk::Offset3D { x: from.width as i32, y: from.height as i32, z: 1, }, ]) .dst_subresource(*subresource) .dst_offsets([ vk::Offset3D::default(), vk::Offset3D { x: to.width as i32, y: to.height as i32, z: 1, }, ]); self.device.cmd_blit_image( **self, from.image, from_layout, to.image, to_layout, &[*copy_info], filter, ); } self } pub fn end_renderpass(self) -> Self { unsafe { self.device.cmd_end_render_pass(**self) }; self } pub fn copy_buffer_to_image(self, buffer: &super::Buffer, image: &Image) -> Self { let region = vk::BufferImageCopy::builder() .buffer_offset(0) .buffer_row_length(0) .buffer_image_height(0) .image_subresource(vk::ImageSubresourceLayers { aspect_mask: vk::ImageAspectFlags::COLOR, mip_level: 0, base_array_layer: 0, layer_count: 1, }) .image_offset(vk::Offset3D { x: 0, y: 0, z: 0 }) .image_extent(vk::Extent3D { width: image.width, height: image.height, depth: 1, }); let regions = &[*region]; unsafe { self.device.cmd_copy_buffer_to_image( **self, **buffer, **image, vk::ImageLayout::TRANSFER_DST_OPTIMAL, regions, ) }; self } pub fn transition_image_layout(self, image: &Image, options: &TransitionLayoutOptions) -> Self { let barrier = vk::ImageMemoryBarrier::builder() .src_access_mask(options.source_access) .dst_access_mask(options.destination_access) .old_layout(options.old) .new_layout(options.new) .src_queue_family_index(vk::QUEUE_FAMILY_IGNORED) .dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED) .image(**image) .subresource_range(vk::ImageSubresourceRange { aspect_mask: vk::ImageAspectFlags::COLOR, base_mip_level: 0, level_count: 1, base_array_layer: 0, layer_count: 1, }); let image_memory_barriers = &[*barrier]; unsafe { self.device.cmd_pipeline_barrier( **self, options.source_stage, options.destination_stage, vk::DependencyFlags::empty(), &[], &[], image_memory_barriers, ) }; self } pub fn clear(self, image: Arc<Image>, color: [f32; 4], layout: vk::ImageLayout) -> Self { unsafe { let clear_value = vk::ClearColorValue { float32: color }; let subresource_range = vk::ImageSubresourceRange::builder() .aspect_mask(vk::ImageAspectFlags::COLOR) .base_mip_level(0) .level_count(1) .base_array_layer(0) .layer_count(1); let subresource_ranges = &[*subresource_range]; self.device.cmd_clear_color_image( **self, image.image, layout, &clear_value, subresource_ranges, ); } self } pub fn record<F: Fn(Self) -> Self>(self, predicate: F) -> Self { predicate(self) } pub fn end(self) -> Result<Buffer, vk::Result> { unsafe { self.device.end_command_buffer(**self)? }; Ok(self.buffer) } pub fn submit(self) -> Result<(), vk::Result> { unsafe { self.device.end_command_buffer(**self)? }; let command_buffers = &[**self]; let submit_info = vk::SubmitInfo::builder().command_buffers(command_buffers); let submits = &[*submit_info]; unsafe { self.device .queue_submit(*self.device.queues.graphics, submits, vk::Fence::null())? }; unsafe { self.device.queue_wait_idle(*self.device.queues.graphics)? 
}; Ok(()) } } impl Deref for BufferBuilder { type Target = Buffer; fn deref(&self) -> &Self::Target { &self.buffer } } impl Deref for Buffer { type Target = vk::CommandBuffer; fn deref(&self) -> &Self::Target { &self.buffer } } pub struct Pool { pub(crate) pool: vk::CommandPool, buffers: Vec<Buffer>, device: Arc<Device>, } impl Pool { pub fn new(device: Arc<Device>) -> Result<Self, vk::Result> { let create_info = vk::CommandPoolCreateInfo::builder().queue_family_index(device.queues.graphics.index); let pool = unsafe { device.create_command_pool(&create_info, None)? }; Ok(Self { pool, buffers: Vec::new(), device, }) } pub fn allocate(&mut self) -> Result<BufferBuilder, vk::Result> { let allocate_info = vk::CommandBufferAllocateInfo::builder() .command_pool(self.pool) .level(vk::CommandBufferLevel::PRIMARY) .command_buffer_count(1); let buffer = unsafe { self.device.allocate_command_buffers(&allocate_info)?[0] }; let buffer = Buffer { buffer }; self.buffers.push(buffer.clone()); let builder = BufferBuilder { buffer, device: self.device.clone(), pipeline: None, }; Ok(builder) } pub fn clear(&mut self) { if self.buffers.is_empty() { return; } unsafe { self.device.free_command_buffers( **self, &self .buffers .iter() .map(|buffer| **buffer) .collect::<Vec<vk::CommandBuffer>>(), ); } self.buffers = Vec::new(); } } impl Deref for Pool { type Target = vk::CommandPool; fn deref(&self) -> &Self::Target { &self.pool } } ``` === gltf::lib.rs ```pretty-rs // lib.rs #![feature(exact_size_is_empty)] use std::{ collections::HashMap, fmt::Debug, io::{Cursor, Read}, }; use serde::{Deserialize, Serialize}; use serde_repr::{Deserialize_repr, Serialize_repr}; #[derive(Deserialize_repr, Serialize_repr, Debug)] #[repr(u16)] pub enum ComponentType { I8 = 5120, U8 = 5121, I16 = 5122, U16 = 5123, U32 = 5125, F32 = 5126, } impl ComponentType { pub fn size_of(&self) -> usize { match self { Self::I8 | Self::U8 => 1, Self::I16 | Self::U16 => 2, Self::U32 | Self::F32 => 4, } } } #[derive(Serialize, Deserialize, Debug)] pub struct Accessor { #[serde(rename = "bufferView")] pub buffer_view: usize, #[serde(default)] #[serde(rename = "byteOffset")] pub byte_offset: usize, #[serde(rename = "componentType")] pub component_type: ComponentType, #[serde(default)] pub normalized: bool, pub count: usize, #[serde(rename = "type")] pub element_type: String, #[serde(default)] pub max: Option<Vec<f64>>, #[serde(default)] pub min: Option<Vec<f64>>, } impl Accessor { pub fn get_data(&self, glb: &Glb) -> Vec<u8> { let buffer_view = glb.gltf.buffer_views.get(self.buffer_view).unwrap(); let buffer = glb.gltf.buffers.get(buffer_view.buffer).unwrap(); let offset = self.byte_offset + buffer_view.byte_offset; let element_size = match self.element_type.as_str() { "SCALAR" => 1, "VEC2" => 2, "VEC3" => 3, "VEC4" | "MAT2" => 4, "MAT3" => 9, "MAT4" => 16, _ => panic!("Invalid element type"), }; let size = self.component_type.size_of() * element_size * self.count; glb.buffer[offset..(offset + size)].to_vec() } } #[derive(Serialize, Deserialize, Debug)] pub struct Animation {} #[derive(Serialize, Deserialize, Debug)] pub struct Asset { #[serde(default)] pub copyright: Option<String>, #[serde(default)] pub generator: Option<String>, pub version: String, #[serde(default)] #[serde(rename = "minVersion")] pub min_version: Option<String>, } #[derive(Serialize, Deserialize, Debug)] pub struct Buffer { #[serde(default)] pub uri: String, #[serde(rename = "byteLength")] pub byte_length: usize, } #[derive(Serialize, Deserialize, Debug)] pub struct BufferView { 
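    // Index of the backing buffer in `Gltf::buffers`.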
pub buffer: usize, #[serde(default)] #[serde(rename = "byteOffset")] pub byte_offset: usize, #[serde(rename = "byteLength")] pub byte_length: usize, #[serde(default)] #[serde(rename = "byteStride")] pub byte_stride: usize, #[serde(default)] pub target: Option<u32>, } #[derive(Serialize, Deserialize, Debug)] pub struct Camera {} #[derive(Serialize, Deserialize, Debug)] pub struct Image { #[serde(default)] pub uri: Option<String>, #[serde(default)] #[serde(rename = "mimeType")] pub mime_type: Option<String>, #[serde(default)] #[serde(rename = "bufferView")] pub buffer_view: Option<usize>, } #[derive(Serialize, Deserialize, Debug)] pub struct TextureInfo { pub index: usize, #[serde(default)] #[serde(rename = "texCoord")] pub tex_coord: usize, } #[derive(Serialize, Deserialize, Debug)] pub struct MaterialPBR { #[serde(default)] #[serde(rename = "baseColorFactor")] pub base_color_factor: Option<[f32; 4]>, #[serde(default)] #[serde(rename = "baseColorTexture")] pub base_color_texture: Option<TextureInfo>, #[serde(default)] #[serde(rename = "metallicFactor")] pub metallic_factor: Option<f32>, #[serde(default)] #[serde(rename = "roughnessFactor")] pub roughness_factor: Option<f32>, #[serde(default)] #[serde(rename = "metallicRoughnessTexture")] pub metallic_roughness_texture: Option<TextureInfo>, } impl Default for MaterialPBR { fn default() -> Self { Self { base_color_factor: Some([1.0, 1.0, 1.0, 1.0]), base_color_texture: None, metallic_factor: Some(1.0), roughness_factor: Some(1.0), metallic_roughness_texture: None, } } } #[derive(Serialize, Deserialize, Debug)] pub struct MaterialNormalTexture { pub index: usize, #[serde(default)] #[serde(rename = "texCoord")] pub tex_coord: usize, #[serde(default)] pub scale: Option<f32>, } #[derive(Serialize, Deserialize, Debug)] pub struct MaterialOcclusionTexture { pub index: usize, #[serde(default)] #[serde(rename = "texCoord")] pub tex_coord: usize, #[serde(default)] pub strength: Option<f32>, } #[derive(Serialize, Deserialize, Debug)] pub struct Material { #[serde(default)] #[serde(rename = "pbrMetallicRoughness")] pub pbr: MaterialPBR, #[serde(default)] #[serde(rename = "normalTexture")] pub normal_texture: Option<MaterialNormalTexture>, #[serde(default)] #[serde(rename = "occlusionTexture")] pub occlusion_texture: Option<MaterialOcclusionTexture>, #[serde(default)] #[serde(rename = "emissiveTexture")] pub emissive_texture: Option<TextureInfo>, #[serde(default)] #[serde(rename = "emissiveFavtor")] pub emissive_factor: Option<[f32; 3]>, #[serde(default)] #[serde(rename = "alphaMode")] pub alpha_mode: Option<String>, #[serde(default)] #[serde(rename = "alphaCutoff")] pub alpha_cutoff: Option<f32>, #[serde(default)] #[serde(rename = "doubleSided")] pub double_sided: bool, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct MeshPrimitive { pub attributes: HashMap<String, usize>, #[serde(default)] pub indices: Option<usize>, #[serde(default)] pub material: Option<usize>, } impl MeshPrimitive { pub fn get_attribute_data(&self, glb: &Glb, attribute: &str) -> Option<Vec<u8>> { glb.gltf .accessors .get(*self.attributes.get(attribute)?) 
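            // Resolve the accessor for this attribute and copy its raw bytes out of the GLB binary chunk.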
.map(|accessor| accessor.get_data(glb)) } pub fn get_indices_data(&self, glb: &Glb) -> Option<Vec<u32>> { glb.gltf.accessors.get(self.indices?).map(|accessor| { let data = accessor.get_data(glb); match accessor.component_type { ComponentType::U16 => bytemuck::cast_slice::<u8, u16>(&data) .iter() .copied() .map(|short| short as u32) .collect(), ComponentType::U32 => bytemuck::cast_slice::<u8, u32>(&data).to_vec(), _ => panic!("Invalid index component type"), } }) } } #[derive(Serialize, Deserialize, Debug)] pub struct Mesh { pub primitives: Vec<MeshPrimitive>, #[serde(default)] pub weights: Option<Vec<f64>>, } #[derive(Serialize, Deserialize, Debug)] pub struct Node { #[serde(default)] pub camera: Option<usize>, #[serde(default)] pub children: Vec<usize>, #[serde(default)] pub skin: Option<usize>, #[serde(default)] pub matrix: Option<[f32; 16]>, #[serde(default)] pub mesh: Option<usize>, #[serde(default)] pub rotation: Option<[f32; 4]>, #[serde(default)] pub scale: Option<[f32; 3]>, #[serde(default)] pub translation: Option<[f32; 3]>, #[serde(default)] pub weights: Option<Vec<f64>>, } #[derive(Deserialize_repr, Serialize_repr, Debug)] #[repr(u16)] pub enum Filter { Nearest = 9728, Linear = 9729, NearestMipmapNearest = 9984, LinearMipmapNearest = 9985, NearestMipmapLinear = 9986, LinearMipmapLinear = 9987, } impl Default for Filter { fn default() -> Self { Self::Linear } } #[derive(Deserialize_repr, Serialize_repr, Debug)] #[repr(u16)] pub enum AddressMode { ClampToEdge = 33071, MirroredRepeat = 33648, Repeat = 10497, } impl Default for AddressMode { fn default() -> Self { Self::Repeat } } #[derive(Serialize, Deserialize, Default, Debug)] pub struct Sampler { #[serde(default)] #[serde(rename = "magFilter")] pub mag_filter: Filter, #[serde(default)] #[serde(rename = "minFilter")] pub min_filter: Filter, #[serde(default)] #[serde(rename = "wrapS")] pub wrap_u: AddressMode, #[serde(default)] #[serde(rename = "wrapT")] pub wrap_v: AddressMode, } #[derive(Serialize, Deserialize, Debug)] pub struct Scene { #[serde(default)] pub nodes: Vec<usize>, } #[derive(Serialize, Deserialize, Debug)] pub struct Skin {} #[derive(Serialize, Deserialize, Debug)] pub struct Texture { #[serde(default)] pub sampler: Option<usize>, pub source: usize, } #[derive(Serialize, Deserialize, Debug)] pub struct Gltf { #[serde(default)] #[serde(rename = "extensionsUsed")] pub extensions_used: Vec<String>, #[serde(default)] #[serde(rename = "extensionsRequired")] pub extensions_required: Vec<String>, #[serde(default)] pub accessors: Vec<Accessor>, #[serde(default)] pub animations: Vec<Animation>, pub asset: Asset, #[serde(default)] pub buffers: Vec<Buffer>, #[serde(default)] #[serde(rename = "bufferViews")] pub buffer_views: Vec<BufferView>, #[serde(default)] pub cameras: Vec<Camera>, #[serde(default)] pub images: Vec<Image>, #[serde(default)] pub materials: Vec<Material>, #[serde(default)] pub meshes: Vec<Mesh>, #[serde(default)] pub nodes: Vec<Node>, #[serde(default)] pub samplers: Vec<Sampler>, #[serde(default)] pub scene: usize, #[serde(default)] pub scenes: Vec<Scene>, #[serde(default)] pub skins: Vec<Skin>, #[serde(default)] pub textures: Vec<Texture>, } impl Gltf { fn load(bytes: &[u8]) -> serde_json::Result<Self> { serde_json::from_slice(bytes) } } pub struct Glb { pub gltf: Gltf, pub buffer: Vec<u8>, } impl Glb { fn get_u32(bytes: &mut impl Iterator<Item = u8>) -> u32 { *bytemuck::from_bytes::<u32>(&bytes.take(4).collect::<Vec<u8>>()) } fn get(bytes: &mut impl Iterator<Item = u8>, length: usize) -> Vec<u8> { 
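        // Take the next `length` bytes from the GLB byte stream.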
bytes.take(length).collect() } pub fn load(bytes: &[u8]) -> serde_json::Result<Self> { let mut bytes = bytes.iter().copied(); let magic = Self::get_u32(&mut bytes); if magic != 0x46546C67 { panic!("Malformed GLB"); } let version = Self::get_u32(&mut bytes); if version != 2 { panic!("Aetheria only supports glTF 2.0"); } let _length = Self::get_u32(&mut bytes); let gltf_length = Self::get_u32(&mut bytes); let gltf_type = Self::get_u32(&mut bytes); if gltf_type != 0x4E4F534A { panic!("Malformed GLB"); } let gltf_bytes: Vec<u8> = Self::get(&mut bytes, gltf_length as usize); let gltf = Gltf::load(&gltf_bytes)?; let mut buffer = Vec::new(); if !bytes.is_empty() { let buffer_length = Self::get_u32(&mut bytes); let buffer_type = Self::get_u32(&mut bytes); if buffer_type != 0x004E4942 { panic!("Malformed GLB"); } buffer = bytes.take(buffer_length as usize).collect(); } Ok(Self { gltf, buffer }) } } ``` === systems::render.rs ```pretty-rs // render.rs use ash::vk; use assets::{Mesh, Model, ModelRegistry, ShaderRegistry, Transform, Vertex}; use bytemuck::{cast_slice, Pod, Zeroable}; use glam::{Vec3, Vec4}; use std::{ collections::HashMap, sync::{Arc, Mutex, Weak}, }; use uuid::Uuid; use vulkan::{ command, command::TransitionLayoutOptions, compute, Buffer, Context, Image, Pool, Set, SetLayout, SetLayoutBuilder, Shader, Texture, }; use crate::{ data::Data, renderer::{Pass, Renderer, RENDER_HEIGHT, RENDER_WIDTH}, Camera, Time, }; fn calculate_box(mesh: &Mesh, transform: &Transform) -> (Vec3, Vec3) { let mut min = Vec3::new(f32::INFINITY, f32::INFINITY, f32::INFINITY); let mut max = Vec3::new(f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY); for vertex in &mesh.vertices { let v = transform.get_matrix() * Vec4::new(vertex.pos.x, vertex.pos.y, vertex.pos.z, 1.0); min.x = min.x.min(v.x); min.y = min.y.min(v.y); min.z = min.z.min(v.z); max.x = max.x.max(v.x); max.y = max.y.max(v.y); max.z = max.z.max(v.z); } return (min, max); } #[derive(Clone)] pub struct RenderObject { pub model: Arc<Model>, pub transform: Transform, } pub trait Renderable { fn get_objects(&self) -> Vec<RenderObject>; } impl<T: Renderable> Renderable for Vec<T> { fn get_objects(&self) -> Vec<RenderObject> { self.iter() .flat_map(|item| item.get_objects()) .collect::<Vec<RenderObject>>() } } #[repr(C)] #[derive(Clone, Copy, Debug, Default, Pod, Zeroable)] struct MeshData { first_index: i32, num_indices: i32, material: i32, _padding: [f32; 1], min_aabb: [f32; 3], _padding2: [f32; 1], max_aabb: [f32; 3], _padding3: [f32; 1], transform: [f32; 16], } #[repr(C)] #[derive(Clone, Copy, Debug, Default, Pod, Zeroable)] pub struct Material { albedo: Vec4, } #[repr(C)] #[derive(Clone, Copy, Debug, Pod, Zeroable)] pub struct Light { pub position: Vec3, pub strength: f32, pub color: Vec3, _padding: [f32; 1], } impl Light { pub fn new(position: Vec3, strength: f32, color: Vec3) -> Self { Self { position, strength, color, _padding: [0.0], } } } pub trait Emissive { fn get_lights(&self, data: &Data) -> Vec<Light>; } pub struct System { texture: Texture, frame_layout: SetLayout, frame_pool: Pool, frame_set: Set, geometry_layout: SetLayout, geometry_pool: Pool, geometry_set: Set, pipeline: compute::Pipeline, renderables: Vec<Weak<Mutex<dyn Renderable>>>, lights: Vec<Weak<Mutex<dyn Emissive>>>, } impl System { pub fn new( ctx: &Context, shader_registry: &mut ShaderRegistry, camera: &Camera, time: &Time, ) -> Result<Self, vk::Result> { let image = Image::new( &ctx, RENDER_WIDTH, RENDER_HEIGHT, vk::Format::R8G8B8A8_UNORM, vk::ImageUsageFlags::STORAGE | 
vk::ImageUsageFlags::SAMPLED, )?; let texture = Texture::from_image(&ctx, image, vk::Filter::NEAREST, vk::Filter::NEAREST, true)?; let frame_layout = SetLayoutBuilder::new(&ctx.device) .add(vk::DescriptorType::UNIFORM_BUFFER) .add(vk::DescriptorType::UNIFORM_BUFFER) .build()?; let mut frame_pool = Pool::new(ctx.device.clone(), frame_layout.clone(), 1)?; let frame_set = frame_pool.allocate()?; frame_set.update_buffer(&ctx.device, 0, &camera.buffer); frame_set.update_buffer(&ctx.device, 1, &time.buffer); let geometry_layout = SetLayoutBuilder::new(&ctx.device) .add(vk::DescriptorType::STORAGE_IMAGE) .add(vk::DescriptorType::STORAGE_BUFFER) .add(vk::DescriptorType::STORAGE_BUFFER) .add(vk::DescriptorType::STORAGE_BUFFER) .add(vk::DescriptorType::STORAGE_BUFFER) .add(vk::DescriptorType::STORAGE_BUFFER) .build()?; let mut geometry_pool = Pool::new(ctx.device.clone(), geometry_layout.clone(), 1)?; let geometry_set = geometry_pool.allocate()?; geometry_set.update_texture(&ctx.device, 0, &texture, vk::ImageLayout::GENERAL); let shader: Arc<Shader> = shader_registry.load(&ctx.device, "test.comp.glsl"); let pipeline = compute::Pipeline::new( &ctx.device, shader.clone(), &[frame_layout.clone(), geometry_layout.clone()], )?; Ok(Self { texture, frame_layout, frame_set, frame_pool, geometry_layout, geometry_pool, geometry_set, pipeline, renderables: Vec::new(), lights: Vec::new(), }) } pub fn set_geometry(&self, data: &Data, renderer: &Renderer, model_registry: &ModelRegistry) { let objects = self .renderables .iter() .filter_map(|renderable| renderable.upgrade()) .flat_map(|renderable| renderable.lock().unwrap().get_objects()) .collect::<Vec<RenderObject>>(); let lights = self .lights .iter() .filter_map(|emissive| emissive.upgrade()) .flat_map(|emissive| emissive.lock().unwrap().get_lights(data).clone()) .collect::<Vec<Light>>(); let mut meshes: Vec<MeshData> = Vec::new(); let mut vertices: Vec<Vertex> = Vec::new(); let mut indices: Vec<i32> = Vec::new(); let mut materials: Vec<Material> = Vec::new(); let mut mesh_to_index: HashMap<Uuid, i32> = HashMap::new(); for mesh in model_registry .get_models() .iter() .flat_map(|model| &model.meshes) { mesh_to_index.insert(mesh.id, indices.len() as i32); indices.append( &mut mesh .indices .iter() .copied() .map(|index| index as i32 + vertices.len() as i32) .collect::<Vec<i32>>(), ); vertices.append(&mut mesh.vertices.clone()); } for (i, (mesh, transform)) in objects .iter() .flat_map(|object| { object .model .meshes .iter() .map(|mesh| (mesh, object.transform.clone())) }) .enumerate() { let transform = transform.combine(&mesh.transform); let (min_aabb, max_aabb) = calculate_box(&mesh, &transform); let mesh_data = MeshData { first_index: *mesh_to_index .get(&mesh.id) .expect("Can't find index in mesh_to_index"), num_indices: mesh.indices.len() as i32, material: i as i32, transform: transform.get_matrix().to_cols_array(), min_aabb: min_aabb.to_array(), max_aabb: max_aabb.to_array(), ..Default::default() }; meshes.push(mesh_data); materials.push(Material { albedo: mesh.color }); } let mut mesh_data = cast_slice::<i32, u8>(&[meshes.len() as i32, 0, 0, 0]).to_vec(); mesh_data.append(&mut cast_slice::<MeshData, u8>(&meshes).to_vec()); let vertex_buffer = Buffer::new( &renderer, cast_slice::<Vertex, u8>(&vertices), vk::BufferUsageFlags::STORAGE_BUFFER, ) .unwrap(); let indices = indices .iter() .copied() .flat_map(|index| [index, 0, 0, 0]) .collect::<Vec<i32>>(); let index_buffer = Buffer::new( &renderer, cast_slice::<i32, u8>(&indices), 
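            // STORAGE_BUFFER usage so the compute pass can read the index data through the geometry descriptor set.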
vk::BufferUsageFlags::STORAGE_BUFFER, ) .unwrap(); let mesh_buffer = Buffer::new(&renderer, mesh_data, vk::BufferUsageFlags::STORAGE_BUFFER).unwrap(); let material_buffer = Buffer::new( &renderer, cast_slice::<Material, u8>(&materials), vk::BufferUsageFlags::STORAGE_BUFFER, ) .unwrap(); let mut light_data = cast_slice::<Light, u8>(&lights).to_vec(); let mut light_buffer = cast_slice::<i32, u8>(&[lights.len() as i32, 0, 0, 0]).to_vec(); light_buffer.append(&mut light_data); let light_buffer = Buffer::new( &renderer, light_buffer, vk::BufferUsageFlags::STORAGE_BUFFER, ) .unwrap(); self.geometry_set .update_buffer(&renderer.device, 1, &vertex_buffer); self.geometry_set .update_buffer(&renderer.device, 2, &index_buffer); self.geometry_set .update_buffer(&renderer.device, 3, &mesh_buffer); self.geometry_set .update_buffer(&renderer.device, 4, &material_buffer); self.geometry_set .update_buffer(&renderer.device, 5, &light_buffer); } pub fn get_texture(&self) -> &'_ Texture { &self.texture } pub fn add<T: Renderable + Sized + 'static>(&mut self, renderable: Arc<Mutex<T>>) { self.renderables .push(Arc::downgrade(&(renderable as Arc<Mutex<dyn Renderable>>))); } pub fn add_light<T: Emissive + Sized + 'static>(&mut self, emissive: Arc<Mutex<T>>) { self.lights .push(Arc::downgrade(&(emissive as Arc<Mutex<dyn Emissive>>))); } } impl Pass for System { fn record(&self, cmd: command::BufferBuilder) -> command::BufferBuilder { cmd.transition_image_layout( &self.texture.image, &TransitionLayoutOptions { old: vk::ImageLayout::UNDEFINED, new: vk::ImageLayout::GENERAL, source_access: vk::AccessFlags::NONE, destination_access: vk::AccessFlags::SHADER_WRITE, source_stage: vk::PipelineStageFlags::TOP_OF_PIPE, destination_stage: vk::PipelineStageFlags::COMPUTE_SHADER, }, ) .bind_compute_pipeline(self.pipeline.clone()) .bind_descriptor_set(0, &self.frame_set) .bind_descriptor_set(1, &self.geometry_set) .dispatch( RENDER_WIDTH / 16, (RENDER_HEIGHT as f32 / 16.0).ceil() as u32, 1, ) } } ``` === assets::lib.rs ```pretty-rs // lib.rs use ash::vk; use bytemuck::{cast_slice, Pod, Zeroable}; use glam::{Mat4, Quat, Vec2, Vec3, Vec4}; use std::{ collections::HashMap, path::Path, sync::{Arc, Weak}, }; use uuid::Uuid; use vulkan::{buffer::Buffer, context::Context, device::Device, graphics::Shader, Texture}; pub struct ShaderRegistry { registry: HashMap<String, Weak<Shader>>, } impl ShaderRegistry { pub fn new() -> Self { Self { registry: HashMap::new(), } } pub fn load(&mut self, device: &Device, path: &str) -> Arc<Shader> { let registry_value = self .registry .get(&path.to_owned()) .map(|weak| weak.upgrade()) .flatten(); match registry_value { Some(value) => value, None => { let spv = Path::new("assets/shaders/compiled") .join(path) .with_extension("spv"); let stage = match spv .file_stem() .unwrap() .to_str() .unwrap() .split(".") .last() .unwrap() { "vert" => vk::ShaderStageFlags::VERTEX, "frag" => vk::ShaderStageFlags::FRAGMENT, "comp" => vk::ShaderStageFlags::COMPUTE, shader_type => panic!("Unexpected shader type: {}", shader_type), }; let code = std::fs::read(spv) .ok() .expect(&format!("Cannot find file: {}", path)); let shader = Arc::new(Shader::new(device, &code, stage).unwrap()); self.registry .insert(path.to_owned(), Arc::downgrade(&shader)); shader } } } } #[repr(C)] #[derive(Clone, Copy, Debug, Pod, Zeroable, Default)] pub struct Vertex { pub pos: Vec3, pub _padding: f32, pub normal: Vec3, pub _padding2: f32, } pub struct Model { pub meshes: Vec<Mesh>, } pub struct Mesh { pub id: Uuid, pub vertices: Vec<Vertex>, 
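    // Triangle indices into `vertices`, widened to u32 at load time.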
pub indices: Vec<u32>, pub color: Vec4, pub transform: Transform, } #[derive(Clone, Debug)] pub struct Transform { pub translation: Vec3, pub rotation: Quat, pub scale: Vec3, } impl Transform { pub const IDENTITY: Self = Self { translation: Vec3::ZERO, rotation: Quat::IDENTITY, scale: Vec3::ONE, }; pub fn get_matrix(&self) -> Mat4 { Mat4::from_scale_rotation_translation( self.scale, self.rotation.normalize(), self.translation, ) } pub fn from_matrix(matrix: &Mat4) -> Self { let (scale, rotation, translation) = matrix.to_scale_rotation_translation(); Self { translation, rotation, scale, } } pub fn combine(&self, rhs: &Self) -> Self { Self { translation: self.translation + rhs.translation, rotation: self.rotation * rhs.rotation, scale: self.scale * rhs.scale, } } } impl Default for Transform { fn default() -> Self { Self { translation: Vec3::ZERO, rotation: Quat::IDENTITY, scale: Vec3::ONE, } } } pub struct ModelRegistry { registry: HashMap<String, Weak<Model>>, } impl ModelRegistry { pub fn new() -> Self { Self { registry: HashMap::new(), } } pub fn get_models(&self) -> Vec<Arc<Model>> { self.registry .values() .filter_map(|weak| weak.upgrade()) .collect() } pub fn load(&mut self, path: &str) -> Arc<Model> { fn get_transform(node: &gltf::Node) -> Mat4 { if let Some(matrix) = node.matrix { Mat4::from_cols_array(&matrix) } else { let scale = node .scale .map(|arr| Vec3::from_array(arr)) .unwrap_or(Vec3::ONE); let rotation = node .rotation .map(|arr| Quat::from_array(arr)) .unwrap_or(Quat::IDENTITY); let translation = node .translation .map(|arr| Vec3::from_array(arr)) .unwrap_or(Vec3::ZERO); Mat4::from_scale_rotation_translation(scale, rotation, translation) } } fn get_meshes(glb: &gltf::Glb, node: &gltf::Node, parent_transform: Mat4) -> Vec<Mesh> { let transform = get_transform(node) * parent_transform; let mut meshes = match node.mesh { None => Vec::new(), Some(mesh) => { let mesh = &glb.gltf.meshes[mesh]; mesh.primitives .iter() .map(|primitive| { let color = primitive .material .map(|material| &glb.gltf.materials[material]) .and_then(|material| material.pbr.base_color_factor) .map(|arr| Vec4::from_array(arr)) .unwrap_or(Vec4::ONE); let indices = primitive.get_indices_data(glb).expect("No indicies"); let positions = primitive .get_attribute_data(glb, "POSITION") .expect("No positions"); let positions = bytemuck::cast_slice::<u8, Vec3>(&positions) .iter() .map(|position| *position * 100.0) .collect::<Vec<Vec3>>(); let normals = primitive .get_attribute_data(glb, "NORMAL") .expect("No normals"); let normals = bytemuck::cast_slice::<u8, Vec3>(&normals); let vertices: Vec<Vertex> = std::iter::zip(positions, normals) .map(|(pos, normal)| Vertex { pos, normal: *normal, ..Default::default() }) .collect(); Mesh { id: Uuid::new_v4(), indices, vertices, color, transform: Transform::from_matrix(&transform), } }) .collect() } }; meshes.append( &mut node .children .iter() .map(|child| &glb.gltf.nodes[*child]) .flat_map(|child| get_meshes(glb, child, transform)) .collect(), ); meshes } let registry_value = self .registry .get(&path.to_owned()) .map(|weak| weak.upgrade()) .flatten(); match registry_value { Some(value) => value, None => { let glb_path = Path::new("assets/meshes").join(path); println!("Loading: {}", glb_path.display()); let glb = gltf::Glb::load(&std::fs::read(glb_path).unwrap()).unwrap(); let scene = &glb.gltf.scenes[glb.gltf.scene]; let meshes = scene .nodes .iter() .map(|node| &glb.gltf.nodes[*node]) .flat_map(|node| get_meshes(&glb, node, Mat4::IDENTITY)) .collect(); let model = Model 
{ meshes }; let model = Arc::new(model); self.registry .insert(path.to_owned(), Arc::downgrade(&model)); model } } } } pub struct TextureRegistry { registry: HashMap<String, Weak<Texture>>, } impl TextureRegistry { pub fn new() -> Self { Self { registry: HashMap::new(), } } pub fn get_meshes(&self) -> Vec<Arc<Texture>> { self.registry .values() .filter_map(|weak| weak.upgrade()) .collect() } pub fn load(&mut self, ctx: &mut Context, path: &str, normalized_uv: bool) -> Arc<Texture> { let registry_value = self .registry .get(&path.to_owned()) .map(|weak| weak.upgrade()) .flatten(); match registry_value { Some(value) => value, None => { let texture = Path::new("assets/textures/compiled").join(path); println!("Loading: {}", texture.display()); let texture = Arc::new( Texture::new( ctx, &std::fs::read(texture).expect("Failed to read texture"), normalized_uv, ) .expect("Failed to read texture"), ); self.registry .insert(path.to_owned(), Arc::downgrade(&texture)); texture } } } } ``` === vulkan::allocator.rs ```pretty-rs // allocator.rs use crate::{Buffer, Device, Instance}; use ash::vk; use std::{ ffi::c_void, fmt::{Debug, Display}, sync::Arc, }; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Region { size: usize, offset: usize, } impl Display for Region { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}-{}", self.offset, self.offset + self.size) } } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Allocation { id: usize, region: Region, } // Vulkan calls these memory types #[derive(Clone, Debug)] pub struct Heap { size: usize, properties: vk::MemoryPropertyFlags, memory: vk::DeviceMemory, allocations: Vec<Allocation>, } impl Display for Heap { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}: ", self.properties)?; for allocation in &self.allocations { write!(f, "{}, ", allocation.region)?; } Ok(()) } } pub struct Allocator { device: Arc<Device>, heaps: Vec<Heap>, to_free: Vec<usize>, next_id: usize, } impl Debug for Allocator { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Allocator") .field("heaps", &self.heaps) .field("next_id", &self.next_id) .finish() } } impl Allocator { pub fn new(instance: &Instance, device: Arc<Device>) -> Result<Self, vk::Result> { let properties = unsafe { instance.get_physical_device_memory_properties(*device.physical) }; let heaps = &properties.memory_types[0..properties.memory_type_count as usize]; let heaps = heaps .iter() .enumerate() .map(|(i, heap)| { let alloc_info = vk::MemoryAllocateInfo::builder() .allocation_size(32 * 1024 * 1024) // 32MiB .memory_type_index(i as u32); let memory = unsafe { device .allocate_memory(&alloc_info, None) .expect("Failed to allocate memory") }; Heap { size: properties.memory_heaps[heap.heap_index as usize].size as usize, properties: heap.property_flags, memory, allocations: Vec::new(), } }) .collect::<Vec<Heap>>(); Ok(Self { device, heaps, to_free: Vec::new(), next_id: 0, }) } fn find_region( size: usize, alignment: usize, occupied: Vec<Region>, end: usize, ) -> Option<Region> { let mut points = vec![0_usize]; for region in occupied { points.push(region.offset); points.push(region.offset + region.size); } points.push(end); let free = points .chunks_exact(2) .map(|points| { let from = points[0]; let to = points[1]; Region { offset: from + (from % alignment), size: to - (from + (from % alignment)), } }) .collect::<Vec<Region>>(); for region in free { if region.size > size { return Some(Region { size, offset: 
region.offset, }); } } None } fn allocate_from_requirements( &mut self, requirements: vk::MemoryRequirements, properties: vk::MemoryPropertyFlags, ) -> (vk::DeviceMemory, Allocation) { let (_, heap) = self .heaps .iter_mut() .enumerate() .filter(|(i, heap)| { heap.properties.contains(properties) && (requirements.memory_type_bits & (1 << i)) != 0 }) .next() .expect("No suitable memory heap"); let region = Self::find_region( requirements.size as usize, requirements.alignment as usize, heap.allocations .iter() .map(|alloc| alloc.region) .collect::<Vec<Region>>(), 32 * 1024 * 1024, ) .expect("Cannot find region in heap"); let allocation = Allocation { id: self.next_id, region, }; heap.allocations.push(allocation); self.next_id += 1; (heap.memory, allocation) } pub fn create_buffer( &mut self, create_info: &vk::BufferCreateInfo, properties: vk::MemoryPropertyFlags, ) -> Result<(vk::Buffer, Allocation), vk::Result> { //unsafe { self.device.device_wait_idle()? }; let buffer = unsafe { self.device.create_buffer(create_info, None)? }; let requirements = unsafe { self.device.get_buffer_memory_requirements(buffer) }; let (memory, allocation) = self.allocate_from_requirements(requirements, properties); unsafe { self.device .bind_buffer_memory(buffer, memory, allocation.region.offset as u64)? }; Ok((buffer, allocation)) } pub fn create_image( &mut self, create_info: &vk::ImageCreateInfo, properties: vk::MemoryPropertyFlags, ) -> Result<(vk::Image, Allocation), vk::Result> { //unsafe { self.device.device_wait_idle()? }; let image = unsafe { self.device.create_image(create_info, None)? }; let requirements = unsafe { self.device.get_image_memory_requirements(image) }; let (memory, allocation) = self.allocate_from_requirements(requirements, properties); unsafe { self.device .bind_image_memory(image, memory, allocation.region.offset as u64)? }; Ok((image, allocation)) } pub fn write(&self, allocation: &Allocation, bytes: &[u8]) -> Result<(), vk::Result> { if bytes.len() > allocation.region.size { panic!("Buffer overflow with allocation {}", allocation.id) } let heap = self .heaps .iter() .find(|heap| heap.allocations.contains(allocation)) .expect(&format!("Can't find allocation with id {}", allocation.id)); let ptr = unsafe { self.device.map_memory( heap.memory, allocation.region.offset as u64, allocation.region.size as u64, vk::MemoryMapFlags::empty(), )? 
}; unsafe { ptr.copy_from(bytes.as_ptr() as *const c_void, bytes.len()) }; unsafe { self.device.unmap_memory(heap.memory) }; Ok(()) } pub fn free(&mut self, allocation: &Allocation) { self.heaps .iter_mut() .find(|heap| heap.allocations.contains(allocation)) .expect(&format!("Double free of allocation {}", allocation.id)); self.to_free.push(allocation.id); } pub fn flush_frees(&mut self) { for allocation in &self.to_free { let heap = self .heaps .iter_mut() .find(|heap| { heap.allocations .iter() .find(|alloc| alloc.id == *allocation) .is_some() }) .expect(&format!("Double free of allocation {}", allocation)); let allocation = heap .allocations .iter() .find(|alloc| alloc.id == *allocation) .unwrap(); heap.allocations.remove( heap.allocations .iter() .position(|alloc| alloc.id == allocation.id) .unwrap(), ); } self.to_free.clear(); } } ``` === vulkan::image.rs ```pretty-rs // image.rs use super::{ allocator::{Allocation, Allocator}, Buffer, Context, Device, Pool, }; use crate::command::TransitionLayoutOptions; use crate::Set; use ash::vk::{self, MemoryPropertyFlags}; use std::cell::OnceCell; use std::ops::Deref; use std::path::Path; use std::sync::{Arc, Mutex}; #[derive(Debug)] pub struct Image { pub(crate) image: vk::Image, pub format: vk::Format, pub width: u32, pub height: u32, pub(crate) allocation: Option<Allocation>, allocator: Option<Arc<Mutex<Allocator>>>, } impl Image { pub fn new( ctx: &Context, width: u32, height: u32, format: vk::Format, usage: vk::ImageUsageFlags, ) -> Result<Arc<Self>, vk::Result> { let create_info = vk::ImageCreateInfo::builder() .image_type(vk::ImageType::TYPE_2D) .format(format) .extent(vk::Extent3D { width, height, depth: 1, }) .mip_levels(1) .array_layers(1) .samples(vk::SampleCountFlags::TYPE_1) .tiling(vk::ImageTiling::OPTIMAL) .usage(usage) .sharing_mode(vk::SharingMode::EXCLUSIVE) .initial_layout(vk::ImageLayout::UNDEFINED); let (image, allocation) = ctx .allocator .lock() .unwrap() .create_image(&create_info, MemoryPropertyFlags::DEVICE_LOCAL)?; Ok(Arc::new(Self { image, format, width, height, allocation: Some(allocation), allocator: Some(ctx.allocator.clone()), })) } pub fn from_image(image: vk::Image, format: vk::Format, width: u32, height: u32) -> Arc<Self> { Arc::new(Self { image, format, width, height, allocation: None, allocator: None, }) } pub fn create_view_without_context( &self, device: &Device, ) -> Result<vk::ImageView, vk::Result> { let create_info = vk::ImageViewCreateInfo::builder() .image(**self) .view_type(vk::ImageViewType::TYPE_2D) .format(self.format) .components(vk::ComponentMapping::default()) .subresource_range(vk::ImageSubresourceRange { aspect_mask: vk::ImageAspectFlags::COLOR, base_mip_level: 0, level_count: 1, base_array_layer: 0, layer_count: 1, }); unsafe { device.create_image_view(&create_info, None) } } pub fn create_view(&self, ctx: &Context) -> Result<vk::ImageView, vk::Result> { let aspect_mask = if self.format == vk::Format::D32_SFLOAT { vk::ImageAspectFlags::DEPTH } else { vk::ImageAspectFlags::COLOR }; let create_info = vk::ImageViewCreateInfo::builder() .image(**self) .view_type(vk::ImageViewType::TYPE_2D) .format(self.format) .components(vk::ComponentMapping::default()) .subresource_range(vk::ImageSubresourceRange { aspect_mask, base_mip_level: 0, level_count: 1, base_array_layer: 0, layer_count: 1, }); unsafe { ctx.device.create_image_view(&create_info, None) } } pub fn create_sampler( &self, ctx: &Context, mag_filter: vk::Filter, min_filter: vk::Filter, normalized_uv: bool, ) -> Result<vk::Sampler, 
vk::Result> { let create_info = vk::SamplerCreateInfo::builder() .mag_filter(mag_filter) .min_filter(min_filter) .mipmap_mode(vk::SamplerMipmapMode::LINEAR) .address_mode_u(vk::SamplerAddressMode::MIRRORED_REPEAT) .address_mode_v(vk::SamplerAddressMode::MIRRORED_REPEAT) .address_mode_w(vk::SamplerAddressMode::MIRRORED_REPEAT) .mip_lod_bias(0.0) .anisotropy_enable(true) .max_anisotropy(ctx.device.physical.properties.limits.max_sampler_anisotropy) .compare_enable(false) .compare_op(vk::CompareOp::ALWAYS) .min_lod(0.0) .max_lod(0.0) .border_color(vk::BorderColor::INT_OPAQUE_BLACK) .unnormalized_coordinates(!normalized_uv); unsafe { ctx.device.create_sampler(&create_info, None) } } } impl Deref for Image { type Target = vk::Image; fn deref(&self) -> &Self::Target { &self.image } } impl Drop for Image { fn drop(&mut self) { if self.allocation.is_none() || self.allocator.is_none() { return; } self.allocator .take() .unwrap() .lock() .unwrap() .free(&self.allocation.unwrap()) } } pub struct Texture { pub image: Arc<Image>, pub view: vk::ImageView, pub sampler: vk::Sampler, } impl Deref for Texture { type Target = Image; fn deref(&self) -> &Self::Target { &self.image } } impl Texture { pub const WHITE: OnceCell<Self> = OnceCell::new(); pub fn new(ctx: &mut Context, bytes: &[u8], normalized_uv: bool) -> Result<Self, vk::Result> { let (header, data) = qoi::decode_to_vec(bytes).unwrap(); Self::new_bytes(ctx, &data, header.width, header.height, normalized_uv) } pub fn new_bytes( ctx: &mut Context, data: &[u8], width: u32, height: u32, normalized_uv: bool, ) -> Result<Self, vk::Result> { let texture_buffer = Buffer::new(ctx, data, vk::BufferUsageFlags::TRANSFER_SRC)?; let image = Image::new( ctx, width, height, vk::Format::R8G8B8A8_SRGB, vk::ImageUsageFlags::TRANSFER_DST | vk::ImageUsageFlags::SAMPLED, )?; let view = image.create_view(ctx)?; let sampler = image.create_sampler(ctx, vk::Filter::NEAREST, vk::Filter::NEAREST, normalized_uv)?; ctx.command_pool .allocate() .unwrap() .begin() .unwrap() .transition_image_layout( &image, &TransitionLayoutOptions { old: vk::ImageLayout::UNDEFINED, new: vk::ImageLayout::TRANSFER_DST_OPTIMAL, source_access: vk::AccessFlags::empty(), destination_access: vk::AccessFlags::TRANSFER_WRITE, source_stage: vk::PipelineStageFlags::TOP_OF_PIPE, destination_stage: vk::PipelineStageFlags::TRANSFER, }, ) .copy_buffer_to_image(&texture_buffer, &image) .transition_image_layout( &image, &TransitionLayoutOptions { old: vk::ImageLayout::TRANSFER_DST_OPTIMAL, new: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL, source_access: vk::AccessFlags::TRANSFER_WRITE, destination_access: vk::AccessFlags::SHADER_READ, source_stage: vk::PipelineStageFlags::TRANSFER, destination_stage: vk::PipelineStageFlags::FRAGMENT_SHADER, }, ) .submit() .unwrap(); Ok(Self { image, view, sampler, }) } pub fn from_image( ctx: &Context, image: Arc<Image>, mag_filter: vk::Filter, min_filter: vk::Filter, normalized_uv: bool, ) -> Result<Self, vk::Result> { let view = image.create_view(ctx)?; let sampler = image.create_sampler(ctx, mag_filter, min_filter, normalized_uv)?; Ok(Self { image, view, sampler, }) } } ``` === vulkan::graphics.rs ```pretty-rs // graphics.rs use super::{Device, Renderpass, SetLayout}; use ash::vk; use bytemuck::cast_slice; use cstr::cstr; use std::{ops::Deref, result::Result}; #[derive(Clone)] pub struct Shader { module: vk::ShaderModule, pub stage: vk::ShaderStageFlags, } impl Shader { pub fn new( device: &ash::Device, code: &[u8], stage: vk::ShaderStageFlags, ) -> Result<Self, vk::Result> { 
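// .code() takes SPIR-V as 32-bit words, so the shader bytes are reinterpreted with bytemuck::cast_slice (which panics if the byte length or alignment does not fit u32)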
let create_info = vk::ShaderModuleCreateInfo::builder().code(cast_slice(code)); let module = unsafe { device.create_shader_module(&create_info, None)? }; Ok(Self { module, stage }) } pub fn get_stage(&self) -> vk::PipelineShaderStageCreateInfoBuilder { vk::PipelineShaderStageCreateInfo::builder() .stage(self.stage) .module(self.module) .name(cstr!("main")) } } #[derive(Clone)] pub struct Shaders { pub vertex: Option<Shader>, pub fragment: Option<Shader>, } pub struct Binding { binding: usize, stride: usize, attributes: Vec<vk::VertexInputAttributeDescription>, } impl Binding { pub fn add_attribute(mut self, format: vk::Format) -> Self { let attribute = vk::VertexInputAttributeDescription::builder() .binding(self.binding.try_into().unwrap()) .location(self.attributes.len().try_into().unwrap()) .format(format) .offset(self.stride.try_into().unwrap()) .build(); self.attributes.push(attribute); self.stride += match format { vk::Format::R32G32_SFLOAT => 2 * 4, vk::Format::R32G32B32_SFLOAT => 3 * 4, vk::Format::R32G32B32A32_SFLOAT => 4 * 4, vk::Format::R8G8B8A8_UINT => 4 * 1, _ => todo!(), }; self } } pub struct VertexInputBuilder { bindings: Vec<Binding>, } impl VertexInputBuilder { pub fn new() -> Self { Self::default() } pub fn add_binding<F: Fn(Binding) -> Binding>(mut self, callback: F) -> Self { let binding = Binding { binding: self.bindings.len(), stride: 0, attributes: Vec::new(), }; self.bindings.push(callback(binding)); self } fn to_vertex_bindings( &self, ) -> ( Vec<vk::VertexInputBindingDescription>, Vec<vk::VertexInputAttributeDescription>, ) { let bindings = self .bindings .iter() .enumerate() .map(|(i, binding)| { vk::VertexInputBindingDescription::builder() .binding(i.try_into().unwrap()) .stride(binding.stride.try_into().unwrap()) .input_rate(vk::VertexInputRate::VERTEX) .build() }) .collect::<Vec<vk::VertexInputBindingDescription>>(); let attributes = self .bindings .iter() .flat_map(|binding| binding.attributes.clone()) .collect::<Vec<vk::VertexInputAttributeDescription>>(); (bindings, attributes) } } impl Default for VertexInputBuilder { fn default() -> Self { Self { bindings: Vec::new(), } } } pub struct Pipeline { pub(crate) pipeline: vk::Pipeline, pub layout: vk::PipelineLayout, pub shaders: Shaders, } impl Pipeline { pub fn new( device: &Device, renderpass: &Renderpass, shaders: Shaders, extent: vk::Extent2D, descriptor_layouts: &[SetLayout], vertex_input: VertexInputBuilder, subpass: u32, depth: bool, cull: bool, ) -> Result<Self, vk::Result> { let vertex_stage = shaders .vertex .as_ref() .expect("All graphics pipelines need a vertex shader") .get_stage(); let fragment_stage = shaders .fragment .as_ref() .expect("All graphics pipelines need a fragment shader") .get_stage(); let (bindings, attributes) = vertex_input.to_vertex_bindings(); let vertex_input = vk::PipelineVertexInputStateCreateInfo::builder() .vertex_binding_descriptions(&bindings) .vertex_attribute_descriptions(&attributes); let input_assembly = vk::PipelineInputAssemblyStateCreateInfo::builder() .topology(vk::PrimitiveTopology::TRIANGLE_LIST) .primitive_restart_enable(false); #[allow(clippy::cast_precision_loss)] let viewport = vk::Viewport::builder() .x(0.0) .y(0.0) .width(extent.width as f32) .height(extent.height as f32) .min_depth(0.0) .max_depth(1.0); let scissor = vk::Rect2D::builder() .offset(vk::Offset2D { x: 0, y: 0 }) .extent(extent); let viewports = &[viewport.build()]; let scissors = &[scissor.build()]; let viewport_state = vk::PipelineViewportStateCreateInfo::builder() .viewports(viewports) 
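// a single static viewport and scissor covering the full render extent; no dynamic state is set on this pipeline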
.scissors(scissors); let rasterization_state = vk::PipelineRasterizationStateCreateInfo::builder() .depth_clamp_enable(false) .rasterizer_discard_enable(false) .polygon_mode(vk::PolygonMode::FILL) .line_width(1.0) .cull_mode(if cull { vk::CullModeFlags::BACK } else { vk::CullModeFlags::NONE }) .front_face(vk::FrontFace::COUNTER_CLOCKWISE) .depth_bias_enable(false); let multisampling = vk::PipelineMultisampleStateCreateInfo::builder() .sample_shading_enable(false) .rasterization_samples(vk::SampleCountFlags::TYPE_1); let depth_stencil = vk::PipelineDepthStencilStateCreateInfo::builder() .depth_test_enable(true) .depth_write_enable(true) .depth_compare_op(vk::CompareOp::LESS) .depth_bounds_test_enable(false) .stencil_test_enable(false); let attachment = vk::PipelineColorBlendAttachmentState::builder() .color_write_mask(vk::ColorComponentFlags::RGBA) .blend_enable(false) .build(); let attachments = &[attachment]; let color_blending = vk::PipelineColorBlendStateCreateInfo::builder() .logic_op_enable(false) .logic_op(vk::LogicOp::COPY) .attachments(attachments) .blend_constants([0.0, 0.0, 0.0, 0.0]); let set_layouts: Vec<vk::DescriptorSetLayout> = descriptor_layouts.iter().map(|layout| **layout).collect(); let layout_info = vk::PipelineLayoutCreateInfo::builder().set_layouts(&set_layouts); let layout = unsafe { device.create_pipeline_layout(&layout_info, None)? }; let stages = &[*vertex_stage, *fragment_stage]; let mut create_info = vk::GraphicsPipelineCreateInfo::builder() .stages(stages) .vertex_input_state(&vertex_input) .input_assembly_state(&input_assembly) .viewport_state(&viewport_state) .rasterization_state(&rasterization_state) .multisample_state(&multisampling) .color_blend_state(&color_blending) .layout(layout) .render_pass(**renderpass) .subpass(subpass); if depth { create_info = create_info.depth_stencil_state(&depth_stencil); } let pipeline = unsafe { device .create_graphics_pipelines(vk::PipelineCache::null(), &[*create_info], None) .expect("Graphics pipeline creation failed")[0] }; Ok(Self { pipeline, layout, shaders, }) } } impl Deref for Pipeline { type Target = vk::Pipeline; fn deref(&self) -> &Self::Target { &self.pipeline } } ``` === aetheria::renderer.rs ```pretty-rs // renderer.rs use ash::vk; use std::ops::DerefMut; use std::sync::Mutex; use std::{ops::Deref, sync::Arc}; use tracing::info; use vulkan::command::{self, TransitionLayoutOptions}; use vulkan::{Context, Image, Swapchain}; use winit::window::Window; pub trait Pass { fn record(&self, cmd: command::BufferBuilder) -> command::BufferBuilder; } pub struct Renderer { pub(crate) ctx: Context, window: Arc<Window>, render_finished: vk::Semaphore, in_flight: vk::Fence, output_image: Option<(Arc<Image>, vk::ImageLayout)>, passes: Vec<Arc<Mutex<dyn Pass>>>, } pub const RENDER_WIDTH: u32 = 480; pub const RENDER_HEIGHT: u32 = 270; impl Renderer { pub fn new(ctx: Context, window: Arc<Window>) -> Result<Self, vk::Result> { let semaphore_info = vk::SemaphoreCreateInfo::builder(); let fence_info = vk::FenceCreateInfo::builder().flags(vk::FenceCreateFlags::SIGNALED); let render_finished = unsafe { ctx.device.create_semaphore(&semaphore_info, None).unwrap() }; let in_flight = unsafe { ctx.device.create_fence(&fence_info, None).unwrap() }; let renderer = Self { ctx, window, render_finished, in_flight, output_image: None, passes: Vec::new(), }; Ok(renderer) } unsafe fn destroy_swapchain(&mut self) { self.ctx.device.device_wait_idle().unwrap(); self.ctx .swapchain .image_views .iter() .for_each(|view| 
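// tear down the per-image views before the swapchain they were created from is destroyed below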
self.ctx.device.destroy_image_view(*view, None)); self.ctx .device .extensions .swapchain .as_ref() .unwrap() .destroy_swapchain(*self.ctx.swapchain, None); } pub fn recreate_swapchain(&mut self) -> Result<(), vk::Result> { unsafe { self.destroy_swapchain() }; info!("Recreating swapchain"); self.ctx.swapchain = Swapchain::new( &self.ctx.instance, &self.ctx.surface, &self.ctx.device, &self.window, )?; Ok(()) } pub fn add_pass(&mut self, pass: Arc<Mutex<dyn Pass>>) { self.passes.push(pass); } pub fn set_output_image(&mut self, image: Arc<Image>, layout: vk::ImageLayout) { self.output_image = Some((image, layout)); } pub fn wait_for_frame(&self) { unsafe { self.device .wait_for_fences(&[self.in_flight], true, u64::MAX) .unwrap(); } } pub fn render(&mut self) { unsafe { let in_flight = self.in_flight.clone(); let acquire_result = self.ctx.start_frame(in_flight); /*self.render_pass .set_geometry(&self, mesh_registry, renderables, lights); self.ui_pass.set_geometry(&self, &[Rectangle { origin: Vec2::new(50.0, 50.0), extent: Vec2::new(50.0, 50.0), radius: 25.0, color: Vec4::new(1.0, 0.0, 1.0, 0.3), ..Default::default() }]).expect("Failed to update UI geometry");*/ let image_index = match acquire_result { Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => { self.recreate_swapchain() .expect("Swapchain recreation failed"); return; } Err(e) => panic!("{}", e), Ok(image_index) => image_index, }; self.command_pool.clear(); let cmd = self .command_pool .allocate() .unwrap() .begin() .unwrap() .record(|cmd| { self.passes.iter().fold(cmd, |cmd, pass| { cmd.record(|cmd| pass.lock().unwrap().record(cmd)) }) }) .transition_image_layout( &self.output_image.as_ref().expect("No output image set").0, &TransitionLayoutOptions { old: self.output_image.as_ref().unwrap().1, new: vk::ImageLayout::TRANSFER_SRC_OPTIMAL, source_access: vk::AccessFlags::SHADER_WRITE, destination_access: vk::AccessFlags::TRANSFER_READ, source_stage: vk::PipelineStageFlags::COMPUTE_SHADER, destination_stage: vk::PipelineStageFlags::TRANSFER, }, ) .transition_image_layout( &self.ctx.swapchain.images[image_index as usize], &TransitionLayoutOptions { old: vk::ImageLayout::UNDEFINED, new: vk::ImageLayout::TRANSFER_DST_OPTIMAL, source_access: vk::AccessFlags::NONE, destination_access: vk::AccessFlags::TRANSFER_WRITE, source_stage: vk::PipelineStageFlags::TOP_OF_PIPE, destination_stage: vk::PipelineStageFlags::TRANSFER, }, ) .blit_image( &self.output_image.as_ref().unwrap().0, &self.ctx.swapchain.images[image_index as usize], vk::ImageLayout::TRANSFER_SRC_OPTIMAL, vk::ImageLayout::TRANSFER_DST_OPTIMAL, vk::ImageAspectFlags::COLOR, vk::Filter::NEAREST, ) .transition_image_layout( &self.ctx.swapchain.images[image_index as usize], &TransitionLayoutOptions { old: vk::ImageLayout::TRANSFER_DST_OPTIMAL, new: vk::ImageLayout::PRESENT_SRC_KHR, source_access: vk::AccessFlags::TRANSFER_WRITE, destination_access: vk::AccessFlags::NONE, source_stage: vk::PipelineStageFlags::TRANSFER, destination_stage: vk::PipelineStageFlags::BOTTOM_OF_PIPE, }, ) .end() .unwrap(); let wait_semaphores = &[self.ctx.image_available]; let signal_semaphores = &[self.render_finished]; let command_buffers = &[*cmd]; let submit_info = vk::SubmitInfo::builder() .wait_semaphores(wait_semaphores) .wait_dst_stage_mask(&[vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT]) .command_buffers(command_buffers) .signal_semaphores(signal_semaphores); self.ctx .device .queue_submit( self.ctx.device.queues.graphics.queue, &[*submit_info], self.in_flight, ) .unwrap(); let presentation_result = 
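// present via Context::end_frame; an ERROR_OUT_OF_DATE_KHR result is handled by recreating the swapchain in the match below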
self.ctx.end_frame(image_index, self.render_finished); match presentation_result { Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => self .recreate_swapchain() .expect("Swapchain recreation failed"), Err(e) => panic!("{}", e), Ok(_) => (), } } } } impl Deref for Renderer { type Target = Context; fn deref(&self) -> &Self::Target { &self.ctx } } impl DerefMut for Renderer { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.ctx } } ``` === vulkan::descriptor.rs ```pretty-rs // descriptor.rs use super::{Buffer, Device, Texture}; use ash::vk; use std::{collections::HashMap, ops::Deref, result::Result, sync::Arc}; #[derive(Clone, Copy)] pub struct Binding { pub(crate) binding: vk::DescriptorSetLayoutBinding, } impl Binding { fn new(index: usize, descriptor_type: vk::DescriptorType) -> Self { let binding = vk::DescriptorSetLayoutBinding::builder() .binding(index.try_into().unwrap()) .descriptor_type(descriptor_type) .descriptor_count(1) .stage_flags(vk::ShaderStageFlags::ALL) .build(); Self { binding } } } impl Deref for Binding { type Target = vk::DescriptorSetLayoutBinding; fn deref(&self) -> &Self::Target { &self.binding } } pub struct SetLayoutBuilder<'a> { device: &'a Device, bindings: Vec<Binding>, } impl<'a> SetLayoutBuilder<'a> { pub const fn new(device: &'a Device) -> Self { Self { device, bindings: Vec::new(), } } pub fn add(mut self, descriptor_type: vk::DescriptorType) -> Self { self.bindings .push(Binding::new(self.bindings.len(), descriptor_type)); self } pub fn build(self) -> Result<SetLayout, vk::Result> { let bindings: Vec<vk::DescriptorSetLayoutBinding> = self.bindings.iter().map(|binding| **binding).collect(); let create_info = vk::DescriptorSetLayoutCreateInfo::builder().bindings(&bindings); let layout = unsafe { self.device .create_descriptor_set_layout(&create_info, None)? 
}; Ok(SetLayout { layout, binding_types: self .bindings .iter() .map(|binding| binding.descriptor_type) .collect(), }) } } #[derive(Clone)] pub struct SetLayout { pub(crate) layout: vk::DescriptorSetLayout, pub binding_types: Vec<vk::DescriptorType>, } impl Deref for SetLayout { type Target = vk::DescriptorSetLayout; fn deref(&self) -> &Self::Target { &self.layout } } #[derive(Clone)] pub struct Set { pub(crate) set: vk::DescriptorSet, binding_types: Vec<vk::DescriptorType>, } impl Set { pub fn update_buffer(&self, device: &Device, binding: u32, buffer: &Buffer) { let buffer_info = vk::DescriptorBufferInfo::builder() .buffer(**buffer) .offset(0) .range(buffer.size.try_into().unwrap()); let buffer_infos = &[*buffer_info]; let write_info = vk::WriteDescriptorSet::builder() .dst_set(**self) .dst_binding(binding) .dst_array_element(0) .descriptor_type(self.binding_types[binding as usize]) .buffer_info(buffer_infos); let descriptor_writes = &[*write_info]; unsafe { device.update_descriptor_sets(descriptor_writes, &[]) }; } pub fn update_texture( &self, device: &Device, binding: u32, texture: &Texture, layout: vk::ImageLayout, ) { let image_info = vk::DescriptorImageInfo::builder() .sampler(texture.sampler) .image_view(texture.view) .image_layout(layout); let image_infos = &[*image_info]; let write_info = vk::WriteDescriptorSet::builder() .dst_set(**self) .dst_binding(binding) .dst_array_element(0) .descriptor_type(self.binding_types[binding as usize]) .image_info(image_infos); let descriptor_writes = &[*write_info]; unsafe { device.update_descriptor_sets(descriptor_writes, &[]) }; } } impl Deref for Set { type Target = vk::DescriptorSet; fn deref(&self) -> &Self::Target { &self.set } } pub struct Pool { pub(crate) pool: vk::DescriptorPool, device: Arc<Device>, layout: SetLayout, sets: Vec<Set>, } impl Pool { pub fn new( device: Arc<Device>, layout: SetLayout, capacity: usize, ) -> Result<Self, vk::Result> { let descriptor_types: Vec<vk::DescriptorType> = layout.binding_types.clone(); let mut descriptor_type_amounts: HashMap<vk::DescriptorType, usize> = HashMap::new(); for descriptor_type in &descriptor_types { match descriptor_type_amounts.get_mut(descriptor_type) { Some(amount) => { *amount += 1; } None => { descriptor_type_amounts.insert(*descriptor_type, 1); } } } let pool_sizes: Vec<vk::DescriptorPoolSize> = descriptor_type_amounts .into_iter() .map(|(descriptor_type, amount)| { vk::DescriptorPoolSize::builder() .ty(descriptor_type) .descriptor_count((amount * capacity).try_into().unwrap()) .build() }) .collect(); let create_info = vk::DescriptorPoolCreateInfo::builder() .max_sets(capacity.try_into().unwrap()) .pool_sizes(&pool_sizes); let pool = unsafe { device.create_descriptor_pool(&create_info, None)? 
}; Ok(Self { pool, device, layout, sets: Vec::new(), }) } pub fn allocate(&mut self) -> Result<Set, vk::Result> { let set_layouts = &[*self.layout]; let allocate_info = vk::DescriptorSetAllocateInfo::builder() .descriptor_pool(**self) .set_layouts(set_layouts); let set = unsafe { self.device.allocate_descriptor_sets(&allocate_info)?[0] }; Ok(Set { set, binding_types: self.layout.binding_types.clone(), }) } } impl Deref for Pool { type Target = vk::DescriptorPool; fn deref(&self) -> &Self::Target { &self.pool } } ``` === vulkan::device.rs ```pretty-rs // device.rs use super::{Instance, Surface}; use ash::{extensions::khr, vk}; use bytemuck::cast_slice; use std::{collections::HashSet, ffi::CStr, ops::Deref, result::Result}; use tracing::info; pub struct Queue { pub queue: vk::Queue, pub index: u32, } impl Queue { const fn new(queue: vk::Queue, index: u32) -> Self { Self { queue, index } } } impl Deref for Queue { type Target = vk::Queue; fn deref(&self) -> &Self::Target { &self.queue } } pub struct Queues { pub graphics: Queue, pub present: Queue, } pub struct Extensions { pub swapchain: Option<khr::Swapchain>, } impl Extensions { fn load(instance: &ash::Instance, device: &ash::Device, available: &[&CStr]) -> Self { Self { swapchain: available .iter() .find(|ext| **ext == khr::Swapchain::name()) .map(|_| khr::Swapchain::new(instance, device)), } } } pub struct Device { pub(crate) device: ash::Device, pub physical: super::instance::PhysicalDevice, pub queues: Queues, pub extensions: Extensions, } impl Device { pub unsafe fn new(instance: &Instance, surface: &Surface) -> Result<Self, vk::Result> { let physicals = instance.get_physical_devices()?; let physical = physicals .first() .cloned() .expect("No device supporting vulkan found"); let mut features = vk::PhysicalDeviceFeatures::default(); features.sampler_anisotropy = physical.features.sampler_anisotropy; let (graphics_family_index, _graphics_family) = physical .queue_families .iter() .enumerate() .find(|(_, family)| family.queue_flags.intersects(vk::QueueFlags::GRAPHICS)) .expect("No graphics queue family"); let (present_family_index, _present_family) = physical .queue_families .iter() .enumerate() .find(|(i, _)| { instance .extensions .surface .as_ref() .unwrap() .get_physical_device_surface_support( physical.physical, (*i).try_into().unwrap(), surface.surface, ) .unwrap() }) .expect("No present family"); info!("Found graphics family at index {}", graphics_family_index); info!("Found present family at index {}", present_family_index); let queue_family_indices = [graphics_family_index, present_family_index]; let unique_queue_family_indices: HashSet<usize> = HashSet::from_iter(queue_family_indices); let queue_priorities = [1.0]; let queue_create_infos: Vec<vk::DeviceQueueCreateInfo> = unique_queue_family_indices .iter() .map(|index| { vk::DeviceQueueCreateInfo::builder() .queue_family_index((*index).try_into().unwrap()) .queue_priorities(&queue_priorities) .build() }) .collect(); let available_layers = instance.enumerate_device_layer_properties(physical.physical)?; let available_extensions = instance.enumerate_device_extension_properties(physical.physical)?; let available_layer_names: Vec<&CStr> = available_layers .iter() .map(|layer| CStr::from_bytes_until_nul(cast_slice(&layer.layer_name)).unwrap()) .collect(); let available_extension_names: Vec<&CStr> = available_extensions .iter() .map(|extension| { CStr::from_bytes_until_nul(cast_slice(&extension.extension_name)).unwrap() }) .collect(); let wanted_layers = super::get_wanted_layers(); 
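// keep only the names the driver actually reports: the wanted lists are intersected with the available layer/extension names below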
let wanted_extensions = get_wanted_extensions(); let wanted_layers = super::intersection(&wanted_layers, &available_layer_names); let wanted_extensions = super::intersection(&wanted_extensions, &available_extension_names); info!("Using device layers: {:?}", wanted_layers); info!("Using device extesions: {:?}", wanted_extensions); let wanted_layers_raw: Vec<*const i8> = wanted_layers.iter().map(|name| name.as_ptr()).collect(); let wanted_extensions_raw: Vec<*const i8> = wanted_extensions.iter().map(|name| name.as_ptr()).collect(); let create_info = vk::DeviceCreateInfo::builder() .queue_create_infos(&queue_create_infos) .enabled_layer_names(&wanted_layers_raw) .enabled_extension_names(&wanted_extensions_raw) .enabled_features(&features); let device = unsafe { instance.create_device(physical.physical, &create_info, None)? }; info!("Created vulkan device"); let graphics = device.get_device_queue(graphics_family_index.try_into().unwrap(), 0); let graphics = Queue::new(graphics, graphics_family_index.try_into().unwrap()); let present = device.get_device_queue(present_family_index.try_into().unwrap(), 0); let present = Queue::new(present, present_family_index.try_into().unwrap()); Ok(Self { extensions: Extensions::load(instance, &device, &available_extension_names), device, physical, queues: Queues { graphics, present }, }) } } impl Deref for Device { type Target = ash::Device; fn deref(&self) -> &Self::Target { &self.device } } fn get_wanted_extensions() -> Vec<&'static CStr> { vec![khr::Swapchain::name()] } ``` === vulkan::instance.rs ```pretty-rs // instance.rs use ash::{extensions::khr, vk}; use bytemuck::cast_slice; use cstr::cstr; use std::{clone::Clone, ffi::CStr, ops::Deref, result::Result}; use tracing::info; #[derive(Debug, Clone)] pub struct PhysicalDeviceProperties { properties: vk::PhysicalDeviceProperties, pub device_name: String, } impl PhysicalDeviceProperties { fn new(properties: &vk::PhysicalDeviceProperties) -> Self { let device_name_raw: &CStr = CStr::from_bytes_until_nul(cast_slice(&properties.device_name)).unwrap(); let device_name = device_name_raw.to_str().unwrap().to_owned(); Self { properties: *properties, device_name, } } } impl Deref for PhysicalDeviceProperties { type Target = vk::PhysicalDeviceProperties; fn deref(&self) -> &Self::Target { &self.properties } } #[derive(Debug, Clone)] pub struct PhysicalDevice { pub(crate) physical: vk::PhysicalDevice, pub properties: PhysicalDeviceProperties, pub queue_families: Vec<vk::QueueFamilyProperties>, pub features: vk::PhysicalDeviceFeatures, } impl PhysicalDevice { unsafe fn new(instance: &Instance, physical: vk::PhysicalDevice) -> Self { let properties = instance.get_physical_device_properties(physical); let properties = PhysicalDeviceProperties::new(&properties); let queue_families = instance.get_physical_device_queue_family_properties(physical); let features = instance.get_physical_device_features(physical); Self { physical, properties, queue_families, features, } } } impl Deref for PhysicalDevice { type Target = vk::PhysicalDevice; fn deref(&self) -> &Self::Target { &self.physical } } #[derive(Clone)] pub struct Extensions { pub surface: Option<khr::Surface>, pub xlib_surface: Option<khr::XlibSurface>, pub win32_surface: Option<khr::Win32Surface>, } impl Extensions { pub fn load(entry: &ash::Entry, instance: &ash::Instance, available: &[&CStr]) -> Self { Self { surface: available .iter() .find(|ext| **ext == khr::Surface::name()) .map(|_| khr::Surface::new(entry, instance)), xlib_surface: available .iter() .find(|ext| 
**ext == khr::XlibSurface::name()) .map(|_| khr::XlibSurface::new(entry, instance)), win32_surface: available .iter() .find(|ext| **ext == khr::Win32Surface::name()) .map(|_| khr::Win32Surface::new(entry, instance)), } } } #[derive(Clone)] pub struct Instance { instance: ash::Instance, pub extensions: Extensions, } impl Instance { pub fn new(entry: &ash::Entry) -> Result<Self, vk::Result> { let app_info = vk::ApplicationInfo::builder() .application_name(cstr!("aetheria")) .application_version(vk::make_api_version(0, 1, 0, 0)) .engine_name(cstr!("aetheria")) .engine_version(vk::make_api_version(0, 1, 0, 0)) .api_version(vk::make_api_version(0, 1, 3, 238)); let available_layers = entry.enumerate_instance_layer_properties()?; let available_extensions = entry.enumerate_instance_extension_properties(None)?; let available_layer_names: Vec<&CStr> = available_layers .iter() .map(|layer| CStr::from_bytes_until_nul(cast_slice(&layer.layer_name)).unwrap()) .collect(); let available_extension_names: Vec<&CStr> = available_extensions .iter() .map(|extension| { CStr::from_bytes_until_nul(cast_slice(&extension.extension_name)).unwrap() }) .collect(); let wanted_layers = super::get_wanted_layers(); let wanted_extensions = get_wanted_extensions(); let wanted_layers = super::intersection(&wanted_layers, &available_layer_names); let wanted_extensions = super::intersection(&wanted_extensions, &available_extension_names); info!("Using instance layers: {:?}", wanted_layers); info!("Using instance extensions: {:?}", wanted_extensions); let wanted_layers_raw: Vec<*const i8> = wanted_layers.iter().map(|name| name.as_ptr()).collect(); let wanted_extensions_raw: Vec<*const i8> = wanted_extensions.iter().map(|name| name.as_ptr()).collect(); let instance_info = vk::InstanceCreateInfo::builder() .application_info(&app_info) .enabled_layer_names(&wanted_layers_raw) .enabled_extension_names(&wanted_extensions_raw); let instance = unsafe { entry.create_instance(&instance_info, None)? }; Ok(Self { extensions: Extensions::load(entry, &instance, &available_extension_names), instance, }) } pub fn get_physical_devices(&self) -> Result<Vec<PhysicalDevice>, vk::Result> { let physicals = unsafe { self.enumerate_physical_devices()? 
}; unsafe { Ok(physicals .iter() .copied() .map(|physical| PhysicalDevice::new(self, physical)) .collect()) } } } impl Deref for Instance { type Target = ash::Instance; fn deref(&self) -> &Self::Target { &self.instance } } #[cfg(target_os = "linux")] fn get_wanted_extensions() -> Vec<&'static CStr> { vec![khr::Surface::name(), khr::XlibSurface::name()] } #[cfg(target_os = "windows")] fn get_wanted_extensions() -> Vec<&'static CStr> { vec![khr::Surface::name(), khr::Win32Surface::name()] } ``` === vulkan::renderpass.rs ```pretty-rs // renderpass.rs use super::{Device, Image}; use ash::vk; use std::ops::Deref; pub struct Renderpass { pub(crate) renderpass: vk::RenderPass, } impl Renderpass { pub fn new_render(device: &Device, color_format: vk::Format) -> Result<Self, vk::Result> { let color_attachment = vk::AttachmentDescription::builder() .format(color_format) .samples(vk::SampleCountFlags::TYPE_1) .load_op(vk::AttachmentLoadOp::CLEAR) .store_op(vk::AttachmentStoreOp::STORE) .stencil_load_op(vk::AttachmentLoadOp::DONT_CARE) .stencil_store_op(vk::AttachmentStoreOp::DONT_CARE) .initial_layout(vk::ImageLayout::UNDEFINED) .final_layout(vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL); let color_attachment_ref = vk::AttachmentReference::builder() .attachment(0) .layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL); let depth_attachment = vk::AttachmentDescription::builder() .format(vk::Format::D32_SFLOAT) .samples(vk::SampleCountFlags::TYPE_1) .load_op(vk::AttachmentLoadOp::CLEAR) .store_op(vk::AttachmentStoreOp::DONT_CARE) .stencil_load_op(vk::AttachmentLoadOp::DONT_CARE) .stencil_store_op(vk::AttachmentStoreOp::DONT_CARE) .initial_layout(vk::ImageLayout::UNDEFINED) .final_layout(vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL); let depth_attachment_ref = vk::AttachmentReference::builder() .attachment(1) .layout(vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL); let color_attachments = &[*color_attachment_ref]; let geometry_subpass = vk::SubpassDescription::builder() .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS) .color_attachments(color_attachments) .depth_stencil_attachment(&depth_attachment_ref); let grass_subpass = vk::SubpassDescription::builder() .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS) .color_attachments(color_attachments) .depth_stencil_attachment(&depth_attachment_ref); let dependency = vk::SubpassDependency::builder() .src_subpass(0) .dst_subpass(1) .src_stage_mask(vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT) .src_access_mask(vk::AccessFlags::COLOR_ATTACHMENT_WRITE) .dst_stage_mask(vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT) .dst_access_mask(vk::AccessFlags::COLOR_ATTACHMENT_WRITE); let attachments = &[*color_attachment, *depth_attachment]; let subpasses = &[*geometry_subpass, *grass_subpass]; let dependencies = &[*dependency]; let create_info = vk::RenderPassCreateInfo::builder() .attachments(attachments) .subpasses(subpasses) .dependencies(dependencies); let renderpass = unsafe { device.create_render_pass(&create_info, None)? 
}; Ok(Self { renderpass }) } pub fn new_upscale_ui(device: &Device, color_format: vk::Format) -> Result<Self, vk::Result> { let color_attachment = vk::AttachmentDescription::builder() .format(color_format) .samples(vk::SampleCountFlags::TYPE_1) .load_op(vk::AttachmentLoadOp::CLEAR) .store_op(vk::AttachmentStoreOp::STORE) .stencil_load_op(vk::AttachmentLoadOp::DONT_CARE) .stencil_store_op(vk::AttachmentStoreOp::DONT_CARE) .initial_layout(vk::ImageLayout::UNDEFINED) .final_layout(vk::ImageLayout::PRESENT_SRC_KHR); let color_attachment_ref = vk::AttachmentReference::builder() .attachment(0) .layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL); let color_attachments = &[*color_attachment_ref]; let upscale_subpass = vk::SubpassDescription::builder() .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS) .color_attachments(color_attachments); let ui_subpass = vk::SubpassDescription::builder() .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS) .color_attachments(color_attachments); let dependency = vk::SubpassDependency::builder() .src_subpass(0) .dst_subpass(1) .src_stage_mask(vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT) .src_access_mask(vk::AccessFlags::COLOR_ATTACHMENT_WRITE) .dst_stage_mask(vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT) .dst_access_mask(vk::AccessFlags::COLOR_ATTACHMENT_WRITE); let attachments = &[*color_attachment]; let subpasses = &[*upscale_subpass, *ui_subpass]; let dependencies = &[*dependency]; let create_info = vk::RenderPassCreateInfo::builder() .attachments(attachments) .subpasses(subpasses) .dependencies(dependencies); let renderpass = unsafe { device.create_render_pass(&create_info, None)? }; Ok(Self { renderpass }) } pub fn create_framebuffer( &self, device: &Device, width: u32, height: u32, attachments: &[vk::ImageView], ) -> Result<vk::Framebuffer, vk::Result> { let create_info = vk::FramebufferCreateInfo::builder() .render_pass(**self) .attachments(attachments) .width(width) .height(height) .layers(1); unsafe { device.create_framebuffer(&create_info, None) } } } impl Deref for Renderpass { type Target = vk::RenderPass; fn deref(&self) -> &Self::Target { &self.renderpass } } ``` === aetheria::ui.rs ```pretty-rs // ui.rs use ash::vk; use assets::{ShaderRegistry, TextureRegistry}; use bytemuck::{cast_slice, Pod, Zeroable}; use glam::{UVec2, Vec4}; use std::sync::Arc; use vulkan::{ command, command::TransitionLayoutOptions, compute, Buffer, Image, Pool, Set, SetLayout, SetLayoutBuilder, Shader, Texture, }; use winit::event::MouseButton; use crate::renderer::{Pass, Renderer, RENDER_HEIGHT, RENDER_WIDTH}; #[derive(Clone, Debug, PartialEq)] pub struct SizeConstraints { pub min: UVec2, pub max: UVec2, } #[derive(Clone, Debug, PartialEq)] pub struct Region { pub origin: UVec2, pub size: UVec2, } pub mod color { use glam::Vec4; pub const fn get_highlight() -> Vec4 { Vec4::new(0.957, 0.247, 0.369, 1.0) } pub const fn get_background() -> Vec4 { Vec4::new(0.094, 0.094, 0.106, 1.0) } pub const fn get_success() -> Vec4 { Vec4::new(0.133, 0.773, 0.369, 1.0) } } pub mod input { use glam::UVec2; use winit::event::MouseButton; use crate::input::Mouse; use super::Region; pub fn hovering(mouse: &Mouse, region: &Region) -> bool { let position = mouse.get_position(); let min = region.origin; let max = region.origin + region.size; min.x < position.x && position.x < max.x && min.y < position.y && position.y < max.y } pub fn clicked(mouse: &Mouse, region: &Region, button: MouseButton) -> bool { hovering(mouse, region) && mouse.is_button_pressed(button) } } pub trait Element { fn 
layout(&mut self, constraint: SizeConstraints) -> UVec2; fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>); } #[repr(C)] #[derive(Clone, Copy, Debug, Pod, Zeroable)] pub struct Rectangle { pub color: Vec4, pub origin: UVec2, pub extent: UVec2, pub radius: u32, pub atlas_id: i32, pub _padding: [u8; 8], } impl Default for Rectangle { fn default() -> Self { Self { color: Vec4::ONE, origin: UVec2::ZERO, extent: UVec2::ONE, radius: 0, atlas_id: -1, _padding: [0_u8; 8], } } } pub struct UIPass { pipeline: compute::Pipeline, font: Arc<Texture>, ui_layout: SetLayout, ui_pool: Pool, ui_set: Set, output: Texture, } impl UIPass { pub fn new( renderer: &mut Renderer, shader_registry: &mut ShaderRegistry, texture_registry: &mut TextureRegistry, input: &Texture, ) -> Result<Self, vk::Result> { let image = Image::new( &renderer, RENDER_WIDTH, RENDER_HEIGHT, vk::Format::R8G8B8A8_UNORM, vk::ImageUsageFlags::STORAGE | vk::ImageUsageFlags::TRANSFER_SRC, )?; let output = Texture::from_image( &renderer, image, vk::Filter::NEAREST, vk::Filter::NEAREST, true, )?; let ui_layout = SetLayoutBuilder::new(&renderer.device) .add(vk::DescriptorType::STORAGE_IMAGE) .add(vk::DescriptorType::STORAGE_IMAGE) .add(vk::DescriptorType::COMBINED_IMAGE_SAMPLER) .add(vk::DescriptorType::STORAGE_BUFFER) .build()?; let mut ui_pool = Pool::new(renderer.device.clone(), ui_layout.clone(), 1)?; let ui_set = ui_pool.allocate()?; ui_set.update_texture(&renderer.device, 0, &output, vk::ImageLayout::GENERAL); ui_set.update_texture(&renderer.device, 1, &input, vk::ImageLayout::GENERAL); let font = texture_registry.load(renderer, "font.qoi", false); ui_set.update_texture( &renderer.device, 2, &font, vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL, ); let shader: Arc<Shader> = shader_registry.load(&renderer.device, "ui.comp.glsl"); let pipeline = compute::Pipeline::new(&renderer.device, shader.clone(), &[ui_layout.clone()])?; Ok(Self { pipeline, ui_layout, ui_pool, ui_set, font, output, }) } pub fn set_geometry( &self, renderer: &Renderer, rectangles: &[Rectangle], ) -> Result<(), vk::Result> { let mut rectangle_data: Vec<u8> = cast_slice::<i32, u8>(&[rectangles.len() as i32, 0, 0, 0]).to_vec(); rectangle_data.extend_from_slice(cast_slice::<Rectangle, u8>(rectangles)); let rectangle_buffer = Buffer::new( renderer, rectangle_data, vk::BufferUsageFlags::STORAGE_BUFFER, )?; self.ui_set .update_buffer(&renderer.device, 3, &rectangle_buffer); Ok(()) } pub fn get_texture(&self) -> &'_ Texture { &self.output } } impl Pass for UIPass { fn record(&self, cmd: command::BufferBuilder) -> command::BufferBuilder { cmd.transition_image_layout( &self.output.image, &TransitionLayoutOptions { old: vk::ImageLayout::UNDEFINED, new: vk::ImageLayout::GENERAL, source_access: vk::AccessFlags::NONE, destination_access: vk::AccessFlags::SHADER_WRITE, source_stage: vk::PipelineStageFlags::TOP_OF_PIPE, destination_stage: vk::PipelineStageFlags::COMPUTE_SHADER, }, ) .bind_compute_pipeline(self.pipeline.clone()) .bind_descriptor_set(0, &self.ui_set) .dispatch( RENDER_WIDTH / 16, (RENDER_HEIGHT as f32 / 16.0).ceil() as u32, 1, ) } } ``` === vulkan::swapchain.rs ```pretty-rs // swapchain.rs use super::{Device, Image, Instance, Surface}; use ash::vk; use std::{ops::Deref, sync::Arc}; use winit::window::Window; #[derive(Debug)] pub struct Swapchain { pub(crate) swapchain: vk::SwapchainKHR, pub format: vk::Format, pub extent: vk::Extent2D, pub images: Vec<Arc<Image>>, pub image_views: Vec<vk::ImageView>, } impl Swapchain { pub fn new( instance: &Instance, surface: 
&Surface, device: &Device, window: &Window, ) -> Result<Self, vk::Result> { let surface_khr = instance.extensions.surface.as_ref().unwrap(); let swapchain_khr = device.extensions.swapchain.as_ref().unwrap(); let capabilities = unsafe { surface_khr.get_physical_device_surface_capabilities( device.physical.physical, surface.surface, )? }; let formats = unsafe { surface_khr .get_physical_device_surface_formats(device.physical.physical, surface.surface)? }; let present_modes = unsafe { surface_khr.get_physical_device_surface_present_modes( device.physical.physical, surface.surface, )? }; let format = formats .iter() .find(|format| { format.format == vk::Format::B8G8R8A8_SRGB && format.color_space == vk::ColorSpaceKHR::SRGB_NONLINEAR }) .unwrap_or_else(|| formats.first().unwrap()); let present_mode = present_modes .iter() .copied() //.find(|present_mode| *present_mode == vk::PresentModeKHR::FIFO) .find(|present_mode| *present_mode == vk::PresentModeKHR::MAILBOX) .unwrap_or(vk::PresentModeKHR::FIFO); let extent = if capabilities.current_extent.width == u32::MAX { vk::Extent2D { width: window.inner_size().width, height: window.inner_size().height, } } else { capabilities.current_extent }; let image_count = if capabilities.max_image_count == 0 || capabilities.min_image_count + 1 < capabilities.max_image_count { capabilities.min_image_count + 1 } else { capabilities.min_image_count }; let (sharing_mode, queue_family_indices) = if device.queues.graphics.index == device.queues.present.index { (vk::SharingMode::EXCLUSIVE, Vec::new()) } else { ( vk::SharingMode::CONCURRENT, vec![device.queues.graphics.index, device.queues.present.index], ) }; let create_info = vk::SwapchainCreateInfoKHR::builder() .surface(surface.surface) .min_image_count(image_count) .image_format(format.format) .image_color_space(format.color_space) .image_extent(extent) .image_array_layers(1) .image_usage(vk::ImageUsageFlags::COLOR_ATTACHMENT | vk::ImageUsageFlags::TRANSFER_DST) .image_sharing_mode(sharing_mode) .queue_family_indices(&queue_family_indices) .pre_transform(capabilities.current_transform) .composite_alpha(vk::CompositeAlphaFlagsKHR::OPAQUE) .present_mode(present_mode) .clipped(true); let swapchain = unsafe { swapchain_khr.create_swapchain(&create_info, None)? }; let images = unsafe { swapchain_khr.get_swapchain_images(swapchain)? 
}; let images: Vec<Arc<Image>> = images .iter() .copied() .map(|image| Image::from_image(image, format.format, extent.width, extent.height)) .collect(); let image_views = images .iter() .map(|image| image.create_view_without_context(device).unwrap()) .collect(); Ok(Self { swapchain, format: format.format, extent, images, image_views, }) } } impl Deref for Swapchain { type Target = vk::SwapchainKHR; fn deref(&self) -> &Self::Target { &self.swapchain } } ``` === components::craft.rs ```pretty-rs // craft.rs use common::item::ItemStack; use super::components::{ Button, Container, HAlign, HPair, Handler, Padding, Text, VAlign, VList, VPair, }; use crate::{ data::{inventory::Inventory, Data, Recipe}, input::Mouse, ui::{self, Element}, }; use glam::Vec4; use std::sync::{Arc, Mutex}; pub struct CraftButtonHandler<'a> { recipe: Recipe, data: Arc<Mutex<&'a mut Data>>, } impl Handler for CraftButtonHandler<'_> { fn handle(&mut self) { if !self .recipe .has_ingredients(&self.data.lock().unwrap().inventory) { return; } self.recipe .ingredients .iter() .for_each(|stack| self.data.lock().unwrap().inventory.remove(*stack)); self.recipe .outputs .iter() .for_each(|stack| self.data.lock().unwrap().inventory.add(*stack)); self.data.lock().unwrap().current_recipe = None; } } pub struct CloseHandler<'a> { data: Arc<Mutex<&'a mut Data>>, } impl Handler for CloseHandler<'_> { fn handle(&mut self) { self.data.lock().unwrap().current_recipe = None } } pub type Component<'a> = Container< Padding< VPair<VList<Text>, HPair<Button<'a, CloseHandler<'a>>, Button<'a, CraftButtonHandler<'a>>>>, >, >; impl<'a> Component<'a> { pub fn new(data: &'a mut Data, mouse: &'a Mouse) -> Option<Self> { let mut text = Vec::new(); let color = if data .current_recipe .as_ref()? .has_ingredients(&data.inventory) { ui::color::get_success() } else { ui::color::get_highlight() }; text.push(Text { color, content: "Ingredients".to_owned(), }); data.current_recipe .as_ref()? .ingredients .iter() .for_each(|ingredient| { let inventory_amount = data .inventory .get_items() .iter() .find(|stack| stack.item == ingredient.item) .map(|stack| stack.amount) .unwrap_or(0); let color = if inventory_amount >= ingredient.amount { ui::color::get_success() } else { ui::color::get_highlight() }; text.push(Text { color, content: format!( "{} {}/{}", ingredient.item, inventory_amount, ingredient.amount ), }) }); text.push(Text { color: Vec4::ZERO, content: String::new(), }); text.push(Text { color: ui::color::get_highlight(), content: "Outputs".to_owned(), }); data.current_recipe .as_ref()? 
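// after the ingredient rows, append one highlighted line per recipe output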
.outputs .iter() .for_each(|output| { text.push(Text { color: ui::color::get_highlight(), content: format!("{}", output), }) }); let text = VList { children: text, separation: 2, align: HAlign::Left, }; let recipe = data.current_recipe.clone()?; let data_mutex = Arc::new(Mutex::new(data)); let craft_handler = CraftButtonHandler { recipe, data: data_mutex.clone(), }; let craft_button = Button::new(mouse, "Craft", craft_handler); let close_handler = CloseHandler { data: data_mutex }; let close_button = Button::new(mouse, "Cancel", close_handler); let pair = VPair::new( text, HPair::new(close_button, craft_button, VAlign::Top, 4), HAlign::Center, 6, ); Some(Self { child: Padding::new_uniform(pair, 2), color: ui::color::get_background(), border_color: ui::color::get_highlight(), border_radius: 1, }) } } ``` === entities::firefly.rs ```pretty-rs // firefly.rs use std::sync::{Arc, Mutex, Weak}; use ash::vk; use assets::{ModelRegistry, Transform}; use glam::{Quat, Vec3}; use rand::Rng; use super::Sun; use crate::{ data::{inventory::Inventory, Data}, renderer::Renderer, systems::{ interact::Interactable, render::{Emissive, Light, RenderObject, Renderable, System}, Named, Positioned, Systems, }, time::Time, }; use common::item::{Item, ItemStack}; const FIREFLY_SPEED: f32 = 60.0; pub struct Firefly { light: Light, velocity: Vec3, origin: Vec3, render: RenderObject, gathered: bool, } impl Firefly { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, translation: Vec3, color: Vec3, ) -> Result<Arc<Mutex<Self>>, vk::Result> { let light = Light::new(translation, 0.0, color); let transform = Transform { translation, rotation: Quat::IDENTITY, scale: Vec3::ONE, }; let render = RenderObject { model: model_registry.load("firefly.glb"), transform, }; let mut rng = rand::thread_rng(); let velocity = Vec3::new( rng.gen_range(-1.0..1.0), rng.gen_range(-1.0..1.0), rng.gen_range(-1.0..1.0), ) .normalize_or_zero(); let firefly = Arc::new(Mutex::new(Self { light, velocity, origin: translation, render, gathered: false, })); systems.render.add(firefly.clone()); systems.render.add_light(firefly.clone()); systems.interact.add(firefly.clone()); Ok(firefly) } pub fn frame_finished(&mut self, sun: &Sun, time: &Time) { if sun.get_theta() > (std::f32::consts::PI / 3.0) && sun.get_theta() < (std::f32::consts::PI * (5.0 / 3.0)) { self.light.strength = 300.0 * ((sun.get_theta() / 2.0).sin() - sun.get_theta().cos()) .powf(1.5) .min(1.0) * !self.gathered as u32 as f32; } else { self.light.strength = 0.0 } self.light.position += self.velocity * FIREFLY_SPEED * time.delta_seconds(); let mut rng = rand::thread_rng(); let random_vec3 = Vec3::new( rng.gen_range(-1.0..1.0), rng.gen_range(-1.0..1.0), rng.gen_range(-1.0..1.0), ) .normalize_or_zero(); let origin_direction = (self.origin - self.light.position).normalize_or_zero(); let origin_bias = ((self.origin - self.light.position).length() - 100.0) / 100.0; self.velocity = (self.velocity + random_vec3 * 0.1 + origin_direction * origin_bias) .normalize_or_zero(); self.light.position.y = self.light.position.y.clamp(5.0, 15.0); self.render.transform.translation = self.light.position + Vec3::new(0.0, 5.0, 0.0); let v = Vec3::new(self.velocity.x, 0.0, self.velocity.z).normalize(); let rotation = Quat::from_rotation_arc(Vec3::new(0.0, 0.0, 1.0), v); self.render.transform.rotation = rotation; } } impl Emissive for Firefly { fn get_lights(&self, _: &Data) -> Vec<Light> { vec![self.light] } } impl Renderable for Firefly { fn get_objects(&self) -> 
Vec<RenderObject> { if self.light.strength != 0.0 && !self.gathered { vec![self.render.clone()] } else { vec![] } } } impl Named for Firefly { fn get_name(&self) -> String { "Sunset Firefly".to_owned() } } impl Positioned for Firefly { fn get_position(&self) -> Vec3 { self.light.position } } impl Interactable for Firefly { fn interact(&mut self, data: &mut crate::data::Data) { data.inventory.add(ItemStack { item: Item::Fireglow, amount: 1, }); self.gathered = true; } fn active(&self) -> bool { !self.gathered && self.light.strength > 0.0 } } ``` === entities::player.rs ```pretty-rs // player.rs use std::{ f32::consts::PI, sync::{Arc, Mutex}, }; use ash::vk; use assets::{ModelRegistry, Transform}; use common::{ item::{Item, ItemStack}, net, }; use glam::{Vec2, Vec3}; use winit::event::VirtualKeyCode; use crate::{ camera::Camera, data::Data, input::{Keyboard, Mouse}, renderer::Renderer, socket::Socket, systems::{ render::{Emissive, Light, RenderObject, Renderable}, Positioned, Systems, }, time::Time, }; const PLAYER_SPEED: f32 = 100.0; const JUMP_HEIGHT: f32 = 100.0; const JUMP_SPEED: f32 = 4.0; const DASH_DISTANCE: f32 = 100.0; #[derive(Clone)] pub struct Player { pub player: RenderObject, jump_t: f32, pub light: Light, } impl Player { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, transform: Transform, ) -> Result<Arc<Mutex<Self>>, vk::Result> { let player = RenderObject { model: model_registry.load("player.glb"), transform, }; let player = Arc::new(Mutex::new(Self { player, jump_t: 0.0, light: Light::new(Vec3::ZERO, 5000.0, Vec3::new(1.0, 1.0, 1.0)), })); systems.render.add(player.clone()); systems.render.add_light(player.clone()); Ok(player) } pub fn frame_finished( &mut self, keyboard: &Keyboard, mouse: &Mouse, camera: &Camera, time: &Time, viewport: Vec2, socket: &Socket, ) { let old_translation = self.player.transform.translation.clone(); // Dash if keyboard.is_key_pressed(VirtualKeyCode::Space) && self.jump_t >= (PI / 4.0) { let mouse_direction = (mouse.position - (viewport / 2.0)).normalize_or_zero(); let mouse_direction = camera.get_rotation() * Vec3::new(mouse_direction.x, 0.0, mouse_direction.y); self.player.transform.translation += mouse_direction * DASH_DISTANCE } // Jump if keyboard.is_key_pressed(VirtualKeyCode::Space) && self.jump_t == 0.0 { self.jump_t = std::f32::consts::PI - 0.0001; } self.player.transform.translation.y = self.jump_t.sin().powf(0.6) * JUMP_HEIGHT; self.jump_t -= time.delta_seconds() * JUMP_SPEED; self.jump_t = self.jump_t.max(0.0); // Movement let z = keyboard.is_key_down(VirtualKeyCode::W) as i32 - keyboard.is_key_down(VirtualKeyCode::S) as i32; let x = keyboard.is_key_down(VirtualKeyCode::D) as i32 - keyboard.is_key_down(VirtualKeyCode::A) as i32; if x != 0 || z != 0 { let delta = Vec3::new(x as f32, 0.0, z as f32).normalize() * PLAYER_SPEED * time.delta_seconds(); self.player.transform.translation += camera.get_rotation() * delta; } self.light.position = self.player.transform.translation + Vec3::new(0.0, 15.0, 0.0); if old_translation != self.player.transform.translation { let packet = net::server::Packet::Move(net::server::Move { position: self.player.transform.translation.clone(), }); socket.send(&packet).unwrap(); } } } impl Emissive for Player { fn get_lights(&self, data: &Data) -> Vec<Light> { if data .inventory .get_items() .iter() .find(|stack| stack.item == Item::Lamp) .is_some() { vec![self.light] } else { Vec::new() } } } impl Renderable for Player { fn get_objects(&self) -> Vec<RenderObject> { 
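// unlike Firefly::get_objects, the player model is always submitted for rendering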
vec![self.player.clone()] } } impl Positioned for Player { fn get_position(&self) -> Vec3 { self.player.transform.translation } } ``` === aetheria::input.rs ```pretty-rs // input.rs use glam::{UVec2, Vec2}; use std::collections::{HashMap, HashSet}; #[derive(Default)] pub struct Keyboard { down: HashSet<winit::event::VirtualKeyCode>, pressed: HashSet<winit::event::VirtualKeyCode>, } impl Keyboard { pub fn new() -> Self { Self::default() } pub fn is_key_down(&self, key: winit::event::VirtualKeyCode) -> bool { self.down.contains(&key) } pub fn is_key_pressed(&self, key: winit::event::VirtualKeyCode) -> bool { self.pressed.contains(&key) } pub fn on_event(&mut self, event: &winit::event::Event<()>) { if let winit::event::Event::DeviceEvent { event, .. } = event { if let winit::event::DeviceEvent::Key(key) = event { if let Some(keycode) = key.virtual_keycode { match key.state { winit::event::ElementState::Pressed => { self.down.insert(keycode); self.pressed.insert(keycode) } winit::event::ElementState::Released => self.down.remove(&keycode), }; } } } if let winit::event::Event::WindowEvent { event, .. } = event { if let winit::event::WindowEvent::Focused(false) = event { self.down.clear(); self.pressed.clear(); } } } pub fn frame_finished(&mut self) { self.pressed.clear(); } } #[derive(Default)] pub struct Mouse { pub delta: Vec2, pub position: Vec2, down: HashSet<winit::event::MouseButton>, pressed: HashSet<winit::event::MouseButton>, scale_factor: Vec2, } impl Mouse { pub fn new() -> Self { Self::default() } pub fn is_button_down(&self, key: winit::event::MouseButton) -> bool { self.down.contains(&key) } pub fn is_button_pressed(&self, key: winit::event::MouseButton) -> bool { self.pressed.contains(&key) } pub fn on_event(&mut self, event: &winit::event::Event<()>) { if let winit::event::Event::WindowEvent { event, .. } = event { if let winit::event::WindowEvent::MouseInput { state, button, .. } = event { match state { winit::event::ElementState::Pressed => { self.down.insert(*button); self.pressed.insert(*button) } winit::event::ElementState::Released => self.down.remove(button), }; } if let winit::event::WindowEvent::CursorMoved { position, .. } = event { self.position = Vec2::new(position.x as f32, position.y as f32); } if let winit::event::WindowEvent::Resized(size) = event { self.scale_factor = Vec2::new(size.width as f32 / 480.0, size.height as f32 / 270.0); } } if let winit::event::Event::DeviceEvent { event, .. 
} = event { if let winit::event::DeviceEvent::MouseMotion { delta } = event { self.delta.x = delta.0 as f32; self.delta.y = delta.1 as f32; } } } pub fn get_position(&self) -> UVec2 { UVec2::new( (self.position.x / self.scale_factor.x) as u32, (self.position.y / self.scale_factor.y) as u32, ) } pub fn frame_finished(&mut self) { self.pressed.clear(); self.delta = Vec2::ZERO; } } ``` === assets::build.rs ```pretty-rs // build.rs #![feature(let_chains)] use image::io::Reader as ImageReader; use std::{ fs::{self, File}, io::{Read, Write}, path::PathBuf, }; fn main() { // SHADERS let compiler = shaderc::Compiler::new().unwrap(); let options = shaderc::CompileOptions::new().unwrap(); let shader_source_paths: Vec<PathBuf> = fs::read_dir("shaders") .unwrap() .filter_map(|entry| { if let Ok(entry) = entry.as_ref() && let Some(extension) = entry.path().extension() && extension == "glsl" { Some(entry.path()) } else { None } }) .collect(); for shader_source_path in &shader_source_paths { println!("cargo:rerun-if-changed={}", shader_source_path.display()); } let shader_output_paths: Vec<PathBuf> = shader_source_paths .iter() .map(|path| { PathBuf::from(format!( "shaders/compiled/{}.spv", path.file_stem().unwrap().to_str().unwrap() )) }) .collect(); std::iter::zip(shader_source_paths, shader_output_paths).for_each(|(input, output)| { let mut file = File::open(&input).unwrap(); let mut buf = Vec::new(); file.read_to_end(&mut buf).unwrap(); let source = String::from_utf8(buf).unwrap(); let kind = match input .file_stem() .unwrap() .to_str() .unwrap() .split(".") .last() .unwrap() { "vert" => shaderc::ShaderKind::Vertex, "frag" => shaderc::ShaderKind::Fragment, "comp" => shaderc::ShaderKind::Compute, shader_type => panic!("Unexpected shader type: {}", shader_type), }; let spirv = compiler .compile_into_spirv( &source, kind, input.file_name().unwrap().to_str().unwrap(), "main", Some(&options), ) .unwrap(); let mut output_file = File::create(output).unwrap(); output_file.write_all(spirv.as_binary_u8()).unwrap(); }); // TEXTURES let texture_source_paths: Vec<PathBuf> = fs::read_dir("textures") .unwrap() .filter_map(|entry| { if let Ok(entry) = entry.as_ref() && let Some(extension) = entry.path().extension() && (extension == "png" || extension == "jpg") { Some(entry.path()) } else { None } }) .collect(); for texture_source_path in &texture_source_paths { println!("cargo:rerun-if-changed={}", texture_source_path.display()); } let texture_output_paths: Vec<PathBuf> = texture_source_paths .iter() .map(|path| { PathBuf::from(format!( "textures/compiled/{}.qoi", path.file_stem().unwrap().to_str().unwrap() )) }) .collect(); std::iter::zip(texture_source_paths, texture_output_paths).for_each(|(input, output)| { let image = ImageReader::open(input).unwrap().decode().unwrap(); let bytes = image.to_rgba8().to_vec(); let encoded = qoi::encode_to_vec(bytes, image.width(), image.height()).unwrap(); let mut output_file = File::create(output).unwrap(); output_file.write_all(&encoded).unwrap(); }); } ``` === systems::interact.rs ```pretty-rs // interact.rs use glam::{IVec2, Quat, UVec2, Vec3}; use crate::{ camera::Camera, components, data::{inventory::Inventory, Data}, entities::Player, input::Keyboard, renderer::{RENDER_HEIGHT, RENDER_WIDTH}, ui::{Element, Rectangle, Region, SizeConstraints}, }; use super::{Named, Positioned}; use std::{ f32::consts::PI, sync::{Arc, Mutex, Weak}, }; pub struct System { interactables: Vec<Weak<Mutex<dyn Interactable>>>, player: Option<Weak<Mutex<Player>>>, } impl System { pub fn new() -> 
Self { Self { interactables: Vec::new(), player: None, } } pub fn add<T: Interactable + Sized + 'static>(&mut self, interactable: Arc<Mutex<T>>) { self.interactables.push(Arc::downgrade( &(interactable as Arc<Mutex<dyn Interactable>>), )) } pub fn set_player(&mut self, player: Arc<Mutex<Player>>) { self.player = Some(Arc::downgrade(&player)); } pub fn frame_finished( &mut self, camera: &Camera, keyboard: &Keyboard, scene: &mut Vec<Rectangle>, data: &mut Data, ) { if self.player.is_none() || self.player.as_ref().unwrap().upgrade().is_none() { return; } let camera_delta = Quat::from_axis_angle(Vec3::new(0.0, 1.0, 0.0), 2.0 * PI - camera.actual_theta) * (camera.target - camera.actual_target); let player_position = self .player .as_ref() .unwrap() .upgrade() .unwrap() .lock() .unwrap() .get_position(); let mut distances = self .interactables .iter() .enumerate() .filter_map(|(i, interactable)| interactable.upgrade().map(|g| (i, g))) .filter(|(_, interactable)| interactable.lock().unwrap().active()) .map(|(i, interactable)| { ( i, (interactable.lock().unwrap().get_position() - player_position).length(), ) }) .collect::<Vec<(usize, f32)>>(); distances.sort_by(|(_, a), (_, b)| a.total_cmp(&b)); let Some(closest) = distances.first() else { return; }; if closest.1 < 50.0 { let interactable = self.interactables[closest.0].upgrade().unwrap(); let mut widget = components::interact::Component::new(&interactable.lock().unwrap().get_name()); let size = widget.layout(SizeConstraints { min: UVec2::new(0, 0), max: UVec2::new(RENDER_WIDTH, RENDER_HEIGHT), }); let origin = IVec2::new(250, 145) + IVec2::new( camera_delta.x as i32, (camera_delta.z * 2.0_f32.powf(-0.5)) as i32, ); widget.paint( Region { origin: UVec2::new(origin.x as u32, origin.y as u32), size, }, scene, ); if keyboard.is_key_pressed(winit::event::VirtualKeyCode::F) { interactable.lock().unwrap().interact(data); } } } } pub trait Interactable: Named + Positioned { fn interact(&mut self, data: &mut Data); fn active(&self) -> bool; } ``` === scenes::root.rs ```pretty-rs // root.rs use std::{ ops::Deref, sync::{Arc, Mutex}, }; use ash::vk; use assets::{ModelRegistry, Transform}; use glam::{Quat, Vec2, Vec3}; use crate::{ camera::Camera, entities::{CraftingBench, Furnace, Grass, Player, Sun}, input::{Keyboard, Mouse}, renderer::Renderer, socket::Socket, systems::{render::Light, Systems}, time::Time, }; use super::{Fireflies, Ores, Trees}; pub struct RootScene { pub player: Arc<Mutex<Player>>, pub sun: Arc<Mutex<Sun>>, pub grass: Arc<Mutex<Grass>>, pub trees: Trees, pub fireflies: Fireflies, pub furnace: Arc<Mutex<Furnace>>, pub crafting_bench: Arc<Mutex<CraftingBench>>, pub ores: Ores, } impl RootScene { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, ) -> Result<Self, vk::Result> { let player = { let transform = Transform { translation: Vec3::new(0.0, 10.0, 0.0), rotation: Quat::IDENTITY, scale: Vec3::ONE, }; Player::new(renderer, systems, model_registry, transform).unwrap() }; let sun = Sun::new( systems, Vec3::new(0.0, 1000000.0, 0.0), Vec3::new(0.8, 1.0, 0.5), ); let grass = Grass::new(renderer, systems, model_registry, Transform::IDENTITY).unwrap(); let trees = Trees::new(renderer, systems, model_registry)?; let fireflies = Fireflies::new(renderer, systems, model_registry)?; let furnace = Furnace::new( renderer, systems, model_registry, Transform { translation: Vec3::new(100.0, 0.0, 100.0), scale: Vec3::new(0.2, 0.2, 0.2), ..Default::default() }, )?; let ores = Ores::new(renderer, systems, 
model_registry)?; let crafting_bench = CraftingBench::new( renderer, systems, model_registry, Transform { translation: Vec3::new(100.0, 0.0, 30.0), rotation: Quat::IDENTITY, scale: Vec3::new(0.1, 0.1, 0.1), }, )?; Ok(Self { player, sun, grass, trees, fireflies, furnace, crafting_bench, ores, }) } pub fn frame_finished( &mut self, keyboard: &Keyboard, mouse: &Mouse, camera: &Camera, time: &Time, viewport: Vec2, socket: &Socket, ) { self.player .lock() .unwrap() .frame_finished(keyboard, mouse, camera, time, viewport, socket); self.sun.lock().unwrap().frame_finished(time); self.fireflies.iter_mut().for_each(|firefly| { firefly .lock() .unwrap() .frame_finished(&self.sun.lock().unwrap(), time) }); } } ``` === vulkan::context.rs ```pretty-rs // context.rs use super::{allocator::Allocator, command, Device, Instance, Surface, Swapchain}; use ash::{vk, Entry}; use std::sync::{Arc, Mutex}; pub struct Context { pub instance: Instance, pub surface: Surface, pub device: Arc<Device>, pub swapchain: Swapchain, pub command_pool: command::Pool, pub image_available: vk::Semaphore, pub allocator: Arc<Mutex<Allocator>>, } impl Context { pub fn new(window: &winit::window::Window) -> Self { let entry = Entry::linked(); let instance = Instance::new(&entry).expect("Vulkan instance creation failed"); let surface = Surface::new(&instance, window).expect("Vulkan surface creation failed"); let device = unsafe { Arc::new(Device::new(&instance, &surface).expect("Vulkan device creation failed")) }; let swapchain = Swapchain::new(&instance, &surface, &device, window) .expect("Vulkan swapchain creation failed"); let command_pool = command::Pool::new(device.clone()).unwrap(); let semaphore_info = vk::SemaphoreCreateInfo::builder(); let image_available = unsafe { device.create_semaphore(&semaphore_info, None).unwrap() }; let allocator = Allocator::new(&instance, device.clone()).unwrap(); Self { instance, surface, device, swapchain, command_pool, image_available, allocator: Arc::new(Mutex::new(allocator)), } } pub unsafe fn start_frame(&mut self, in_flight: vk::Fence) -> Result<u32, vk::Result> { unsafe { let image_index = self .device .extensions .swapchain .as_ref() .unwrap() .acquire_next_image( self.swapchain.swapchain, u64::MAX, self.image_available, vk::Fence::null(), )? 
.0; self.device.reset_fences(&[in_flight]).unwrap(); self.allocator.lock().unwrap().flush_frees(); Ok(image_index) } } pub unsafe fn end_frame( &self, image_index: u32, render_finished: vk::Semaphore, ) -> Result<(), vk::Result> { unsafe { let signal_semaphores = &[render_finished]; let swapchains = &[self.swapchain.swapchain]; let image_indices = &[image_index]; let present_info = vk::PresentInfoKHR::builder() .wait_semaphores(signal_semaphores) .swapchains(swapchains) .image_indices(image_indices); self.device .extensions .swapchain .as_ref() .unwrap() .queue_present(self.device.queues.present.queue, &present_info)?; } Ok(()) } } ``` === entities::crafting_bench.rs ```pretty-rs // crafting_bench.rs use crate::{ data::{Data, Recipe}, renderer::Renderer, systems::{ interact::Interactable, render::{RenderObject, Renderable}, Named, Positioned, Systems, }, }; use ash::vk; use assets::{ModelRegistry, Transform}; use common::item::{Item, ItemStack}; use glam::Vec3; use std::sync::{Arc, Mutex}; pub struct CraftingBench { render: RenderObject, } impl CraftingBench { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, transform: Transform, ) -> Result<Arc<Mutex<Self>>, vk::Result> { let render = RenderObject { model: model_registry.load("crafting_bench.glb"), transform, }; let bench = Arc::new(Mutex::new(Self { render })); systems.render.add(bench.clone()); systems.interact.add(bench.clone()); Ok(bench) } } impl Renderable for CraftingBench { fn get_objects(&self) -> Vec<RenderObject> { vec![self.render.clone()] } } impl Named for CraftingBench { fn get_name(&self) -> String { "Crafting Bench".to_owned() } } impl Positioned for CraftingBench { fn get_position(&self) -> Vec3 { self.render.transform.translation } } impl Interactable for CraftingBench { fn active(&self) -> bool { true } fn interact(&mut self, data: &mut Data) { data.recipe_selections = Some(vec![ Recipe { ingredients: vec![ ItemStack { item: Item::Wood, amount: 3, }, ItemStack { item: Item::Fireglow, amount: 2, }, ], outputs: vec![ItemStack { item: Item::Lamp, amount: 1, }], }, Recipe { ingredients: vec![ ItemStack { item: Item::Wood, amount: 2, }, ItemStack { item: Item::CopperIngot, amount: 2, }, ], outputs: vec![ItemStack { item: Item::CopperSword, amount: 1, }], }, ]) } } ``` === data::inventory.rs ```pretty-rs // inventory.rs use common::{ item::{Item, ItemStack}, net, }; use std::sync::Arc; use tracing::warn; use crate::socket::Socket; #[derive(Clone)] pub struct Inventory { inventory: Vec<ItemStack>, socket: Arc<Socket>, } impl Inventory { pub fn new(socket: Arc<Socket>) -> Self { Self { inventory: Vec::new(), socket, } } fn update(&self, item: Item) { let Some(stack) = self.inventory.iter().find(|s| s.item == item) else { warn!("Tried to update stack {:?} that doesn't exist", item); return; }; let packet = net::server::Packet::ModifyInventory(net::server::ModifyInventory { stack: stack.clone(), }); if let Err(e) = self.socket.send(&packet) { warn!("Failed to update stack {:?} due to {}", item, e); return; } } pub fn add(&mut self, stack: ItemStack) { if let Some(existing) = self.inventory.iter_mut().find(|s| s.item == stack.item) { existing.amount += stack.amount; } else { self.inventory.push(stack); } self.update(stack.item); } pub fn remove(&mut self, stack: ItemStack) { if let Some((i, existing)) = self .inventory .iter_mut() .enumerate() .find(|(_, s)| s.item == stack.item) { if existing.amount < stack.amount { warn!( "Removing {} from inventory would give negative items", 
stack ); return; } existing.amount -= stack.amount; if existing.amount == 0 { self.inventory.remove(i); } } else { warn!("Tried to remove {} but no such stack existed", stack); } self.update(stack.item); } pub fn set(&mut self, stack: ItemStack) { if let Some(existing) = self.inventory.iter_mut().find(|s| s.item == stack.item) { existing.amount = stack.amount; } else { self.inventory.push(stack); } self.update(stack.item); } pub fn get_items(&self) -> &[ItemStack] { &self.inventory } } ``` === entities::furnace.rs ```pretty-rs // furnace.rs use crate::{ data::{Data, Recipe}, renderer::Renderer, systems::{ interact::Interactable, render::{Emissive, Light, RenderObject, Renderable}, Named, Positioned, Systems, }, }; use ash::vk; use assets::{ModelRegistry, Transform}; use common::item::{Item, ItemStack}; use glam::Vec3; use std::sync::{Arc, Mutex}; pub struct Furnace { render: RenderObject, light: Light, } impl Furnace { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, transform: Transform, ) -> Result<Arc<Mutex<Self>>, vk::Result> { let render = RenderObject { model: model_registry.load("furnace.glb"), transform: transform.clone(), }; let light = Light::new( transform.translation + Vec3::new(0.0, 20.0, -10.0), 4000.0, Vec3::new(0.976, 0.451, 0.086), ); let furnace = Arc::new(Mutex::new(Self { render, light })); systems.render.add(furnace.clone()); systems.render.add_light(furnace.clone()); systems.interact.add(furnace.clone()); Ok(furnace) } } impl Renderable for Furnace { fn get_objects(&self) -> Vec<RenderObject> { vec![self.render.clone()] } } impl Named for Furnace { fn get_name(&self) -> String { "Furnace".to_owned() } } impl Positioned for Furnace { fn get_position(&self) -> Vec3 { self.render.transform.translation } } impl Interactable for Furnace { fn interact(&mut self, data: &mut Data) { data.current_recipe = Some(Recipe { ingredients: vec![ItemStack { item: Item::CopperOre, amount: 3, }], outputs: vec![ItemStack { item: Item::CopperIngot, amount: 1, }], }) } fn active(&self) -> bool { true } } impl Emissive for Furnace { fn get_lights(&self, _: &Data) -> Vec<Light> { vec![self.light] } } ``` === common::net.rs ```pretty-rs // net.rs mod common { use crate::item::ItemStack; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ModifyInventory { pub stack: ItemStack, } } pub mod server { pub use super::common::ModifyInventory; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Login { pub username: String, pub password: String, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Move { pub position: glam::Vec3, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Signup { pub username: String, pub password: String, } #[derive(Serialize, Deserialize, Debug, Clone)] pub enum Packet { Login(Login), Move(Move), Heartbeat, Disconnect, ModifyInventory(ModifyInventory), Signup(Signup), } } pub mod client { pub use super::common::ModifyInventory; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct SpawnPlayer { pub username: String, pub position: glam::Vec3, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct DespawnPlayer { pub username: String, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Move { pub username: String, pub position: glam::Vec3, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct NotifyDisconnection { pub reason: String, } #[derive(Serialize, 
Deserialize, Debug, Clone)] pub struct DisplayError { pub message: String, pub fatal: bool, } #[derive(Serialize, Deserialize, Debug, Clone)] pub enum Packet { SpawnPlayer(SpawnPlayer), DespawnPlayer(DespawnPlayer), Move(Move), NotifyDisconnection(NotifyDisconnection), ModifyInventory(ModifyInventory), DisplayError(DisplayError), } } ``` === aetheria::camera.rs ```pretty-rs // camera.rs use std::f32::EPSILON; use ash::vk; use bytemuck::{cast_slice, cast_slice_mut}; use glam::{Mat4, Quat, Vec3}; use vulkan::Buffer; use crate::renderer::Renderer; pub struct Camera { pub target: Vec3, pub actual_target: Vec3, pub theta: f32, pub actual_theta: f32, pub width: f32, pub height: f32, pub buffer: Buffer, } impl Camera { const DAMPING: f32 = 0.2; pub fn new(width: f32, height: f32, renderer: &Renderer) -> Result<Self, vk::Result> { //let theta = -45.01_f32.to_radians(); let theta = 0.0; let target = Vec3::new(0.0, 0.0, 0.0); let camera = Self { theta, actual_theta: theta, target, actual_target: target, width, height, buffer: Buffer::new(&renderer, [0_u8; 32], vk::BufferUsageFlags::UNIFORM_BUFFER)?, }; Ok(camera) } fn pad_vec3(data: Vec3) -> [f32; 4] { [data.x, data.y, data.z, 0.0] } pub fn update_buffer(&mut self) { let mut eye = Quat::from_axis_angle(Vec3::new(0.0, 1.0, 0.0), self.actual_theta) * Vec3::new(0.0, 500.0 * 2.0_f32.powf(-0.5), -500.0); eye += self.actual_target; let vp = [Self::pad_vec3(eye), Self::pad_vec3(self.actual_target)] .iter() .flatten() .copied() .collect::<Vec<f32>>(); let vp = cast_slice::<f32, u8>(&vp); self.buffer.upload(vp); } pub fn frame_finished(&mut self) { if (self.actual_theta - self.theta).abs() > EPSILON { self.actual_theta += (self.theta - self.actual_theta) * Self::DAMPING; } if (self.actual_target - self.target).length() > EPSILON { self.actual_target += (self.target - self.actual_target) * Self::DAMPING; } self.update_buffer(); } pub fn get_rotation(&self) -> Quat { Quat::from_axis_angle(Vec3::new(0.0, 1.0, 0.0), self.theta) } } ``` === entities::copper_ore.rs ```pretty-rs // copper_ore.rs use crate::{ data::Data, renderer::Renderer, systems::{ interact::Interactable, render::{RenderObject, Renderable}, Named, Positioned, Systems, }, }; use ash::vk; use assets::{ModelRegistry, Transform}; use common::item::{Item, ItemStack}; use glam::Vec3; use std::sync::{Arc, Mutex}; pub struct CopperOre { render: RenderObject, gathered: bool, } impl CopperOre { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, transform: Transform, ) -> Result<Arc<Mutex<Self>>, vk::Result> { let render = RenderObject { model: model_registry.load("copper_ore.glb"), transform, }; let ore = Arc::new(Mutex::new(Self { render, gathered: false, })); systems.render.add(ore.clone()); systems.interact.add(ore.clone()); Ok(ore) } } impl Renderable for CopperOre { fn get_objects(&self) -> Vec<RenderObject> { if self.gathered { Vec::new() } else { vec![self.render.clone()] } } } impl Named for CopperOre { fn get_name(&self) -> String { "Copper Ore".to_owned() } } impl Positioned for CopperOre { fn get_position(&self) -> Vec3 { self.render.transform.translation.clone() } } impl Interactable for CopperOre { fn active(&self) -> bool { !self.gathered } fn interact(&mut self, data: &mut Data) { data.inventory.add(ItemStack { item: Item::CopperOre, amount: 1, }); self.gathered = true; } } ``` === entities::tree.rs ```pretty-rs // tree.rs use std::sync::{Arc, Mutex}; use ash::vk; use assets::{ModelRegistry, Transform}; use glam::Vec3; use crate::{ 
data::{inventory::Inventory, Data}, renderer::Renderer, systems::{ interact::Interactable, render::{RenderObject, Renderable}, Named, Positioned, Systems, }, }; use common::item::{Item, ItemStack}; pub struct Tree { pub tree: RenderObject, gathered: bool, } impl Tree { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, transform: Transform, ) -> Result<Arc<Mutex<Tree>>, vk::Result> { let tree = RenderObject { model: model_registry.load("tree.glb"), transform, }; let tree = Arc::new(Mutex::new(Self { tree, gathered: false, })); systems.render.add(tree.clone()); systems.interact.add(tree.clone()); Ok(tree) } } impl Renderable for Tree { fn get_objects(&self) -> Vec<RenderObject> { if !self.gathered { vec![self.tree.clone()] } else { Vec::new() } } } impl Named for Tree { fn get_name(&self) -> String { "Tree".to_owned() } } impl Positioned for Tree { fn get_position(&self) -> Vec3 { self.tree.transform.translation } } impl Interactable for Tree { fn interact(&mut self, data: &mut Data) { data.inventory.add(ItemStack { item: Item::Wood, amount: 1, }); self.gathered = true; } fn active(&self) -> bool { !self.gathered } } ``` === vulkan::buffer.rs ```pretty-rs // buffer.rs use super::{ allocator::{Allocation, Allocator}, Context, }; use ash::vk::{self, MemoryPropertyFlags}; use std::sync::{Arc, Mutex}; use std::{ ops::{Deref, Drop}, result::Result, }; pub struct Buffer { pub(crate) buffer: vk::Buffer, pub(crate) allocation: Allocation, pub size: usize, allocator: Arc<Mutex<Allocator>>, } impl Buffer { pub fn new<T: Into<Vec<u8>>>( ctx: &Context, data: T, usage: vk::BufferUsageFlags, ) -> Result<Self, vk::Result> { let bytes: Vec<u8> = data.into(); let create_info = vk::BufferCreateInfo::builder() .size(bytes.len() as u64) .usage(usage); let (buffer, allocation) = ctx.allocator.lock().unwrap().create_buffer( &create_info, MemoryPropertyFlags::DEVICE_LOCAL | MemoryPropertyFlags::HOST_VISIBLE | MemoryPropertyFlags::HOST_COHERENT, )?; ctx.allocator.lock().unwrap().write(&allocation, &bytes)?; Ok(Self { buffer, allocation, size: bytes.len(), allocator: ctx.allocator.clone(), }) } pub fn upload(&self, bytes: &[u8]) { self.allocator .lock() .unwrap() .write(&self.allocation, bytes) .expect("Failed to write to buffer"); } } impl Deref for Buffer { type Target = vk::Buffer; fn deref(&self) -> &Self::Target { &self.buffer } } impl Drop for Buffer { fn drop(&mut self) { self.allocator.lock().unwrap().free(&self.allocation); } } ``` === vulkan::surface.rs ```pretty-rs // surface.rs use super::Instance; use ash::vk; use std::{ffi::c_void, ops::Deref, result::Result}; use winit::window::Window; #[cfg(target_os = "linux")] use winit::platform::x11::WindowExtX11; #[cfg(target_os = "windows")] use winit::platform::windows::WindowExtWindows; pub struct Surface { pub(crate) surface: vk::SurfaceKHR, } impl Surface { #[cfg(target_os = "linux")] pub fn new(instance: &Instance, window: &Window) -> Result<Self, vk::Result> { let create_info = vk::XlibSurfaceCreateInfoKHR::builder() .dpy(window.xlib_display().unwrap().cast::<*const c_void>()) .window(window.xlib_window().unwrap()); let surface = unsafe { instance .extensions .xlib_surface .as_ref() .unwrap() .create_xlib_surface(&create_info, None)? 
}; Ok(Self { surface }) } #[cfg(target_os = "windows")] pub fn new(instance: &Instance, window: &Window) -> Result<Self, vk::Result> { let create_info = vk::Win32SurfaceCreateInfoKHR::builder() .hinstance(window.hinstance() as *const c_void) .hwnd(window.hwnd() as *const c_void); let surface = unsafe { instance .extensions .win32_surface .as_ref() .unwrap() .create_win32_surface(&create_info, None)? }; Ok(Self { surface }) } } impl Deref for Surface { type Target = vk::SurfaceKHR; fn deref(&self) -> &Self::Target { &self.surface } } ``` === entities::sun.rs ```pretty-rs // sun.rs use std::{ f32::consts::PI, ops::Deref, sync::{Arc, Mutex}, time::SystemTime, }; use glam::{Quat, Vec3}; use crate::{ data::Data, systems::{ render::{Emissive, Light}, Systems, }, time::Time, }; pub struct Sun { noon_pos: Vec3, pub light: Light, theta: f32, } impl Sun { pub fn new(systems: &mut Systems, noon_pos: Vec3, color: Vec3) -> Arc<Mutex<Self>> { let seconds = SystemTime::UNIX_EPOCH.elapsed().unwrap().as_secs(); let mut sun = Self { noon_pos, light: Light::new(noon_pos, 0.0, color), theta: (seconds % 120) as f32 * (PI / 60.0), }; sun.update_theta(sun.theta); let sun = Arc::new(Mutex::new(sun)); systems.render.add_light(sun.clone()); sun } pub fn update_theta(&mut self, theta: f32) { self.theta = theta % (std::f32::consts::PI * 2.0); self.light.position = Quat::from_axis_angle(Vec3::new(0.0, 0.0, 1.0), self.theta) * self.noon_pos; self.light.color = Vec3::new(1.0, 1.0, 1.0); self.light.strength = self.light.position.length().powf(2.0) * 1.5 * self.theta.cos().powf(0.13).max(0.0); self.light.strength = self.light.strength.max(0.0); } pub fn frame_finished(&mut self, time: &Time) { self.update_theta(self.theta + (time.delta_seconds() * (PI / 60.0))); } pub fn get_theta(&self) -> f32 { self.theta } } impl Emissive for Sun { fn get_lights(&self, _: &Data) -> Vec<Light> { vec![self.light] } } ``` === components::recipe_selector.rs ```pretty-rs // recipe_selector.rs use super::components::{Button, Container, HAlign, Handler, Padding, VList}; use crate::{ data::{Data, Recipe}, input::Mouse, ui, }; use std::sync::{Arc, Mutex}; pub type Component<'a> = Container<Padding<VList<Button<'a, RecipeSelectorHandler<'a>>>>>; pub struct RecipeSelectorHandler<'a> { recipe: Recipe, data: Arc<Mutex<&'a mut Data>>, } impl Handler for RecipeSelectorHandler<'_> { fn handle(&mut self) { self.data.lock().unwrap().current_recipe = Some(self.recipe.clone()); self.data.lock().unwrap().recipe_selections = None; } } impl<'a> Component<'a> { pub fn new(data: &'a mut Data, mouse: &'a Mouse) -> Option<Self> { let recipes = data.recipe_selections.as_ref()?.clone(); let data_mutex = Arc::new(Mutex::new(data)); let buttons = recipes .iter() .map(|recipe| { let handler = RecipeSelectorHandler { recipe: recipe.clone(), data: data_mutex.clone(), }; Button::new(mouse, &format!("{}", recipe.outputs[0]), handler) }) .collect(); Some(Self { child: Padding::new_uniform( VList { children: buttons, separation: 2, align: HAlign::Left, }, 2, ), color: ui::color::get_background(), border_radius: 1, border_color: ui::color::get_highlight(), }) } } ``` === scenes::ores.rs ```pretty-rs // ores.rs use std::{ f32::consts::PI, ops::{Deref, DerefMut}, sync::{Arc, Mutex}, }; use crate::{entities::CopperOre, renderer::Renderer, systems::Systems}; use ash::vk; use assets::{ModelRegistry, Transform}; use glam::{Quat, Vec3}; use rand::Rng; const NUM_ORES: u32 = 10; pub struct Ores { trees: Vec<Arc<Mutex<CopperOre>>>, } impl Ores { pub fn new( renderer: &mut Renderer, 
systems: &mut Systems, model_registry: &mut ModelRegistry, ) -> Result<Self, vk::Result> { let mut trees = Vec::new(); let mut rng = rand::thread_rng(); for _ in 0..NUM_ORES { let translation = Vec3::new( rng.gen_range(-400.0..400.0), 0.0, rng.gen_range(-400.0..400.0), ); let rotation = Quat::from_axis_angle(Vec3::new(0.0, 1.0, 0.0), rng.gen_range(-PI..PI)); let transform = Transform { translation, rotation, scale: Vec3::new(0.1, 0.1, 0.1), }; trees.push(CopperOre::new(renderer, systems, model_registry, transform).unwrap()); } Ok(Self { trees }) } } impl Deref for Ores { type Target = Vec<Arc<Mutex<CopperOre>>>; fn deref(&self) -> &Self::Target { &self.trees } } impl DerefMut for Ores { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.trees } } ``` === scenes::trees.rs ```pretty-rs // trees.rs use std::{ f32::consts::PI, ops::{Deref, DerefMut}, sync::{Arc, Mutex}, }; use ash::vk; use assets::{ModelRegistry, Transform}; use glam::{Quat, Vec3}; use rand::Rng; use crate::{entities::Tree, renderer::Renderer, systems::Systems}; const NUM_TREES: u32 = 10; pub struct Trees { trees: Vec<Arc<Mutex<Tree>>>, } impl Trees { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, ) -> Result<Self, vk::Result> { let mut trees = Vec::new(); let mut rng = rand::thread_rng(); for _ in 0..NUM_TREES { let translation = Vec3::new( rng.gen_range(-400.0..400.0), 0.0, rng.gen_range(-400.0..400.0), ); let rotation = Quat::from_axis_angle(Vec3::new(0.0, 1.0, 0.0), rng.gen_range(-PI..PI)); let transform = Transform { translation, rotation, scale: Vec3::new(0.1, 0.1, 0.1), }; trees.push(Tree::new(renderer, systems, model_registry, transform).unwrap()); } Ok(Self { trees }) } } impl Deref for Trees { type Target = Vec<Arc<Mutex<Tree>>>; fn deref(&self) -> &Self::Target { &self.trees } } impl DerefMut for Trees { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.trees } } ``` === vulkan::compute.rs ```pretty-rs // compute.rs use ash::vk::{self, DescriptorSetLayout}; use crate::{Device, SetLayout, Shader}; use std::{ops::Deref, sync::Arc}; #[derive(Clone)] pub struct Pipeline { shader: Arc<Shader>, pub(crate) layout: vk::PipelineLayout, pipeline: vk::Pipeline, } impl Pipeline { pub fn new( device: &Device, shader: Arc<Shader>, layouts: &[SetLayout], ) -> Result<Self, vk::Result> { let stage = shader.get_stage(); let descriptors = layouts .iter() .map(|layout| layout.layout) .collect::<Vec<DescriptorSetLayout>>(); println!("Descriptors: {}", descriptors.len()); let layout_info = vk::PipelineLayoutCreateInfo::builder().set_layouts(&descriptors); let layout = unsafe { device.create_pipeline_layout(&layout_info, None)? 
}; let pipeline_info = vk::ComputePipelineCreateInfo::builder() .stage(*stage) .layout(layout); let pipeline = unsafe { device .create_compute_pipelines(vk::PipelineCache::null(), &[*pipeline_info], None) .expect("Failed to create compute pipeline")[0] }; Ok(Self { shader, layout, pipeline, }) } } impl Deref for Pipeline { type Target = vk::Pipeline; fn deref(&self) -> &Self::Target { &self.pipeline } } ``` === scenes::fireflies.rs ```pretty-rs // fireflies.rs use std::{ ops::{Deref, DerefMut}, sync::{Arc, Mutex}, }; use ash::vk; use assets::{ModelRegistry, Transform}; use glam::Vec3; use rand::Rng; use crate::{entities::Firefly, renderer::Renderer, systems::Systems}; const NUM_FIREFLIES: u32 = 10; pub struct Fireflies { fireflies: Vec<Arc<Mutex<Firefly>>>, } impl Fireflies { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, ) -> Result<Self, vk::Result> { let mut fireflies = Vec::new(); let mut rng = rand::thread_rng(); for _ in 0..NUM_FIREFLIES { let position = Vec3::new( rng.gen_range(-400.0..400.0), 50.0, rng.gen_range(-400.0..400.0), ); fireflies.push( Firefly::new( renderer, systems, model_registry, position, Vec3::new(1.0, 1.0, 1.0), ) .unwrap(), ); } Ok(Self { fireflies }) } } impl Deref for Fireflies { type Target = Vec<Arc<Mutex<Firefly>>>; fn deref(&self) -> &Self::Target { &self.fireflies } } impl DerefMut for Fireflies { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.fireflies } } ``` === aetheria::time.rs ```pretty-rs // time.rs use ash::vk; use bytemuck::cast_slice; use std::time::Instant; use tracing::info; use vulkan::Buffer; use crate::renderer::Renderer; pub struct Time { last_frame: Instant, current_frame: Instant, pub time: f32, pub buffer: Buffer, } impl Time { pub fn new(renderer: &Renderer) -> Result<Self, vk::Result> { info!("Starting frame timer"); let time = Self { last_frame: Instant::now(), current_frame: Instant::now(), time: 0.0, buffer: Buffer::new(renderer, [0_u8; 8], vk::BufferUsageFlags::UNIFORM_BUFFER)?, }; Ok(time) } pub fn delta_seconds(&self) -> f32 { (self.current_frame - self.last_frame).as_secs_f32() } fn update_buffer(&mut self) { let delta = self.delta_seconds(); let data = &[self.time, delta]; let data = cast_slice::<f32, u8>(data); self.buffer.upload(data); } pub fn frame_finished(&mut self) { let delta = self.delta_seconds(); self.time += delta; println!("FPS: {}", 1.0 / self.delta_seconds()); self.last_frame = self.current_frame; self.current_frame = Instant::now(); self.update_buffer(); } } ``` === components::interact.rs ```pretty-rs // interact.rs use crate::ui::{self, Element}; use super::components::*; use glam::Vec4; pub type Component = Container<Padding<HPair<Container<Padding<Text>>, Text>>>; impl Component { pub fn new(name: &str) -> Self { let f = Text { color: ui::color::get_highlight(), content: "F".to_owned(), }; let padded_f = Padding { child: f, top: 1, bottom: 1, left: 1, right: 0, }; let left = Container { child: padded_f, color: ui::color::get_background(), border_color: ui::color::get_highlight(), border_radius: 1, }; let right = Text { color: ui::color::get_highlight(), content: name.to_owned(), }; let hpair = HPair::new(left, right, VAlign::Center, 2); let padding = Padding { child: hpair, top: 2, bottom: 2, left: 2, right: 2, }; Container { child: padding, border_radius: 1, border_color: ui::color::get_highlight(), color: ui::color::get_background(), } .into() } } ``` === vulkan::lib.rs ```pretty-rs // lib.rs #![feature(once_cell_try)] pub mod instance; pub use 
instance::Instance; pub mod buffer; pub use buffer::Buffer; pub mod command; pub use command::DrawOptions; pub mod context; pub use context::Context; pub mod descriptor; pub use descriptor::*; pub mod device; pub use device::Device; pub mod image; pub use image::{Image, Texture}; pub mod graphics; pub use graphics::{Pipeline, Shader, Shaders, VertexInputBuilder}; pub mod renderpass; pub use renderpass::Renderpass; pub mod surface; pub use surface::Surface; pub mod swapchain; pub use swapchain::Swapchain; pub mod compute; pub mod allocator; use cstr::cstr; use std::{clone::Clone, cmp::Eq, collections::HashSet, ffi::CStr, hash::Hash}; #[cfg(debug_assertions)] fn get_wanted_layers() -> Vec<&'static CStr> { vec![cstr!("VK_LAYER_KHRONOS_validation")] } #[cfg(not(debug_assertions))] fn get_wanted_layers() -> Vec<&'static CStr> { vec![] } fn intersection<T: Hash + Clone + Eq>(a: &[T], b: &[T]) -> Vec<T> { let a_unique: HashSet<T> = a.iter().cloned().collect(); let b_unique: HashSet<T> = b.iter().cloned().collect(); a_unique.intersection(&b_unique).cloned().collect() } ``` === common::item.rs ```pretty-rs // item.rs use num_derive::{FromPrimitive, ToPrimitive}; use serde::{Deserialize, Serialize}; use std::fmt::Display; #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq, Eq, FromPrimitive, ToPrimitive)] pub enum Item { Wood, Fireglow, Lamp, CopperOre, CopperIngot, CopperSword, } impl Display for Item { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "{}", match self { Self::Wood => "Wood", Self::Fireglow => "Fireglow", Self::Lamp => "Lamp", Self::CopperOre => "Copper Ore", Self::CopperIngot => "Copper Ingot", Self::CopperSword => "Copper Sword", } ) } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq, Eq)] pub struct ItemStack { pub item: Item, pub amount: u32, } impl Display for ItemStack { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{} x{}", self.item, self.amount) } } ``` === common::lib.rs ```pretty-rs // lib.rs pub mod item; pub mod net; use std::ops::Deref; pub trait Observer<T> { fn notify(&self, old: &T, new: &T); } pub struct Observable<T: Clone> { inner: T, observers: Vec<Box<dyn Observer<T>>>, } impl<T: Clone> Observable<T> { pub fn new(inner: T) -> Self { Self { inner, observers: Vec::new(), } } pub fn register(&mut self, observer: Box<dyn Observer<T>>) { self.observers.push(observer) } pub fn run<F: FnOnce(&mut T)>(&mut self, predicate: F) { let old = self.inner.clone(); predicate(&mut self.inner); self.observers .iter() .for_each(|observer| observer.notify(&old, &self.inner)) } // Chose to do this instead of DerefMut to be more verbose about the fact observers won't be // triggered pub fn run_silent<F: FnOnce(&mut T)>(&mut self, predicate: F) { predicate(&mut self.inner); } } impl<T: Clone> Deref for Observable<T> { type Target = T; fn deref(&self) -> &Self::Target { &self.inner } } ``` === components::inventory.rs ```pretty-rs // inventory.rs use glam::{UVec2, Vec4}; use super::components::{Container, HAlign, Padding, Text, VList}; use crate::{ data::inventory::Inventory, ui::{self, Element, Rectangle, Region, SizeConstraints}, }; pub type Component = Container<Padding<VList<Text>>>; impl Component { pub fn new(inventory: &Inventory) -> Self { let text = inventory .get_items() .iter() .map(|stack| Text { color: ui::color::get_highlight(), content: format!("{}", stack), }) .collect::<Vec<Text>>(); let vlist = VList { children: text, separation: 3, align: HAlign::Left, }; let padding = 
Padding::new_uniform(vlist, 2); Self { child: padding, color: ui::color::get_background(), border_radius: 1, border_color: ui::color::get_highlight(), } } } ``` === aetheria::socket.rs ```pretty-rs // socket.rs use common::net; use std::{ net::UdpSocket, ops::{Deref, DerefMut}, }; #[derive(thiserror::Error, Debug)] pub enum PacketSendError { #[error("Error sending packet")] IOError(#[from] std::io::Error), #[error("Error encoding packet")] PostcardError(#[from] postcard::Error), } pub struct Socket { inner: UdpSocket, } impl Socket { pub fn send(&self, packet: &net::server::Packet) -> Result<(), PacketSendError> { let bytes = postcard::to_stdvec(packet)?; self.inner.send(&bytes)?; Ok(()) } } impl Deref for Socket { type Target = UdpSocket; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for Socket { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } impl From<UdpSocket> for Socket { fn from(value: UdpSocket) -> Self { Self { inner: value } } } ``` === entities::grass.rs ```pretty-rs // grass.rs use std::sync::{Arc, Mutex}; use ash::vk; use assets::{ModelRegistry, Transform}; use glam::Vec3; use crate::{ renderer::Renderer, systems::{ render::{RenderObject, Renderable}, Systems, }, }; pub struct Grass { pub grass: RenderObject, } impl Grass { pub fn new( renderer: &mut Renderer, systems: &mut Systems, model_registry: &mut ModelRegistry, transform: Transform, ) -> Result<Arc<Mutex<Self>>, vk::Result> { let grass = RenderObject { model: model_registry.load("grass.glb"), transform, }; let grass = Arc::new(Mutex::new(Self { grass })); systems.render.add(grass.clone()); Ok(grass) } } impl Renderable for Grass { fn get_objects(&self) -> Vec<RenderObject> { vec![self.grass.clone()] } } ``` === data::mod.rs ```pretty-rs // mod.rs use common::item::ItemStack; pub mod inventory; #[derive(Clone, Debug)] pub struct Recipe { pub ingredients: Vec<ItemStack>, pub outputs: Vec<ItemStack>, } impl Recipe { pub fn has_ingredients(&self, inventory: &inventory::Inventory) -> bool { self.ingredients .iter() .map(|ingredient| { ingredient.amount <= inventory .get_items() .iter() .find(|stack| stack.item == ingredient.item) .map(|stack| stack.amount) .unwrap_or(0) }) .all(|x| x) } } pub struct Data { pub inventory: inventory::Inventory, pub current_recipe: Option<Recipe>, pub recipe_selections: Option<Vec<Recipe>>, } ``` === aetheria::Cargo.toml ```toml // Cargo.toml [package] name = "aetheria" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] assets = { path = "../assets" } gltf = { path = "../gltf" } hecs = { path = "../hecs" } vulkan = { path = "../vulkan" } common = { path = "../common" } ash = { version = "0.37.2", features = ["linked"] } winit = "0.28" cstr = "0.2.11" bytemuck = { version = "1.13", features = ["derive"] } tracing = "0.1" tracing-subscriber = "0.3" gpu-allocator = "0.22" glam = { version = "0.24", features = ["bytemuck"] } qoi = "0.4" rand = "0.8.5" anyhow = "1.0.71" num-traits = "0.2.15" postcard = { version = "1.0.6", features = ["use-std"] } thiserror = "1.0.44" dialog = "0.3.0" uuid = { version = "1.4.1", features = ["v4", "fast-rng"] } ``` === arbiter::Cargo.toml ```toml // Cargo.toml [package] name = "arbiter" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] common = { path = "../common" } anyhow = "1.0.71" num-derive = "0.4.0" num-traits = "0.2.15" tracing = 
"0.1.37" tracing-subscriber = "0.3.17" glam = { version = "0.24.1", features = ["bytemuck"] } bytemuck = "1.13.1" postcard = { version = "1.0.6", features = ["use-std"] } thiserror = "1.0.44" async-std = { version = "1.12.0", features = ["attributes"] } sqlx = { version = "0.7.1", features = ["runtime-async-std", "sqlite"] } ``` === aetheria::macros.rs ```pretty-rs // macros.rs #[repr(C)] // guarantee 'bytes' comes after '_align' pub struct AlignedAs<Align, Bytes: ?Sized> { pub _align: [Align; 0], pub bytes: Bytes, } #[macro_export] macro_rules! include_bytes_align_as { ($align_ty:ty, $path:literal) => {{ // const block expression to encapsulate the static use $crate::macros::AlignedAs; // this assignment is made possible by CoerceUnsized static ALIGNED: &AlignedAs<$align_ty, [u8]> = &AlignedAs { _align: [], bytes: *include_bytes!($path), }; &ALIGNED.bytes }}; } ``` === components::ui.rs ```pretty-rs // ui.rs use glam::UVec2; use crate::ui::{Rectangle, Element, SizeConstraints, Region}; use super::{inventory, craft}; pub struct UI<'a> { pub inventory: bool, pub craft: bool } impl UI<'_> { pub fn new() -> Self { Self { inventory: None, craft: None } } } impl Element for UI { fn layout(&mut self, constraint: SizeConstraints) -> UVec2 { UVec2::new(constraint.max.x, constraint.max.y) } fn paint(&mut self, region: Region, scene: &mut Vec<Rectangle>) { if self.inventory } } ff ff ``` === macros::lib.rs ```pretty-rs // lib.rs use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, DeriveInput}; #[proc_macro_derive(Entity)] pub fn entity_derive(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let gen = quote! { impl Entity for #name {} }; gen.into() } #[proc_macro_derive(Scene)] pub fn scene_derive(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); TokenStream::new() } ``` === systems::ui.rs ```pretty-rs // ui.rs use std::sync::{Weak, Mutex, Arc}; pub struct System { generators: Vec<Weak<Mutex<dyn UIGenerator>>> } impl System { pub fn new() -> Self { Self { generators: Vec::new() } } pub fn add<T: UIGenerator + Sized + 'static>(&mut self, generator: Arc<Mutex<T>>) { self.generators.push(Arc::downgrade( &(generator as Arc<Mutex<dyn UIGenerator>>), )) } } pub trait UIGenerator { fn generate() ff } ff ``` === hecs::lib.rs ```pretty-rs // lib.rs use std::any::{Any, TypeId}; pub use hecs_macros::*; pub trait Scene { fn tick(&mut self); fn load() -> Self; } pub trait Entity: Any {} pub trait System<T: Entity> { fn filter(entity: &dyn Entity) -> bool { println!( "Looking for {:?}, found {:?}", TypeId::of::<T>(), entity.type_id() ); entity.type_id() == TypeId::of::<T>() } fn run(&mut self, entity: &mut T); } ``` === assets::Cargo.toml ```toml // Cargo.toml [package] name = "assets" version = "0.1.0" edition = "2021" [build-dependencies] shaderc = "0.8" image = "0.24" qoi = "0.4" [dependencies] vulkan = { path = "../vulkan" } gltf = { path = "../gltf" } ash = "0.37.2" bytemuck = { version = "1.13", features = ["derive"] } tracing = "0.1" tobj = "4.0.0" glam = { version = "0.24", features = ["bytemuck"] } uuid = { version = "1.4.1", features = ["v4", "fast-rng"] } ``` === common::Cargo.toml ```toml // Cargo.toml [package] name = "common" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] num-derive = "0.4.0" num-traits = "0.2.15" bytemuck = "1.13" glam = { version = "0.24", features = 
["serde"] } thiserror = "1.0.44" postcard = "1.0.6" serde = { version = "1.0.180", features = ["derive"] } ``` === vulkan::Cargo.toml ```toml // Cargo.toml [package] name = "vulkan" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] ash = { version="0.37", features = ["linked"] } winit = "0.28" cstr = "0.2.11" bytemuck = "1.13" tracing = "0.1" gpu-allocator = "0.22" glam = { version = "0.23", features = ["bytemuck"] } qoi = "0.4" ``` === entities::mod.rs ```pretty-rs // mod.rs mod firefly; pub use firefly::Firefly; mod grass; pub use grass::Grass; mod player; pub use player::Player; mod sun; pub use sun::Sun; mod tree; pub use tree::Tree; mod furnace; pub use furnace::Furnace; mod crafting_bench; pub use crafting_bench::CraftingBench; mod copper_ore; pub use copper_ore::CopperOre; ``` === systems::mod.rs ```pretty-rs // mod.rs use glam::Vec3; pub mod interact; pub mod render; pub struct Systems<'a> { pub interact: &'a mut interact::System, pub render: &'a mut render::System, } pub trait Named { fn get_name(&self) -> String; } pub trait Positioned { fn get_position(&self) -> Vec3; } ``` === gltf::Cargo.toml ```toml // Cargo.toml [package] name = "gltf" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" bytemuck = "1.13" serde_repr = "0.1" ``` === macros::Cargo.toml ```toml // Cargo.toml [package] name = "hecs-macros" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] syn = "1.0" quote = "1.0" [lib] proc-macro = true ``` === hecs::Cargo.toml ```toml // Cargo.toml [package] name = "hecs" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] hecs-macros = { path = "macros" } ``` === ::Cargo.toml ```toml // Cargo.toml [workspace] default-members = ["aetheria"] members = [ "assets", "aetheria", "arbiter", "common", "gltf", "hecs", "hecs/macros", "vulkan" ] resolver = "2" [profile.release] debug = true ``` === arbiter::build.rs ```pretty-rs // build.rs // generated by `sqlx migrate build-script` fn main() { // trigger recompilation when a new migration is added println!("cargo:rerun-if-changed=migrations"); } ``` === scenes::mod.rs ```pretty-rs // mod.rs mod fireflies; pub use fireflies::Fireflies; mod root; pub use root::RootScene; mod trees; pub use trees::Trees; mod ores; pub use ores::Ores; ``` === components::mod.rs ```pretty-rs // mod.rs pub mod components; pub mod craft; pub mod interact; pub mod inventory; pub mod recipe_selector; ```
https://github.com/ern1/typiskt
https://raw.githubusercontent.com/ern1/typiskt/main/resume-content.typ
typst
#let hBar() = [ #h(0.2em) | #h(0.2em) ] #let sectionName = [Education] //Load Balancing of Parallel Tasks using Memory Bandwidth Restrictions. #let eduEntries = ( gu: ( title: [Master's degree, Computer Science], society: [University of Gothenburg/Chalmers], date: [2020 - ], location: [Gothenburg, Sweden], /*description: list( //[*Thesis*: Predicting Cache Coherence during Contextual Switching in Primitive Shaders with High Complex Mesh Count and Low Duplication], [*Courses*: // Todo: Korta ner Game Research #hBar() //An introduction to Game Research #hBar() Gameplay Design #hBar() Computer Graphics #hBar() Game Engine Architecture #hBar() Game Development Project #hBar() //Technology-driven experimental gameplay design #hBar() Prototyping in Interaction design #hBar() Requirements Engineering #hBar() Agile Development Processes] )*/ description: [ / Courses\:: Game Research, Gameplay Design, Computer Graphics, Game Engine Architecture, Game Development Project, Prototyping in Interaction Design, Requirement Engineering, Agile Development Processes, Embedded Systems ] ), mdh: ( title: [Bachelor of Science - BS, Computer Science], society: [Mälardalen University], date: [2016 - 2019], location: [Västerås, Sweden], /*description: list( [*Thesis*: Load Balancing of Parallel Tasks using Memory Bandwidth Restrictions \ Researched memory contention and resource allocation strategies in multi-core systems, particularly focusing on memory bandwidth restrictions and their impact on parallel task synchronization. Conducted experiments to assess the effectiveness of adaptive memory partitioning schemes in reducing execution times. Both the memory partitioning algorithm and benchmarking were implemented in C++, with OpenCV used to provide the workload for benchmarking.], [*Courses*: // Todo: Korta ner Programming (in C) #hBar() Data Structures #hBar() //Data Structures, Algorithms and Program Development #hBar() Databases #hBar() Data Communication #hBar() Web Applications #hBar() //Development of Web Applications #hBar() Mobile~Applications #hBar() //Programming Mobile Applications #hBar() Artificial Intelligence #hBar() Computer Graphics #hBar() //Fundamentals of Computer Graphics #hBar() Computer Architecture #hBar() Operating Systems #hBar() Parallell Systems #hBar() Functional Programming #hBar() //Functional Programming with F\# #hBar() //Linux Operating System and Development Environment #hBar() Interaction Design #hBar() //Software Engineering 1: Basic Course #hBar() //Software Engineering 2: Project Teamwork #hBar() Software Engineering 1 & 2 #hBar() Discrete Mathematics #hBar() Vector Algebra] )*/ description: [ / Thesis\:: _"Load Balancing of Parallel Tasks using Memory Bandwidth Restrictions_" \ Researched memory contention and resource allocation strategies in multi-core systems, particularly focusing on memory bandwidth restrictions and their impact on parallel task synchronization. Conducted experiments to assess the effectiveness of adaptive memory partitioning schemes in reducing execution times. Both the memory partitioning algorithm and benchmarking were implemented in C++, with OpenCV used to provide the workload for benchmarking. 
/ Courses\:: Programming (in C), Data Structures, Object-Oriented programming (in C\# and C++), Functional programming, Databases, Data Communication, Web Applications, Mobile Applications, Computer Graphics, Computer Architecture, Operating Systems, Parallel Systems, Interaction Design, Software Engineering 1 & 2, Discrete Mathematics, Vector Algebra] ) ) #let expEntries = ( evtest: ( title: [Android Developer], society: [Everyone Test], date: [July, 2021 - August, 2021], location: [Gothenburg, Sweden], description: list( [Helped develop a dash cam application using the camera2 API for Android. The application was controlled through an app running on a different device, which communicated through a web backend. The other device controlled when to start recording a trip, save a clip, etc.], [Technologies used include native Android, AWS, and socket.io] ) ), noteab: ( title: [Electronic Assembler], society: [Note AB], date: [August, 2014 - August, 2016], location: [Torsby, Sweden], description: list( [Assembled a variety of products, often from start to finish, many demanding a high level of precision and attention to detail], [Responsibilities included soldering tasks requiring IPC-certification, testing, and troubleshooting faulty parts/products] ) ), voltair: ( title: [Builder of Air Treatment Units], society: [Voltair System AB], date: [February, 2014 - June, 2014], location: [Torsby, Sweden], description: list( [Assembled air treatment units from start to finish, but also took on other tasks such as sheet metal work, producing heat exchangers, and more], [Being a certified electrician, my responsibilities also included electrical wiring, configuring, and testing the units], //[Further tasks included, i.e., working with sheet metal, producing heat exchangers, and electrical wiring] ) ) ) #let projEntries = ( gea: ( title: [Development of Game and Game Engine], // TODO: Change society: [University of Gothenburg/Chalmers], date: [2022], location: [Gothenburg, Sweden], // Tech stacks: C++, SDL2 (with SDL_image, SDL_mixer, SDL_ttf), Git, Visual Studio, Aseprite, UML description: list( [Developed the classic game Zaxxon with a custom game engine written in C++ based on the Entity Component System architecture.], // emphasizing modularity and flexibility. [Gained practical experience in game development, working with different kinds of game entities to implement the game Zaxxon, including a basic game loop, a GUI, objects like the player, enemies, projectiles, obstacles, and VFX.], [Improved my problem‐solving skills, as I started with little or no prior knowledge or experience of game engine architectures or the ECS design pattern. Systems were added, and more commonly improved or expanded with additional features as I saw necessary during the development of the game.] ) ), app: ( // Game Development/Interaction Design Project title: [Design and implementation of mobile app/game], society: [University of Gothenburg/Chalmers], date: [2022], location: [Gothenburg, Sweden], // Tech stacks: Typescript, React Native, Expo, Firebase, Git, VS Code, Figma description: list( [Worked in collaboration with Generation Pep on a game to encourage increased movement. Users get to take care of a dog by playing mini-games where they walk to the beat of music, earning them tokens they can use to customize their appearance.], [Written in Typescript using React Native with Expo for cross-platform development. Cloud storage of user profiles using Firebase.]
) ), unityAR: ( // Experimental technological gameplay design title: [Augmented Reality game in Unity], society: [University of Gothenburg/Chalmers], date: [2021], location: [Gothenburg, Sweden], // Tech stacks: Unity, C#, ARCore, Git, VS Code description: list( [Designed and developed a Jenga-inspired, 2-player AR game where the focus lies on strategic block placement to protect your castle as you take turns firing blocks at the opponent’s castle. You win if the opponent’s castle falls.], [Leveraged Unity assets for wooden blocks, robust player interaction, and dynamic ground plane anchoring.], [Optimized gameplay by using ray casting, scaling techniques, and occlusion for Android devices supporting the Depth API.] ) ), volvoce: ( title: [Software Engineering Project], society: [Mälardalen University/Volvo CE], date: [Winter 2019], location: [Västerås/Eskilstuna, Sweden], // Tech stacks: Native Android, Java, Firebase, Git description: list( [Developed a native Android app for Volvo CE to create and edit paths for their new autonomous machines to follow.], [Features included user authentication, database storage, listing paths with the possibility to filter, creating and deleting paths, editing paths via a visual interface or through G‐Code, and reserving machines.], [Maintained continuous communication with an external client, including weekly meetings and presentations, and implemented significant changes as the client’s requirements changed.] ) ) )
https://github.com/howardlau1999/sysu-thesis-typst
https://raw.githubusercontent.com/howardlau1999/sysu-thesis-typst/master/chapters/ch03.typ
typst
MIT License
= Outlook

At present this template still has a few shortcomings that await further improvement:

- The bibliography format, in particular the formatting of Chinese references, does not fully comply with the university's regulations. #link("https://discord.com/channels/1054443721975922748/1094796790559162408/1094928907880386662", "This conversation on Discord") indicates that the relevant Typst functionality is still under development. Once the corresponding interfaces are made public, this template will be adapted accordingly.
- Footnotes are not supported yet.
- Support for the blind-review format needs to be completed.
- The behavior of the start-on-odd-pages option needs to be refined.
https://github.com/ssotoen/gridlock
https://raw.githubusercontent.com/ssotoen/gridlock/main/docs/gridlock-manual.typ
typst
The Unlicense
// typst c docs/gridlock-manual.typ --font-path docs/fonts --root ../ --pdf-standard a-2b #let typst-toml = toml("../typst.toml") #let project-version = typst-toml.package.version #let project-authors = typst-toml.package.authors.at(0) #show "[version-placeholder]": project-version #import "../src/lib.typ": * #import "@preview/tidy:0.3.0" #set page( numbering: "1", footer: context { let current-page = counter(page).get().first() if current-page > 1 { align(center, text(9pt, numbering("1", current-page))) } } ) #set text( font: "Reforma 1918", stylistic-set: (4, 5), // long-tail Q in roman (4) and italic (5) ) #show math.equation: set text(stylistic-set: none) #set par( justify: true ) #set document( title: "Manual for the Typst package “gridlock” (Version\u{00a0}" + project-version + ")", author: project-authors, date: datetime( year: int(datetime.today().display().slice(count: 4, 0)), month: int(datetime.today().display().slice(count: 2, 5)), day: int(datetime.today().display().slice(count: 2, 8)), hour: 1, minute: 0, second: 0 ) ) #show raw.where(block: true): block.with( width: 100%, fill: gray.lighten(85%), inset: 3mm, radius: 1.5mm, ) #show footnote.entry: it => { let loc = it.note.location() numbering( "1.", ..counter(footnote).at(loc) ) it.note.body } #show outline:set text(number-type: "lining") #page[ #set align(center) #v(1fr) #text(28pt, weight: "bold")[The gridlock package] #text(16pt)[Grid typesetting in Typst] #v(3.5em) Version #project-version #h(3em) #datetime.today().display() #project-authors #v(3em) #link("https://github.com/ssotoen/gridlock")[github.com/ssotoen/gridlock] #v(2fr) #block(width: 33%)[#outline( indent: 1em, depth: 2, )] #v(3em) ] #set page( header: text(9pt, style: "italic")[The gridlock package #h(1fr) v#project-version] ) #let pageref(label) = context { let loc = locate(label) let nums = counter(page).at(loc) link(loc, numbering(loc.page-numbering(), ..nums)) } = About gridlock provides a way to do grid typesetting in Typst. It does this by setting a line height for running text and using this as an invisible grid. Blocks that don’t fit into a line, like headings and figures, are aligned so that the running text after them sits on the grid again. Check out the examples on pages~#pageref(<example>) and #pageref(<example-lines>). = Quick start ```typ #import "@preview/gridlock:[version-placeholder]": * #show: gridlock.with( paper: "a4", margin: (y: 76.445pt), font-size: 11pt, line-height: 13pt ) #lock[= This is a heading] #lorem(30) #figure( placement: auto, caption: [a caption], rect() ) #lorem(30) ``` The ```typc gridlock()``` function sets up the base line height that for the grid. The parameters shown in the example are the default values. If you’re happy with them, you don’t need to pass anything to the function: just do ```typ #show: gridlock.with()```. \ If you want to change the line height, make sure to set the margin so that the text area is an exact multiple of the new line height. Now you can use the ```typc lock()``` function to align any block to the text grid, like the heading shown in the example. Some elements---like the floating figure in the example above---are aligned automatically and do *not* need to be used with ```typc lock()```. You can find a complete list in the function’s description in the next chapter. 
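As a sketch of the margin advice above (the 14pt line height, 12pt font size, and margin value here are illustrative choices, not package defaults): an A4 page is 841.89pt tall, and 841.89pt - 2 × 70.945pt = 700pt = 50 × 14pt, so the text area stays an exact multiple of the line height.

```typ
#import "@preview/gridlock:[version-placeholder]": *

// A4 is 841.89pt tall; 841.89pt - 2 × 70.945pt = 700pt = 50 lines of 14pt,
// so the text area is an exact multiple of the 14pt line height.
#show: gridlock.with(
  paper: "a4",
  margin: (y: 70.945pt),
  font-size: 12pt,
  line-height: 14pt
)
```

The same arithmetic applies to any other line height: pick the margins so that the page height minus the top and bottom margins divides evenly by it.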
= Functions #tidy.show-module( tidy.parse-module(read("../src/lib.typ")), first-heading-level: 1, sort-functions: it => { ( "gridlock": 11, "lock": 12, "float-adjustment": 13 ).at(it.name, default: 99) } ) #show math.equation: set text(font: "New Computer Modern Math", stylistic-set: none) #show: gridlock.with( font-size: 11pt, line-height: 13pt ) #page( columns: 2 )[ #lock[= Example] <example> #set heading(outlined: false) Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. There is no need for special content, but the length of the words should match the language. This is the second paragraph. Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? #lock[== This is a long heading spanning multiple lines] Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. There is no need for special content, but the length of the words should match the language. #footnote[ And here we have a footnote. Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. ] #quote(block: true)[ This is a block quote. And after the second paragraph follows the third paragraph. ] Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. There is no need for special content, but the length of the words should match the language. #lock[$ x = (-b ± sqrt(b^2 - 4 a c))/(2a) $] #h(13pt)After this fourth paragraph, we start a new paragraph sequence. Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. - A bulleted list + Indented - Really? Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. There is no need for special content, but the length of the words should match the language. 
#figure( placement: top, caption: [#text(style: "italic")[The Great Wave off Kanagawa] #box[by <NAME>okusai]], image("assets/Tsunami_by_hokusai_19th_century.jpg", width: 217pt) ) Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? ] #page( columns: 2, background: stack( dir: ttb, let n = 0, while n < 53 { v(13pt) line(stroke: 0.1pt, length: 453.56pt) n += 1 }, ) )[ #lock[= Example with grid lines] <example-lines> #set heading(outlined: false) Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. There is no need for special content, but the length of the words should match the language. This is the second paragraph. Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? #lock[== This is a long heading spanning multiple lines] Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. There is no need for special content, but the length of the words should match the language. #footnote[ And here we have a footnote. Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. ] #quote(block: true)[ This is a block quote. And after the second paragraph follows the third paragraph. ] Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. There is no need for special content, but the length of the words should match the language. #lock[$ x = (-b ± sqrt(b^2 - 4 a c))/(2a) $] #h(13pt)After this fourth paragraph, we start a new paragraph sequence. Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. - A bulleted list + Indented - Really? Is there no information? Is there a difference between this text and some nonsense like “Huardest gefburn”? Kjift—not at all! A blind text like this gives you information about the selected font, how the letters are written, and an impression of the look. This text should contain all letters of the alphabet and it should be written in the original language. 
There is no need for special content, but the length of the words should match the language. #figure( placement: top, caption: [#text(style: "italic")[The Great Wave off Kanagawa] #box[by <NAME>usai]], image("assets/Tsunami_by_hokusai_19th_century.jpg", width: 217pt) ) Hello, here is some text without a meaning. This text should show what a printed text will look like at this place. If you read this text, you will get no information. Really? ]
https://github.com/justmejulian/typst-documentation-template
https://raw.githubusercontent.com/justmejulian/typst-documentation-template/main/theme/cover.typ
typst
#import "constant.typ": body-font, sans-font #let cover( title: "", subtitle: "", program: "", school: "", supervisor: "", advisor: "", author: "", expert: "", proofReader: "", ) = { set page( margin: (left: 30mm, right: 30mm, top: 20mm, bottom: 40mm), numbering: none, number-align: center, ) set text( font: body-font, size: 12pt, lang: "en" ) set par(leading: 1em) // --- Cover --- align(center, image("../figures/logo.png", width: 80%)) v(15mm) align(center, text(font: sans-font, 2em, weight: 700, title)) v(5mm) align(center, text(font: sans-font, 1.5em, weight: 400, subtitle)) v(15mm) align(center, text(font: sans-font, 1.3em, weight: 100, program)) v(5mm) align(center, text(font: sans-font, 1.3em, weight: 100, school)) v(15mm) align(center, box( align(left, grid( columns: 2, column-gutter: 2em, row-gutter: 1em, strong("Author: "), author, strong("Version: "), datetime.today().display(), strong("Advisor: "), advisor, strong("Expert: "), expert, strong("Proof-Reader: "), proofReader, ) ) ) ) }
https://github.com/Anastasia-Labs/project-close-out-reports
https://raw.githubusercontent.com/Anastasia-Labs/project-close-out-reports/main/f10-plug-and-play-01-closeout-report/video-transcript/plug-and-play-01-video-script.typ
typst
#let image-background = image("../../images/Background-Carbon-Anastasia-Labs-01.jpg", height: 100%, fit: "cover")
#let image-foreground = image("../../images/Logo-Anastasia-Labs-V-Color02.png", width: 100%, fit: "contain")
#let image-header = image("../../images/Logo-Anastasia-Labs-V-Color01.png", height: 75%, fit: "contain")
#let fund-link = link("https://projectcatalyst.io/funds/10/f10-developer-ecosystem-the-evolution/plug-and-play-smart-contract-api-a-game-changing-platform-to-deploy-open-source-contracts-instantly")[Catalyst Proposal]
#let git-link = link("https://github.com/Anastasia-Labs/plug-n-play-contracts")[Main Github Repo]
#let maestro-link = link("https://www.gomaestro.org/smart-contracts")[Maestro Platform]

#set page(
  background: image-background,
  paper: "a4",
  margin: (left: 20mm, right: 20mm, top: 40mm, bottom: 30mm)
)

// Set default text style
#set text(15pt, font: "Montserrat")

#v(3cm) // Add vertical space

#align(center)[
  #box(
    width: 60%,
    stroke: none,
    image-foreground,
  )
]

#v(1cm) // Add vertical space

// Set text style for the report title
#set text(22pt, fill: white)

// Center-align the report title
#align(center)[#strong[Plug-and-play-01]]

#set text(18pt, fill: white)

// Center-align the report subtitle
#align(center)[#strong[Project Closeout Report Script]]

#v(5cm)

// Set text style for project details
#set text(13pt, fill: white)

// Display project details
#table(
  columns: 2,
  stroke: none,
  [*Project Number*], [1000149],
  [*Project manager*], [Maestro Team & Anastasia Labs],
  [*Date Started*], [December 23, 2023],
  [*Date Completed*], [May 31, 2024],
)

// Reset text style to default
#set text(fill: luma(0%))

#show link: underline
#set terms(separator: [: ], hanging-indent: 18mm)
#set par(justify: true)

#set page(
  paper: "a4",
  margin: (left: 20mm, right: 20mm, top: 40mm, bottom: 35mm),
  background: none,
  header: [
    #align(right)[
      #image("../../images/Logo-Anastasia-Labs-V-Color01.png", width: 25%, fit: "contain")
    ]
    #v(-0.5cm)
    #line(length: 100%, stroke: 0.5pt)
  ],
)

#v(20mm)

#show link: underline
#show outline.entry.where(level: 1): it => {
  strong(it)
}

// Initialize page counter
#counter(page).update(0)

#outline(depth: 2, indent: 1em)

#pagebreak()

#set text(size: 11pt) // Reset text size to 11pt

#set page(
  footer: [
    #set text(size: 11pt, fill: gray)
    #line(length: 100%, stroke: 0.5pt)
    #v(-3mm)
    #align(center)[
      *Anastasia Labs – Plug-and-play-01*
      #v(-3mm)
      Project Closeout Report Script
      #v(-3mm)
      // Copyright ©
      // #set text(fill: black)
      // Anastasia Labs
    ]
    #v(-6mm)
    #align(right)[
      #counter(page).display( // Page numbering
        "1/1",
        both: true,
      )
    ]
  ]
)

// Display project details
#set terms(separator: [: ], hanging-indent: 18mm)

#align(center)[
  #set text(size: 16pt)
  #strong["Plug-and-play Smart Contract API: A game-changing platform to deploy open-source contracts instantly"]]
\
#text(size: 18pt)[*Slide 1*]

= Introduction
\
Hello, Cardano community! I'm <NAME> from Anastasia Labs. Today, I am pleased to present the closeout report for our Catalyst Fund 10 project titled:

#text(size: 18pt)[*Slide 2*]

“Plug-and-Play Smart Contract API: A Game-Changing Platform for Deploying Smart Contracts Instantly.”

In this presentation, I'll cover why we embarked on this project, how we executed it, and the outcomes we achieved. Let's dive in!
\
// Which challenge did you enter and why?
#text(size: 18pt)[*Slide 3*]

== Project Context and Importance
\
First, a bit of context: the Cardano ecosystem has grown significantly, but developers face challenges in deploying dApps due to the lack of accessible platforms for launching smart contracts. This limitation has hindered the broader adoption of Web3 technologies. The complexity of building secure and reliable smart contracts, combined with insufficient security expertise, poses additional risks.

To address these issues, Maestro and Anastasia Labs collaborated on a project to simplify smart contract deployment. Our solution offers ready-to-deploy smart contract APIs, allowing developers to use composable and reusable contracts without delving into complex on/off-chain code. This approach aims to unlock the potential for Web3 adoption by providing a library of open-source contracts that empower developers to innovate and build on Cardano with ease.
\
// What was the approach you submitted in your proposal application which was eventually funded?

#text(size: 18pt)[*Slide 4*]

= Project Objectives
\
Our primary objectives were:

- To design and develop a comprehensive library of secure and reusable smart contracts. We identified the Single Asset Staking, Linear Vesting, and Direct Offer contracts as our first set of contracts to focus on.
\
#text(size: 18pt)[*Slide 5*]
\
- To provide contracts that have been thoroughly tested to reduce vulnerabilities to smart contract exploitation, thereby enhancing the security of Cardano decentralized applications (DApps).
- To deliver ready-to-use Smart Contract APIs, along with comprehensive documentation and user-friendly tutorials, and to simplify the development process with standardized interfaces and best practices.
\
// Please explain any particular technical solutions you proposed and the amount of funding you received.

#text(size: 18pt)[*Slide 6*]

= Execution and Milestones
\
We approached the challenge by researching the projects likely to have the most impact, and began by structuring our project into five key phases, each critical to achieving our objectives.

- Phase 1: Design and Development
- Phase 2: Preparation Phase: Off-Chain SDKs Building
- Phase 3: Integration Phase: Smart Contract API Integration
- Phase 4: Testing Phase: Smart Contract API Testing
- Phase 5: Comprehensive Documentation and Community Engagement
\
#text(size: 18pt)[*Slide 7*]

== Phase 1: Design & Development
\
In this phase, we successfully developed the three essential smart contracts:

- *Single Asset Staking Contract:* To enable users to collectively stake digital assets and distribute rewards fairly.
- *Linear Vesting Contract:* To provide secure, customizable mechanisms for gradual asset release.
- *Direct Offer Contract:* To facilitate peer-to-peer trading of assets.

These are available on our #git-link and on #maestro-link.
\
#text(size: 18pt)[*Slide 8*]

=== Single Asset Staking Contracts
\
This contract enables users to collectively stake digital assets and distribute rewards fairly, in a completely on-chain and trustless manner. As the name suggests, it allows a single asset, which can be any Cardano Native Fungible Token, to be staked to earn rewards. The reward itself can be any Cardano Native Fungible Token.
\
#text(size: 18pt)[*Slide 9*]

=== Linear Vesting Contracts
\
With so many projects launching on Cardano, there often arises a need to vest a project's own tokens to either their core team or supporters of the project.
Vesting assets to a beneficiary in proportion to the elapsed time, i.e. linear vesting, is a very straightforward and common preference. While there definitely exist more complex vesting requirements, we hope that the contract here will help those looking for something simple for their use case, or serve as a reference for those who want to build more elaborate arrangements.

Our Linear Vesting contract aims to provide a reliable mechanism for releasing Cardano Native Tokens gradually over a specified timeframe, with customization options to fit different requirements.
\
#text(size: 18pt)[*Slide 10*]

=== Direct Offer Contracts
\
The Direct Offer project provides a Plutarch-based implementation of a smart contract enabling peer-to-peer trading, in a trustless manner, for the Cardano blockchain. Without the need for a trusted third party or a Decentralized Exchange (DEX), a user can put up any Cardano native asset(s) for sale in exchange for any user-specified native asset(s).
\
#text(size: 18pt)[*Slide 11*]

== Phase 2: Preparation Phase: Off-Chain SDKs Building
\
We built and made available SDKs for each smart contract, simplifying the integration process for developers. These SDKs ensure a seamless connection between the blockchain and off-chain applications, enhancing the utility of our smart contracts.
\
#text(size: 18pt)[*Slide 12*]

== Phase 3: Integration Phase: Smart Contract API Integration
\
In collaboration with Maestro, we integrated our smart contracts into a fully managed service, enabling developers to interact with them via APIs. This phase was crucial for demonstrating the real-world application of our contracts and their integration within existing infrastructure.
\
#text(size: 18pt)[*Slide 13*]

== Phase 4: Testing Phase: Smart Contract API Testing
\
We conducted thorough testing for all three smart contracts in our library to ensure their robustness. This rigorous process involved comprehensive code reviews and unit tests, all integrated into our continuous integration/continuous deployment (CI/CD) pipeline. This testing was essential to validate the functionality, reliability, security, and performance of our solutions.
\
#text(size: 18pt)[*Slide 14*]

== Phase 5: Documentation and Community Engagement
\
We provided detailed, user-friendly, and comprehensive documentation and tutorials to facilitate easy adoption and implementation of our smart contracts. The documentation is designed to cater to developers of all skill levels, ensuring a smooth onboarding process.

Furthermore, the team at Maestro showcased the capabilities of our solutions at prominent events, such as the 2023 Cardano Summit in Dubai.
\
#text(size: 18pt)[*Slide 15*]
\
Here’s an example execution demo of our Single Asset Staking Contract.
\
#text(size: 18pt)[*Slide 16*]
\
Detailed guides and practical examples of our Single Asset Staking Contract can be found in the following links:

- Plutarch Contract: https://github.com/Anastasia-Labs/single-asset-staking
- SDK: https://github.com/Anastasia-Labs/single-asset-staking-offchain
- API: https://docs.gomaestro.org/category/single-asset-staking-1
\
#text(size: 18pt)[*Slide 17*]
\
Here’s an example execution demo of our Linear Vesting Contract.
\
#text(size: 18pt)[*Slide 18*]
\
Further details, guides, and practical examples of our Linear Vesting Contract can be found in the following links:

- Plutarch Contract: https://github.com/Anastasia-Labs/linear-vesting
- SDK: https://github.com/Anastasia-Labs/linear-vesting-offchain
- API: https://docs.gomaestro.org/category/linear-vesting-1
\
#text(size: 18pt)[*Slide 19*]
\
Here’s an example execution demo of our Direct Offer or Direct Swap Contract.
\
#text(size: 18pt)[*Slide 20*]
\
More details, guides, and practical examples of our Direct Offer/Direct Swap Contract can be found in these links:

- Plutarch Contract: https://github.com/Anastasia-Labs/direct-offer
- SDK: https://github.com/Anastasia-Labs/direct-offer-offchain
- API: https://docs.gomaestro.org/category/direct-swap-1
\
#text(size: 18pt)[*Slide 21*]

= Achievements and Outcomes
\
We're pleased to highlight the key achievements of our project, which provide a strong base for further innovation in the Cardano ecosystem:

- *Implementation of Secure and Modular APIs:* Our team delivered secure, modular, and reusable APIs for Single Asset Staking, Linear Vesting, and Direct Offer Contracts. These APIs simplify the development process by reducing the time needed to build and deploy smart contracts, allowing developers to focus on more complex, application-specific logic.
- *Quality Assurance:* We prioritized the robustness and reliability of our smart contracts by conducting thorough code reviews and comprehensive unit tests. This extensive testing ensures our contracts are secure, reliable, and ready for real-world deployment.
- *Seamless Integration:* To further ease development, we successfully integrated our smart contracts with Maestro, providing a unified interface for contract deployment and management. This integration simplifies access and interaction with our contracts, making the development process more efficient.

#text(size: 18pt)[*Slide 22*]

- *Extensive Documentation and Tutorials:* As demonstrated, we are proud to have enriched the Cardano community with valuable resources by providing comprehensive documentation and tutorials to help developers quickly understand and utilize our solutions.
- *Industry Collaboration and Community Engagement:* We are proud to have collaborated with Maestro, a key player in the Cardano ecosystem, who also deployed the Linear Vesting Contract on the Maestro platform; it was already demoed at the Cardano Summit in Dubai.
- *Accessible via Demeter:* Beyond Maestro, we're happy to say that anyone can try our Single-Asset Staking and Linear Vesting apps on Demeter.run as well.
\
#text(size: 18pt)[*Slide 23*]

= Key Learnings and Challenges
\
Throughout our project, we faced several challenges that provided us with valuable insights. Here are the key learnings we've gathered:

- *First, Modular Design and Security.* We've seen the effectiveness of modular, reusable smart contracts.
These contracts not only simplify the development process but also enhance security. By providing pre-built, battle-tested contracts, we allow developers to focus more on innovation. Rigorous testing has ensured these contracts are robust against vulnerabilities.
- *Second, Simplifying Integration.* We've learned that simplifying the integration process through APIs is crucial. It lowers the barriers for developers, whether they're part of small teams or large enterprises. This simplicity is key to fostering ecosystem growth and encouraging more developers to build on Cardano.
- *Third, Balancing Customization with Standardization.* Striking a balance between offering customizable solutions and maintaining standardized interfaces is essential. This balance provides the necessary flexibility while ensuring consistency, which is critical for catering to a broad range of developer needs.
- *Lastly, Comprehensive Documentation and Monitoring.* Detailed documentation and user-friendly tutorials are vital for effective developer onboarding. Additionally, implementing monitoring tools for API usage helps us understand user needs and make data-driven improvements. This ensures we can continually refine our offerings to better serve the community.
\
#text(size: 18pt)[*Slide 24*]

= Future Prospects and Community Impact
\
- *Expand Smart Contract Library:* We aim to develop and integrate additional smart contracts, driven by developer feedback and evolving ecosystem needs. This will ensure that our platform remains a valuable resource for the growing Cardano developer community.
- *Enhance API Documentation:* We will improve our API documentation by adding more examples, use cases, and best practices, making it even easier for developers to utilize our services effectively.
- *Optimize Performance and Scalability:* As the use of our smart contract APIs grows, we will monitor performance closely and optimize to ensure high availability and responsiveness, even under heavy load and as the Cardano ecosystem evolves.
- *Collaborate with Ecosystem Partners:* We plan to engage with other projects and partners within the Cardano ecosystem to explore integration opportunities, expanding the reach and adoption of our smart contract APIs.
- *Continuously Gather Feedback:* We will maintain an open dialogue with the developer community, regularly soliciting feedback and suggestions. This input will guide our future development, ensuring we continue to meet the needs of Cardano builders.
- *Long-Term Commitment:* We will keep our libraries up-to-date with the ever-evolving Cardano ecosystem, ensuring our data structures remain relevant. By maintaining the open-source approach and transparent development process, we believe this will inspire confidence and trust within the community.
\
#text(size: 18pt)[*Slide 25*]

= Conclusion
\
In conclusion, we're proud to have introduced the 'Plug and Play 01' smart contract library to the Cardano community. Together with Maestro, we have managed to develop secure, modular, and reusable smart contracts, create comprehensive off-chain SDKs, and provide extensive documentation, ultimately supporting the creation of no-code DApps, making it easier for a wider range of users to build on Cardano.

These achievements enhance the capabilities of DApps on Cardano, offering developers valuable tools to innovate and secure their applications. The no-code DApps feature, in particular, opens up blockchain development to those without programming skills, democratizing access to this technology.
We believe our work will inspire further innovation and drive adoption within the Cardano ecosystem. We're committed to supporting the developer community and continually improving our offerings to meet their evolving needs, ensuring that our library remains a vital resource for the platform's growth. Thank you for your support and interest in our work. \ #text(size: 18pt)[*Slide 26*] \ For more information, visit our GitHub repository at: https://github.com/Anastasia-Labs/plug-n-play-contracts If you want to know more about Anastasia Labs or contact us, you can visit - Our website at https://anastasialabs.com/ - Maestro Platform https://www.gomaestro.org/smart-contracts - Follow us on twitter at https://x.com/AnastasiaLabs - Join our discord community: https://discord.com/invite/8TYSgwthVy \ #text(size: 18pt)[*Slide 26*] \ See you next time! Thank You and Goodbye! #v(10mm)
https://github.com/kdog3682/2024-typst
https://raw.githubusercontent.com/kdog3682/2024-typst/main/src/render-dialogue.typ
typst
#import "base-utils.typ": * #import "styles.typ" // each dialogue item should be a full-width item // when you // unless using a 2 column layout ... then should be half width // and the name should be placed vertically above #let render-dialogue(data) = { let scope = ( finale: (x) => rect(..styles.finale, str(x)) ) let runner(item) = { let a = heading(item.speaker, level: 3) let b = markup(item.text, scope) if has(item, 'question') { let c = render-question(item) return (a, block(c, b)) } return (a, b) } let items = data.map(runner).flatten() table(..items) } #render(json("dialogue.json")) let render-question(item) = { return mc-item(..item) let source = item.at('source', default: none) let question = item.at('question', default: none) let answers = item.at('answers', default: none) let choices = item.at('choices', default: none) if choices != none { } let markdown = markup(question) // add the ability to make it go // todo the tables ... } // the mc-item does not have its own numbering // this is correct #let mc-item(question, choices, answer, src: none, topic: none, layout: 'vertical', letters: 'ABCD', fill: none) { let num = 1 let top = { let expr = if has(src, 'qnum') { box([*Q#src.qnum* -- *#src.exam*\ ]) } else { bold(src) } expr if topic != none { h(1fr) box(text(fill: blue, blue-emph(topic))) } if topic != none or src != none { v(-5pt) line(length: 100%) } } let vertical = layout == 'vertical' let scope = (:) let styles = if vertical { (:) } else { (width: 275pt) } // this is the question let a = block(..styles, markup(question)) // these are the choices let b = block({ // this is really cool // it implies you can use any item for the enum numbering // doesnt have to abcd or even letters for that matter set enum(numbering: (it) => { let x = letters.at(it - 1) + "." text(weight: "bold", x) }, spacing: 15pt, tight: false, body-indent: 10pt) enum(choices.map((x) => markup(x, scope))) }) let val = if vertical { } else { block(breakable: false, fill: fill, grid(column-gutter: 20pt, columns: (300pt, 1fr), a, b)) } } #let blue-emph(s) = { return emph(text(size: 8pt, resolve-text(s))) }
https://github.com/FilipSolich/CV-Template
https://raw.githubusercontent.com/FilipSolich/CV-Template/main/README.md
markdown
The Unlicense
# CV Template CV Template written in Typst ![Example output](example.png) ## Compile PDF ```sh typst compile --input jobTitle="Developer" --font-path fonts example.typ cv.pdf ```
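The `--input` flag makes the value available to the template through Typst's `sys.inputs` dictionary. A minimal sketch of how `example.typ` might read it (the variable name is hypothetical):

```typst
// Read the job title passed via `--input jobTitle=...`,
// falling back to a default when none is given.
#let job-title = sys.inputs.at("jobTitle", default: "Developer")

#heading(job-title)
```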
https://github.com/Area-53-Robotics/53E-Notebook-Over-Under-2023-2024
https://raw.githubusercontent.com/Area-53-Robotics/53E-Notebook-Over-Under-2023-2024/giga-notebook/entries/lift/build-test.typ
typst
Creative Commons Attribution Share Alike 4.0 International
#import "/packages.typ": notebookinator #import notebookinator: * #import themes.radial.components: * #create-body-entry( title: "Build: Lift", type: "build", date: datetime(year: 2024, month: 1, day: 6), author: "<NAME>", witness: "<NAME>", )[ // Outline: // finish basic structure //The lifting lever (or arm) was completely built with the exception of custom //lexan hooks. Mount points for the arm and pneumatics were completely built. The //design was assembled completely. It was decided to rebuild the mount points to //increase structural stability. //The mount points for the arm were completely rebuilt. The mount points for the //pneumatics were partially built and assembled. It was decided that Alan would //finish building the mount points outside of practice. #grid( columns: (1fr, 1fr), gutter: 20pt, [ Once we had the design for the lift nailed down, we began to build it. + We unmounted the old flywheel, as it was in the way of the lift. This required us to take out the 4 screws holding it in place, 2 on the piece of cross bracing on the front towers of the drivetrain (wedge side), and two attached to standoffs on the rear of the drive (intake side). + We Then built the base for the mount of the lift. This is a very similar design to the base of the catapult mount. It consists of a piece of U-channel that we attach to the front drivetrain towers. + We slotted the two pieces of 11 hole c-channel into those mount points to serve as the base for our lift. + We drilled holes in the c-channel with a 1/2" drill bit to make room for the high strength axle. + We built the body of the lift out of 20 hole c-channel and 17" hole half cut. + We added mount points for the piston towers in the same way that we did the lift towers. #admonition( type: "note", )[ This ended up being much more difficult than we anticipated due to how deep into the drivetrain these mounts need to be placed. ] 7. We then added the pistons to the piston towers, and attached them to the lift body. + We attached the flywheel to the lift body. ], [ #figure(image("./1-side.png", width: 95%), caption: "Isometric view") #figure(image("./1-compressed.jpg", width: 95%), caption: "Side view") ], ) ] #create-body-entry( title: "Build: Lift", type: "build", date: datetime(year: 2024, month: 1, day: 12), author: "<NAME>", witness: "<NAME>", )[ // Outline: // Remount flywheel #grid( columns: (1fr, 1fr), gutter: 20pt, [ Open testing the lift, we noticed that the center of gravity was not quite where we needed it to be to get the robot to balance perfectly. #image("./center-of-gravity.svg") We considered a few options to solve this problem, including counterweights to the back of the robot, but eventually we settled on moving the entire lift 2 holes forward. This would place the lift towers in the same place the catapult was, and the piston towers directly against the front (wedge side) drivetrain towers. Making this changed ended up being much harder than we thought it would be, and took the entire meeting to complete. Accessing the screws required to get the U-channel mount points into place was extremely difficult, and require removing much of the gears in the drivetrain. We were not able to complete this change, so we had to finish it next meeting. 
], [ #figure(image("./2-iso.jpg", width: 95%), caption: "Isometric view") #figure(image("./2-top.jpg", width: 95%), caption: "Top view") ], ) ] #create-body-entry( title: "Build: Lift", type: "build", date: datetime(year: 2024, month: 1, day: 13), author: "<NAME>", witness: "<NAME>", )[ // Outline: // New hang assist design // Change lift mount point // Finish building #grid( columns: (1fr, 1fr), gutter: 20pt, [ This meeting we finally got the lift mounted in the correct position. We also acquired some 3D printed decoration to put on our robot, supplied by one of our alumni. Once we had the lift in place we: + Remounted the flywheel to the lift with a more structurally sound piece of cross bracing. + Boxed the bracing on the flywheel to make sure it doesn't bend. + Mounted the pneumatic canisters right below the intake. This will help balance the weight of the robot, and keep them out of the way of the lift. + Added rubber bands running from the lift body to the drivetrain to assist the lift in pulling up the robot. We had to be careful in placing them, because we didn't want them to impede the elevation bar from entering the robot. #admonition( type: "build", )[ The lift is finally complete! We look forward to the point advantage this will give us. ] ], [ #figure(image("./3-top.jpg"), caption: "Top view") #figure(image("./3-side.jpg"), caption: "Side view") ], ) ]
https://github.com/Myriad-Dreamin/typst.ts
https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/fuzzers/corpora/text/linebreak-link_02.typ
typst
Apache License 2.0
#import "/contrib/templates/std-tests/preset.typ": * #show: test-page // Ensure that there's no unconditional break at the end of a link. #set page(width: 180pt, height: auto, margin: auto) #set text(11pt) For info see #link("https://myhost.tld").
https://github.com/RanolP/resume
https://raw.githubusercontent.com/RanolP/resume/main/cover.typ
typst
#import "modules/util.typ": * #import "modules/activity.typ": * #import "modules/components.typ": * #import "modules/github.typ": * #import "modules/solved-ac.typ": * #import "metadata.typ": metadata #set page(fill: color.rgb(0, 0, 0, 0)) #let theme = sys.inputs.at("theme", default: "light") #let palette = if theme == "light" { ( foreground1: color.rgb("#1f2328"), foreground2: color.rgb("#495057"), background1: color.rgb("#e6edf3"), link: color.rgb("#1c7ed6"), ) } else { ( foreground1: color.rgb("#e6edf3"), foreground2: color.rgb("#ced4da"), background1: color.rgb("#1f2328"), link: color.rgb("#74c0fc"), ) } #set page(paper: "a4", margin: 0pt) #set text( font: "Pretendard", fill: palette.foreground1, features: ("ss06",), fallback: true, ) #show heading: set text(size: 16pt) #align(center)[ = #text(size: 24pt)[#metadata.name.nickname / #metadata.name.real-korean#super[#upper[#metadata.name.real-english]]] #text(size: 12pt)[ #text(weight: 900, tracking: 2pt)[#metadata.role] #text(weight: 600)[\@] #text(weight: 700, tracking: 1pt)[#metadata.location] ] \ #icon("lucide/mail?color=" + palette.foreground1.to-hex()) #link("mailto:" + metadata.email)[#metadata.email] $bar$ #icon("lucide/phone?color=" + palette.foreground1.to-hex()) #link("tel:" + metadata.phone.join())[#metadata.phone.join(" ")] #text(size: 16pt, weight: 600)[ #set par(leading: 8pt) #metadata.bio.ko.title \ #text(size: 13pt)[#metadata.bio.en.title] ] #icon(if theme == "dark" { "skill-icons/github-dark" } else { "skill-icons/github-light" }) #link("https://github.com/" + metadata.social.github)[\@#metadata.social.github] $bar$ #icon("logos/twitter") #link("https://twitter.com/" + metadata.social.twitter)[\@#metadata.social.twitter] $bar$ #icon-solved-ac() #link("https://solved.ac/profile/" + metadata.social.solved-ac)[ #solved-ac-profile-short(metadata.social.solved-ac) ] ] #line(length: 100%, stroke: 1pt + palette.foreground1) #align(center)[ == 기술#super[Skills] #for row in ( ( tech-list.typescript--short, tech-list.javascript--short, tech-list.css, tech-list.react-and-react-native, tech-list.nextjs, tech-list.solidjs, tech-list.tailwindcss, tech-list.unocss, tech-list.eslint, ), ( tech-list.rust, tech-list.kotlin, tech-list.swift, tech-list.bash, tech-list.gradle, tech-list.git, tech-list.github, tech-list.github-actions, ), ) { set text(size: 8pt) enumerate( row.map(tech => ( icon( if theme == "dark" { tech.at("icon-dark", default: tech.icon) } else { tech.icon }, size: 16pt, bottom: 0pt, ), tech.label, )), ) } ] #workExpList( header: [ == 경력#super[Work Experiences] ], ( workExpEntry( from: datetime(year: 2023, month: 3, day: 20), to: datetime.today(), role: "프론트엔드 엔지니어", organization: "주식회사 라프텔(Laftel)", homepage: link("https://laftel.oopy.io")[laftel.oopy.io], )[ 애니메이션 OTT 서비스 라프텔에서 React와 React Native를 활용한 웹/앱 개발을 맡았습니다. 수행한 주요 업무는 다음과 같습니다. - Firebase를 활용한 A/B 테스트 - react-email과 tailwindcss를 활용한 이메일 템플릿 생성 및 관리, CI 연동 작업 ], ), ) #activityList( header: [ == 기타 활동#super[Other Activities] ], ( activityEntry( from: datetime(year: 2023, month: 11, day: 17), title: belonging([해커톤 멘토 $and$ 심사위원], [쿠씨톤]), )[ #link("https://kucc.co.kr/")[#text( fill: palette.link, )[#underline[KUCC]#sub[Korea University Computer Club]]]에서 주최한 2023년 쿠씨톤에서 해커톤 멘토 및 심사위원을 맡아 Django, React, Pygame 등을 사용하는 멘티들을 서포트하고, 작품을 심사했습니다. 
], activityEntry( from: datetime(year: 2022, month: 9, day: 20), title: "NYPC 2022 특별상", )[], ), ) #activityList( header: [ == 프로젝트#super[Projects] ], ( activityEntry( from: datetime(year: 2023, month: 10, day: 29), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("psl-lang/psl"), [ #tech-chips.rust ], ) ], )[ ], activityEntry( from: datetime(year: 2022, month: 8, day: 21), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("RanolP/crowdin-strife"), [ #tech-chips.rust #tech-chips.mysql ], ) ], )[ ], activityEntry( from: datetime(year: 2022, month: 1, day: 9), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("RanolP/measurrred"), [ #tech-chips.rust ], ) ], )[ ], activityEntry( from: datetime(year: 2021, month: 12, day: 10), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("RanolP/bojodog"), [ #tech-chips.typescript #tech-chips.webpack ], ) ], )[ ], activityEntry( from: datetime(year: 2021, month: 11, day: 27), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("RanolP/bojoke"), [ #tech-chips.typescript #tech-chips.vite ], ) ], )[ ], activityEntry( from: datetime(year: 2021, month: 1, day: 4), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("RanolP/rano-lang"), [ #tech-chips.rust #tech-chips.wasm ], ) ], )[ ], activityEntry( from: datetime(year: 2020, month: 10, day: 9), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("RanolP/dalmoori-font"), [ #tech-chips.typescript ], ) ], )[ ], activityEntry( from: datetime(year: 2020, month: 6, day: 21), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("solvedac/unofficial-documentation"), [ #tech-chips.openapi ], ) ], )[ ], activityEntry( from: datetime(year: 2020, month: 5, day: 13), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), link("https://github.com/hanzzok")[#icon("devicon/github", bottom: -1em / 6) hanzzok], [ #tech-chips.rust #tech-chips.wasm #tech-chips.typescript #tech-chips.nextjs ], ) ], )[ ], activityEntry( from: datetime(year: 2020, month: 4, day: 8), title: pad(top: -1em / 4)[ #grid( columns: (1fr, auto), gh-repo("RanolP/boj"), [ #tech-chips.typescript #tech-chips.playwright ], ) ], )[ ], ), ) #align(center)[ == 오픈소스 기여#super[Open Source Contributions] #for (url,) in metadata.oss-contribs { gh-pull-req(url) } #box(width: 15cm)[ #{ let pulls = metadata.oss-contribs.map(((url,)) => gh-pull(url)).sorted(key: pull => ( "none": 0, "OPEN": 1, "MERGED": 2, "CLOSED": 3, ).at(pull.at("state", default: "none"))) let groups = pulls.map(pull => pull.at("state", default: none)).dedup() for group in groups.filter(group => group != none) { [ #for pull in pulls.filter(pull => pull.at("state", default: none) == group) { [ #gh-pull-short( pull, full: metadata.oss-contribs.find(((url,)) => url == pull.url).at("full", default: false), ) ] } \ ] } } ] ] #align(center)[ #text(size: 10pt, fill: palette.foreground2)[ 상기 이력은 #datetime.today().display("[year]년 [month]월 [day]일") 기준입니다 ] ]
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/text/lorem-00.typ
typst
Other
// Test basic call. #lorem(19)
https://github.com/Otto-AA/definitely-not-tuw-thesis
https://raw.githubusercontent.com/Otto-AA/definitely-not-tuw-thesis/main/CONTRIBUTING.md
markdown
MIT No Attribution
# Contributing Any improvements and fixes are welcome! If you notice any inconsistencies with the official template, please open an issue. ## Development setup This project uses [Just](https://just.systems/man/en/) to have some convenience scripts (similar to `make`). You can view those in the [Justfile](./Justfile) and list them with running `just`. ## Testing We have some visual regression tests using [typst-test](https://github.com/tingerrr/typst-test). They sadly differed between CI and my local setup, so they are not enabled currently. To run them use `just test`. If a test fails, you will have pictures of the current rendering, the new one and an overlay of those two for comparison. If you want to update the images, use `just update`.
https://github.com/polarkac/MTG-Stories
https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/037%20-%20Ravnica%20Allegiance/003_The%20Principles%20of%20Unnatural%20Selection.typ
typst
#import "@local/mtgstory:0.2.0": conf #show: doc => conf( "The Principles of Unnatural Selection", set_name: "Ravnica Allegiance", story_date: datetime(day: 06, month: 02, year: 2019), author: "<NAME>", doc ) "Are you sure he's not dead?" Miko says, poking the elder monk in his forehead. The monk sits as still as a statue, strings of kelp rising from his outstretched fingertips, a small patch of coral polyps clinging to his cheek. Several crabs now call the crevices beneath his crossed legs their home. "No touching, Miko," I say. "Please stop acting like an upworlder and reserve some respect for your elders." "How long has he been sitting like this?" asks Chessa, my star pupil, timidly waving her hand in front of the monk's distant stare. The only things that move are the flecks of plankton floating in the ocean water. "#emph[That] is the riddle. Anyone care to solve it?" I ask. I reverse my fin rotation and drift backward, giving my clutch room to examine the monk from all angles. "Days? Weeks? Years?" I laugh as the students swim about, darting this way and that as they turn over shells in hopes of finding a clue. Young ones, always in such a hurry, like a school of razorfish. My smile fades. I love these students, I do, and I'm proud of all the work they've put in, but doubt grips me hard as I count them, over and over, making sure no one's wandered off. Just eight students this year, not much of a clutch. Last year, there were fourteen, and the year before, twenty-two. And before that, I was turning students away for fear I wouldn't be able to keep an eye on them all. Times are changing, and parents are seeking out clutch tenders who side with the Adaptationists—those using bioengineering and other Upwelling principals that have floated down into our oceans through the Simic zonots. I've tried to avoid the political meanderings of the upworlders as much as I can, clinging to the ways of our ancestors, and training students to protect our waters with the same techniques we've used for millennia. #strong[Zonots] A zonot is a huge sinkhole that leads all the way down to the Ravnican oceans, and each serves as a distinct Simic habitat—sort of an oversized, inverted skyscraper. Each zonot has a different culture, ecosystem, and racial distribution, as well as a speaker. Kaszira sits slumped on the fringes of the class, moping as always. I've seen the way she eyes the students in the other clutches, admiring their lab-elongated fins that increase swimming speeds, their high-end skin adaptations that grant them camouflage rivaling the best ocean mimics, or their enhanced vision that enables them to spot invaders before they infiltrate merfolk territory. Some parents have even supported more advanced modifications such as claws to help defend our waters, but their numbers are still few, thank the gods. "Kaszira," I call. "Come over, you're missing out on the lesson." She doesn't respond. I've tried over and over to connect with Kaszira, but her heart just isn't in it. As I swim closer, her eyes flick toward me, then focus on something in front of her. A zooplankton. A dead one. Kaszira's eyes cross as the tiny carcass settles upon her nose. She looks so much like Utopian Speaker Zegana—a younger, more petulant version of our leader, sure, but the resemblance is uncanny: a showy spray of fins (blue with iridescent bands), a thinness to her body that would be unbecoming on most merfolk, but she wears it well. I'd dare call her regal if she'd bother not to slouch so much. 
As a second dead plankton settles upon her skin, I realize, she's not moping. She's observing. I stop and tread softly in place, not wanting to disturb her. A third plankton lands upon her, and then she turns to me and says flatly, "Seven weeks. That's how long the elder monk has been meditating." My eyes widen. How could she possibly have seen the clue to the riddle from this far away? The children were meant to measure the width of his fasting bands and compare those to the width of his dorsal bands and come up with forty-seven days. Just shy of seven weeks. "I was going to say that!" says Chessa, swimming over and presenting her rockfish spine as evidence, accurate as any upworlder ruler. "I swear on my grandmother's gills!" "Yeah, I was going to say it, too," Miko mocks. Poor kid hasn't solved a single riddle since he's been under my care, but what he lacks in observation skills, he makes up for in courage. The other students all love his antics and join in, laughing. I hush them. "Kaszira was first with her answer. Please allow her to explain." "In ten minutes, seven dead plankton landed on me. Which works out to forty-two per hour. And with an average zooplankton size of one-eighth of a rockfish spine, it'd take about a thousand of them to make a uniform coating on an undisturbed surface the size of a square-fin. The coating on the monk's skin sits at six layers at its thickest points—his nose, the tips of his cheekbones are the most obvious. That works out to seven weeks. Or just shy of it." She swats the plankton from her face, then the glazed look of boredom settles back onto it. "Very keen observations. We'll make a fine protector out of you yet!" I say, hoping my encouragement will spark empathy in her heart. Oh, how I wish for her to wonder about the enormity of the sacrifice the monk has made in all that time, but none of that comes. I sigh. "Now, the other way you could have determined this—" I reel out my own fish spine, a bronzed antique with pearls set along the ridge, and get ready to show the class the traditional way to solve the riddle. "Woah!" Miko says, jaw dropped. It takes a moment for me to realize he's not marveling at my ruler but at <NAME>, senior clutch tender and bane of my existence. I'd heard the rumors, but I couldn't believe he'd actually gone and done it. His entire torso looks to be crustacean in origin, now, rust red and bumpy all over, his arms ending in enormous pincers. "Fine current you've got here, Medge," he says to me, flexing his new body parts and fishing for a compliment. #figure(image("003_The Principles of Unnatural Selection/01.jpg", width: 100%), caption: [Growth-Chamber Guardian | Art by: <NAME>], supplement: none, numbering: none) "It was until you decided to swim into it," I mumble downstream, then wrench a smile onto my face. "Greetings, Ptero. You're looking dapper. Don't tell me~trimmed your fins?" He laughs as his clutch swims up behind him. Twenty-seven of them. And twenty-seven million years' worth of Utopian evolution between them if you added up all the Simic modifications. "Still teaching about fasting bands? I thought they'd nixed that from the curriculum centuries ago." "It still has relevance for those who wish to take the time to learn it," I grumble. "Well, I'm sure you have plenty of time to spend one-on-one with your pupils. A blessing from the old sea gods, for sure! I've got my claws completely full now that my class is absolutely brimming with students. 
Only sixty-two protectorate positions are open this year, and twenty-four of mine are sure bets—masters of empathy, courage, and observation. But now that I've gotten a good look at the competition, I'm thinking I can place #emph[all] of them." "Sixty-two spots?" I bristle, my fins flexing in agitation. "I thought there were eighty." "The Guardian Project has decided to send in some reinforcements. They're taking eighteen spots. Frog mutants, I think. They're breeding them like guppies these days!" "But this isn't a Simic issue. It's a merfolk issue!" They know next to nothing about the ways of the deep. "Vannifar's orders." Ptero shrugs. "See you at the winners' shell." He snaps a pincer at me, coming inches from nipping a fin. "That is, if any of yours show well enough." And with a whip of those muscular, lab-enhanced thighs, he and his clutch barrel off into the currents. Sixty-two spots. There are a hundred and seventy pupils between all the clutch tenders. Odds were, I'd sneak in at least a few, but with eighteen fewer spots, those are no longer a guarantee. If none of my students have a good showing, I might as well toss my shells to the surface right now. I'd be washed up. Done for. No one would ever entrust me with their children again. But I can't let my negativity rub off on my students. I need to instill all the confidence in them that I can. Ptero was right about one thing, I have the advantage of a small clutch size, and I intend to use it. I'd never dare to take twenty students outside the protection of merfolk territory. Or even fifteen. But eight is few enough that I can keep a close eye on all of them, and the experience of the open ocean will be worth it. To see an elusive krasis in action, to watch how they move. To see them attack. It could give my students a fin up during their showings. "Come on," I say, taking off so quickly that sand kicks up from the sea bed. The students raise their brows in curiosity. "Stick with me. Stay close!" We'll be safe, watching from afar. I wouldn't dare put any of them in actual danger. We swim deeper and deeper into the depths, following a forest of golden kelp stretched down below us, the resplendent coral castles of our home now looking like mere sandcastles upon the horizon. An amphibious beast lumbers, the resplendent benthid, one of the leviathans of the deep that call our sea caves home. We pass under it, Miko running his fingertips upon the beast's soft, slime-coated underbelly. The resplendent benthid has spent thousands and thousands of years with no natural predators. But now the unnatural kind lurk about~ Finally, we see the elusive krasis—an abomination of nature: claws, scales, and deadly spines running down its serpentine tail. The biomancers who'd created them had underestimated how tenacious they were, how smart they were, escaping their enclosures and finding their freedom in our oceans. They say they're fish mutants, but I've never seen a fish with a neck so thick and taut, or with a head baring the stare of an empty skull. I command the students to take cover in the kelp forest, and I press them further and further back as the krasis gets closer and closer to us. Suddenly, we're pinned up against the hull of an old wreck, waterlogged wood creaking at the slightest touch. We hold still. This is where we will make our observations, and pride swells within me as my students cross arms without my instruction, one of the first lessons I'd taught them, so long ago it seems. 
This technique conceals where one student ends and the next begins, fins overlapping, making us blend in with our surroundings. Then we watch, my heart pounding as the elusive krasis sets its sights on the resplendent benthid we'd just passed. Poor beast doesn't even know what hits him. The benthid manages to puff up in defense, vocal bubble shining bright red—the perfect target. A stinger erupts from the tip of the krasis's tail and strikes a killing blow on the beast's soft underbelly. Then the krasis gorges, clawed hands shoving flesh into its skeletal maw until it's had its fill. The bloodied water attracts other krasis, and they feast as well, and just when the carcass is picked clean minus a few undesirable bits, they spot another amphibious beast in the distance, and swim off after it. "That is what we're up against," I say. "We can protect the creatures that live within our territory, but out here, we haven't the resources, the time, to protect them all." "We could have tried to do something," Miko says, face drawn in horror. "Fluxmagic or some sort of fathom spell~" "Not out here. The danger is too great. You never know what is wading right around the—" "Passss~" rasps a voice from behind us—a dry, brittle, and eerie voice, like nothing that belongs in the sea. "Passs~" The class looks at Miko, expecting it's another of his practical jokes, but he shrugs, and says, "Wasn't me. I think it came from inside the wreck." He rubs his hands against the bow. Not Simic, from what I can tell, but definitely from the upworlds of Ravnica. "We should investigate," he says, one foot already stepping through the giant hole in the hull. "It's too dangerous," I say, pulling him back. "We'll report it to the Guardian Project and let them deal with it." "But the voice. What if they're hurt?" Kaszira says. "Passage~" rasps the voice again. I grimace. We are protectors of life. I've asked the students to set aside their call to action once today. To do it twice, and for the life of a living, breathing person, would all but negate the bravery I've worked so hard to instill upon them. "I will look inside the wreck. I'll need two volunteers to come with me." I look toward Chessa, my best student, but she looks away. I could really use her keen observation, but when her courage falters, so do her other skills. Miko thrusts his hand up, though. No big surprise. I've poured so much of my energy into trying to mold that kid into a proper protector, but his mind's got the constitution of seabed sand. Still, his heart is in the right place, and he never backs down from a fight~something that could come in handy should the situation take a turn for the worst. Kaszira's hand shoots up as well, which catches me by surprise. I've never seen her so interested in anything before, but then again, I'd never seen her around an ancient wreck from an exotic world. She wouldn't be my first choice. My second, or my third, but I consider her as well. I'm not proud to admit it, but ever since I learned she was Speaker Zegana's niece, I've kept a close eye on her, finding ways to encourage her natural strengths. She's a longshot for the winners' shell, but if she does well enough, perhaps the judges would be swayed by her lineage and pass her through. "I choose Kaszira," I say. "And Miko." Chessa looks relieved, but I don't let her off the hook and assign her to lead the watch group while we are inside. 
So Kaszira, Miko, and I swim into the mouth of the breached hull, into darkness, the iridescence of our fins bathing the surfaces with deathly quiet light. There's a small hold packed with storage barrels, and between them, various sea creatures have found suitable dens. A ladder missing most of its rungs leads up to the deck. We push at the hatch door and instead of opening, it crumbles into splinters. "Eighty years," Kaszira whispers, rubbing a finger through a thick coating of zooplankton skeletons upon the toppled beam that once held the ship's sails. "That's how long this wreck's been down here." "But the zonots haven't been open to the ocean that long," I say. "Yeah, check your calculations," Miko says, jabbing Kaszira in the ribs. "There's nothing wrong with my calculations," she says, punching him back. "Students. Concentrate." Miko looks around the deck, searching for something to break, but then stops dead right next to the helm. "Do you see that?" Barnacles cling to the ship's deck, the only thing that's keeping it from falling apart completely. But there's one area where the barnacles grow in an odd circular pattern. Miko presses his hand through the brittle wood. It gives, and when he pulls his hands back out, he's holding a golden medallion as big as his face, bearing the symbol of a bloated, eight-legged starfish. "This has got to be worth some shells!" he says, yanking at the rope it's attached to, but the knots hold firm. #figure(image("003_The Principles of Unnatural Selection/02.jpg", width: 100%), caption: [Treasure | Art by: <NAME>hm], supplement: none, numbering: none) "Passage~" the raspy voice says again. "Who's there?" I say, gathering the students toward me. "Show yourself!" I expect some merfolk prankster to swim out. I wouldn't put it above Ptero to orchestrate such an awful ruse, but when a figure emerges from the mangled collection of waterlogged wood that once was the stern of the ship, chills ravage me through and through. It's a portly figure, well-built for the near-freezing temperatures of these deep waters, clad in a simple gray frock. It has no discernable fins. Human, perhaps. I've seen a few such curious creatures, but they all either had genetic modifications or clunky apparatus to breathe beneath the water. They were awkward swimmers and moved with the grace of a drunken seahorse. This human doesn't move like that, though. Doesn't move much at all, in fact, but it's steadily coming nearer. A school of spooked angel barbs makes an escape from our intrusion, barreling right through the human's body, which now seems less substantial than jellyflesh. Miko lets loose a bloodcurdling battle cry, then drops the medallion and throws his hands up in defense against the human. "What is that? Some kind of ooze mutant?" he yells. The human locks eyes with Miko. "Passage~" it moans. "It's a ghost," Kaszira says, approaching the human like it's a wild ray. "It can't hurt us. I think." "What do you want from us?" I demand. "Hooome," he says, the word drawn out like his mouth has forgotten how to speak. "Passage home." "You're from the upworld?" Kaszira says excitedly. "The drylands?" The ghost nods. "Ship wrecked. Crew drowned. Except me." He passes a hand through his midsection. "Already dead." "Poor thing," Kaszira says, swimming too close to the ghost for my comfort. "Get back, Kaszira," says Miko, fists still drawn. "He's scared, can't you see? Down here, all alone for years and years." Kaszira sits upon a trunk, next to the ghost. 
"Just give him a moment to warm up. To remember what it's like to be with other people." Kaszira is showing empathy. And Miko is using his observation skills for a change. I know I should be thinking about getting my class as far away from this wreck as I can, but if exposure to this odd predicament is enough to shock these two into improving their weakest areas, they might #emph[both] have a chance at a good showing. It truly seems as if this ghost has no dominion over ocean life, and as such, doesn't seem to be an immediate threat. "What manner of beast are ye?" the ghost asks, each of the dry words slow and grating against my ears. "Haven't seen nothing like ye in all of Ravnica." "Merfolk, sir," I say. "Merfolk, eh? Ah, beasts of the water, then." He clears his throat several times, but it doesn't resolve the rasp. "I am seaman Andrik, most unfortunate sole survivor of the #emph[Heckless] . Ship was sunk by pirates, long ago. Sat at the bottom of the river for decades, my only pleasure watching the glimmer of sunlight reaching through the blue. Sad sight it was, but I didn't know how good I had it, until the riverbed started sinking, sinking. Swallowed the whole ship and me with it, down a hole and into this here dark and wretched hellscape. I'd given up on seeing a living soul some time ago." #figure(image("003_The Principles of Unnatural Selection/03.jpg", width: 100%), caption: [Island | Art by: Eytan Zana], supplement: none, numbering: none) "He fell through one of the zonots," Kaszira says, almost a whisper. "Sinkholes big enough to swallow up a vessel! Imagine!" I'm imagining~something quite improper. I imagine the confidence my students would gain by helping this poor ghost get back home, and the experience they'd pick up going on a real-world adventure. "We will help you with passage home," I say, "On the condition that you obey our commands. The sea is dangerous, and I won't have you putting me or my students at risk." "And on the condition we can keep this!" Miko raises up the medallion again, eyeing the rope tether impishly. If there's anyone who can break it loose, it's him. The ghost shakes his head. "I'd like you to have that. I really would, but I fear it be a family trinket of no real value. Fake gold. But on your other condition, I do consent. All commands will be obeyed, if only you can reunite me and my ship with the bright blue sky." "Aww," Miko says, laying the medallion back down. "Why don't you just swim back up to the surface? It's not really that far." "My conscience is too heavy, I fear. The remains of the crew are still aboard this ship, and I wouldn't feel fit making that journey without them. It's my sole wish that their families can see them to a proper burial." When I present the idea to the other students, half are excited about it, the other half, less so. "It will be dangerous," I say. "But if we hold by the ways of the protectors, we will pass safely. We know that 'empathy' is the first tenet of merfolk protectorates. Who deserves more empathy than a ghost, kept away from his people for nearly a hundred years? And if we show him no mercy, he'll likely sit here for a hundred more." Two more students agree to come, but there is one holdout: Chessa. "The risks are too great, Tender Medge," she says. "How will we get the ship past the krasis?" "We observe. We conceal. And if that goes wrong, we fight," Kaszira says, swirling her index finger in circles, creating a vortex of bubbles. Magic flows toward her, bright blue whirls of fine tendrils. 
"We know the spells. We're ready to use them. Is it not our calling to protect the creatures that need protecting?" The other students applaud Kaszira, and Chessa's fin colorings fade in embarrassment. "Of course," she says, trying to save face. "Of course." The students set to task, lightening the load of the ship, emptying the cargo hold of barrels. Miko and Kaszira go to lift the trunk on the deck to toss it over, but the ghost steps up to them. "Not that one," he says. "That trunk contains our sacred tomes, the Divine Contract, carved upon six white granite tablets, dutifully bearing of the pontifications of <NAME> himself! We were of a humble lot, sailing the rivers of Ravnica to spread the good word of Orzhova! And for shame, our ship was wrecked by the feckless whims of pirates." Miko drops his end of the trunk, a brow raised. "Okay~" he says, then whispers to me, "I kind of liked him better when he was just moaning." But then Miko gets the bright idea to scavenge the amphibious beast's vocal sac, one of the few bits leftover on the carcass, using it as a balloon to fill the hull with air and make it more buoyant. He putties the punctures with bass tar, and then we wedge the sac inside the boat and start to inflate it. We spend nearly an hour and a half of constant blowing, but the ship starts to shift in the ocean bed. Kaszira and a couple other students conjure the concealment spells, storing them within conch shells so we can use them in an instant on our passage to Zonot Five. It isn't the closest zonot, but I hear they allow all manner of visitors, so hopefully we won't have trouble surfacing there. Five students guide the boat, and I put my best observers at the bow, on the lookout for krasis. When we spot those threatening shadows passing over us, we veer in the opposite direction. The students work so well together, I'm overcome with a sense of calm. They've all claimed a spot in the winners' shell of my heart. As the water clears, the zonot comes into view, a bright cylindrical opening shimmering light from the surface, all rippling in the distance. Flecks of flotsam passing beneath the zonot catch the light. Then I realize that's not flotsam. It's people. And just as the immense scale of the zonot really starts to sink in, Miko yells "Krasis!" Darkness spreads overhead, a massive shadow. Part shark, part crab—all teeth and claws. "Conceal, conceal!" I say. And together, we draw upon the magic stored within the shells and press our hands against the hull of the ship. The old wreck fades into a rippling blue haze, like the water itself. The krasis passes right over us, coming within feet. Miko almost reaches up to touch it. Almost, but I shoot him a stare as cold as the sea depths. That was close. The ghost starts to fidget as we finally near the zonot. I'm nervous, too. I've only heard of them until now, and I denied that anything made by the upworlders could be as beautiful as they claimed it to be, but it is. Luminous green baubles line the sinkhole's walls, as if they were encouraged to grow that way instead of being stacked by hand. Plant life winds itself into the structure, providing natural support as well as natural beauty. The lab space above seems to be a mix of air and aquatics, and above that, thousands of people meander up a vast spiral stairway. "Wasn't all this when I came through the drain," the ghost says, eyeing the Guardians that hold watch at the base of the zonot. 
A group of three merfolk swim up to the frog mutant guards, surrender some coinage, and after a brief inspection, they are allowed through into the zonot. #figure(image("003_The Principles of Unnatural Selection/04.jpg", width: 100%), caption: [Simic Guildgate | Art by: <NAME>], supplement: none, numbering: none) "If we explain your predicament, I'm sure they'll grant us safe passage," Kaszira says. "Let's turn away," mutters the ghost. "We'll find another way up." "Your wreck has already lost half of its hull planks," Chessa says. "I don't think it'll last much longer. And we haven't any more concealment spells ready." "Chessa's right," I say. "We should proceed through the zonot. We've already risked so much to get here. The Guardians might look aboard, ask our names and occupations, nothing more. There's nothing to fear." "We cannot!" the ghost screams. The trunk rattles on the deck. Eddies of orange magic surge from the cracks in the lid, like the discharge of deep-sea vents. The waters swell against the ship, and its hull moans. The ghost's soft, tattered edges go hard as it absorbs all that magic, and we get a peek at something much more sinister. Something that #emph[definitely] poses a threat. The students stiffen their fins in response, ready for a fight. "Who are you really?" demands Miko. "And what's in that trunk?" He goes to open it, and the ghost shudders again, then completely sheds his humble visage. Gone is the tattered frock, and he now stands clad in layers upon layers of opulent robes with several necklaces that appear to be made of golden sand dollars. He glows around the edges, giving him a hostile aura. Waters whip around us, swirling like a riptide. Miko gets a good grip on the trunk and won't let go. The worn lid finally gives out, and the waters turn tornadic, a storm within the sea. Miko bangs his head on the debris and goes still, drifting off. I gasp and give up my hold on the boat to swim after him. The visibility is poor through the turbulence: bubbles and debris everywhere, but I've never lost a student, and I don't intend to lose one now. I find him, pull him close, and then fight my way back. The contents of the chest fly around us, definitely not sacred tomes, but treasure of some kind. Old and beautiful, each piece bears the brand of the Simic Combine. "You stole these," I say. "Your crew wasn't pontificating! You were pirating!" "He could have returned to the upworld decades ago," Kaszira says, "but he was too greedy to leave his treasure behind." "Or maybe he didn't leave because he #emph[couldn't] leave," says Chessa, frantically scraping the barnacles off the medallion. A vortex opens right next to her, trying to rip it from her grip. She holds tight, squinting at the writing. "It's some kind of contractual signet. It's bound to the boat, so he is, too." The water stills. "Put that down, fish," the ghost says, eyes enflamed. Chessa stares back at him, wedges a foot against the deck, then she and Kaszira pull until the entire plank anchoring the medallion's rope comes up. "I'd toss this overboard, but you don't deserve the honor of resting upon the ocean floor," Chessa says. I don't know what's gotten into her, but she swims off with the medallion, toward the krasis we'd just escaped. The ghost is tugged after her, tethered by old yet resilient magic. "Chessa!" I yell out. "Chessa." But she's so strong. So fast, so courageous, I'd never be able to catch up. "Let her go," Miko whispers, just barely conscious. "She needs this." 
Chessa's reached the krasis, all those sharp teeth gleaming. She teases it, looking like a suitable snack, then when the beast opens its maw, Chessa ducks out of the way and chucks the medallion inside. "Let's see how you like your new master," she says to the ghost as the beast's jaws snap, and the medallion finds its way down the gullet. The krasis's attention turns to the curiosity that is the ghost as Chessa swims away. Teeth gnash futilely through the apparition as he disappears inside the krasis, dragged behind the medallion. The wreck is obliterated, but the class gathers up the Simic artifacts in their arms. We swim up to the zonot and explain to the guardians what had happened. One of the guardians takes interest in a dark piece of iridescent metal with a filigree of shells and claws. She calls over her boss. The elf mage looks down, and his eyes go wide as he carefully picks it up. His mouth gropes for words before he finally says, "If I'm not mistaken, this is <NAME>'s keyrune. Stolen nearly a century ago. There's a popular exhibit about it here in the zonot. Come. I shall bring you to the museum curator, so you can tell your tale." #figure(image("003_The Principles of Unnatural Selection/05.jpg", width: 100%), caption: [Simic Keyrune | Art by: <NAME>], supplement: none, numbering: none) We step out of the water at the base of the zonot, so eager and so curious that even the sudden weight of the upworld pressing upon our bones doesn't slow us down. After traveling a few flights up, though, I wish I'd spent more time in the sea caves, acclimating my body to breathing air. I'm left feeling dizzy, trying to take in all the inventions created by the Simic. My people. Seeing the fruits of the Upwelling, maybe I understand it a little better now. We're fed and allowed to groom our fins in wading pools as the museum staff scurries about in preparation for the installation. Finally, the curator greets us, and we're given a tour of the museum. Many eras of Simic history are laid out before us. There's an entire section dedicated to the intellectual pursuits of Momir Vig, and the curator directs us to the crowning piece: the last cytoplast Vig had created before his downfall. It stands on a pedestal, surrounded by three guardians. The amorphous blob gleefully undulates under the spotlights, in an eternal wait to connect with some poor soul and manipulate their genetics. I shudder, then turn my attention toward the new exhibit to be unveiled~#emph[our discovery] ~in front of a throng of museum visitors. "Today is a very important day for us," the curator intones. "One of our lost treasures has been returned to us, and it is my honor to display it here, where it can be observed for centuries to come!" After a lengthy monologue, nearly everyone is peering over each other's shoulders, angling to get their first look at the keyrune used by Momir Vig. They've polished it, and it now shines so brightly, it almost hurts to look at it. Even the guardians watching over the cytoplast have turned their attention to witness such a monumental event. I notice Kaszira has stopped paying attention and is peering through the bubble glass of the next exhibit: a functional lab, with creatures floating in thick gel. Even through the distorted glass, I see them growing, changing, mutating. A shell starts to harden upon soft, young flesh. "If you want to undergo genetic modification," I say to Kaszira, "then I will appeal to your parents to have you begin treatments.
Upon completion, you can retrain for the protectorate under Ptero. Your parents will be sad that you didn't adhere to their Utopian ideas, but if this is where your heart leads you, then you should follow it." Kaszira shakes her head. "I don't want a mutation. And I've always seen the value of Utopian living, but it's not sustainable~not with the krasis out there. Perhaps, if I study up here in the zonots, I could figure out how we could change the fight. Maybe we could give resplendent benthids stronger skin or the ability to camouflage. We can give them a chance to survive. We can keep our Utopian ways and balance them with the ways of the Adaptationists." She makes so much sense. Her empathy showing will be unrivaled. And for the first time, I see Kaszira as her own person, not just the niece of a high-ranking official, who will one day make a great splash of her own. "You will make the Combine proud. You already make me proud." I'm proud of all my students: Chessa, Dimas, Laszlo, Saganderis, Fania, Zyanek, and~Miko? Where has he run off to? I spot him, his finger inches away from touching the now unguarded cytoplast. "Miko!" I yell. "Stop this instant!" He withdraws his finger and turns toward me, but one of his fins clips the pedestal, and the cytoplast tumbles from its perch. The amorphous blob of living cells falls right toward me. I try to move, but out here, in this oppressive, heavy air, my limbs fail me, and I don't have time to get out of the way. The cytoplast smacks me in the chest. The next moment, I feel an oozy substance crawling over my skin, sinking inside of me, seeking out pieces of me to mutate. Pressure builds. I feel eight of my fins thickening, lengthening. Suckerflesh erupts from their undersides, and upon their tips, bulbous mounds of flesh press out, then blink open. Light slips into my mind, accompanied by more images than I can process. I'm seeing the room from various angles, everything around me caught in my sight. Those things at my tentacle tips, they're eyes. Guardians approach, but before they can apprehend me, my class reacts to the threat and encircles me, a formidable shield of courage and observation, and if this is our fight, we are ready to make a stand. "Step aside," the guardian says to my students. "This merwoman has destroyed a priceless artifact." "And we've given you a new one. We're even. Let us pass," I demand. "Or else." "Or else, what?" the guardian asks, amused at the threat. I can say that I am not proud of what happens next. I blame it on the confusion of mutating so rapidly, of this too-dry air making my thoughts dizzy. A fight ensues, and after a round of broken fins and broken egos, my class and I are tossed into the zonot's brig. Perhaps it is a good thing knowing that our Combine is not fragile enough to be taken down by a group of preteen merfolk and their newly tentacled teacher. Finally, after hours of silence, the door to the brig opens, and in swims Utopian Speaker Zegana. In person, she is more regal than I'd ever imagined. "Kaszira, my niece," she says. "I have spoken with your parents, and they have expressed extreme displeasure with your actions here." "I am sorry, Aunty." Kaszira slumps forward, fins running limply down her back. "I'm afraid 'sorry' isn't going to undo the damage you have wrought," Zegana says. "You and your friends are to return to merfolk territory immediately and are hereby banned from the zonots." My heart sears. Kaszira will no longer be able to pursue her education in here.
She'll no longer be able to achieve her dream. I cannot let this pass. "If anyone is to be banned from the zonots, my dear speaker, please let it be me. I have put these children in danger, but it is their only desire to serve the Simic Combine in the best way they know how. Chessa here has overcome her fears and displayed an extreme act of courage. Miko has proven his observation skills. And Kaszira has not only shown empathy, she's found her calling. She wants to study in the zonots and take her knowledge back to the oceans, where she can help strengthen us all." "Is this true?" Zegana asks of Kaszira. Kaszira nods, now standing tall, straight as a rockfish spine. "It is, Aunty. More than anything." #figure(image("003_The Principles of Unnatural Selection/06.jpg", width: 100%), caption: [Zegana, Utopian Speaker | Art by: <NAME>], supplement: none, numbering: none) Zegana turns and leaves us without another word. Several minutes later, we're being led out of the cell, then out of the zonot, back into the ocean. I never thought I'd be so excited to see the seabed again. On the day of the showing, Ptero's crab claws quiver as he sees my class, their confidence like an aura around them. And sure enough, each of my students secures a place in the protectorate, except two. Miko~because, well, he's Miko, but he's come along further than any of my expectations, and I'll gladly have him repeat with me next year. Kaszira doesn't claim a space, either. She doesn't appear for her showing at all, in fact. But as my students take their spots upon the pearlescent center stage that is the winners' shell, I see her in the audience~dressed in Simic biomancer robes. She's done it. She's going to the zonots to study. I breathe a sigh of relief. "Thanks for giving me the kick in the fin I needed," I say to Ptero as he passes. Nearly half of his clutch had placed. Not as good as he'd expected, but the competition overall was the best I'd seen in years. "No need to mock me," he grumbles, eyeing my tentacles with a palpable jealousy. I know I was against adaptations, but this one suits me quite nicely. I can see everything, all around me. There's no way a student will escape my sight now. "I'm not mocking you. I've avoided Adaptationist ideas. You've cast off the Utopian ones. We both embraced the extremes when we should have taken the time to learn from one another, and to find a place in the middle." He looks up at me, surprised I haven't taken this opportunity to shame him like he's done me so many times. "Maybe next year we can team up," he says, "tend our clutches together. We can ensure we're sending the best and brightest out to be our ocean's protectors." Next year. I really like the sound of that, and I have a feeling it's going to be the best clutch ever.
https://github.com/eratio08/learn-typst
https://raw.githubusercontent.com/eratio08/learn-typst/main/tutorial.typ
typst
= Introduction In this report, we will explore the various factors that influence fluid dynamics in glaciers and how they contribute to the formation and behaviour of these natural structures. == Emphasis _This is a test_ == Lists + Test - Test - Test + Test + Test == Figures #figure(image("placeholder.png", width: 20%), caption: [ Placeholder image Test ]) <placeholder> == References Testing some reference @placeholder and @wwdc-network. == Bib Using Hayagriva or BibLateX. #bibliography("basic.yml") == Math The equation $Q = rho A c + C$ defines the glacial flow rate. Inlined equation. $ Q = rho A c + C $ === Sub & Superscript Total displace solid glacial flow: $ 7.32 beta + sum_(i=0)^nabla Q_i / 2 $ $ 7.32 beta + sum_(i=0)^nabla (Q_i (a_i - epsilon)) / 2 $ $ v := vec(x_1, x_2, x_3) $ $ a arrow.squiggly b $ == Code Blocks ```rust fn main() { println!("Hello World!"); } ``` == Set Rules Don't do this ```typst #par(justify: true)[ = Background In the case of glaciers, fluid dynamics principles can be used to understand how the movement and behaviour of the ice is influenced by factors such as temperature, pressure, and the presence of other fluids (such as water). ] ``` But rather ```typst #set par(justify: true) ``` #set text(font: "New Computer Modern", size: 10pt) #set page( paper: "a6", margin: (x: 1.8cm, y: 1.5cm), ) #set par( justify: true, leading: 0.52em, ) = Background In the case of glaciers, fluid dynamics principles can be used to understand how the movement and behaviour of the ice is influenced by factors such as temperature, pressure, and the presence of other fluids (such as water). #set page( paper: "a4", ) = Heading numbering #set heading(numbering: "1.") = Introduction #lorem(10) == Background #lorem(12) == Methods #lorem(15) #set heading(numbering: "1.a") = Introduction #lorem(10) == Background #lorem(12) == Methods #lorem(15) = Show Rule Redefine how typst display certain elements using `show`. ```typst #show "ArtosFlow": name => box[ #box(image( "placeholder.png", height: 0.7em, )) #name ] ``` #show "ArtosFlow": name => box[ #box(image( "placeholder.png", height: 0.7em, )) #name ] This report is embedded in the ArtosFlow project. ArtosFlow is a project of the Artos Institute. = Advanced Styling #let title = [ A fluid dynamic model for glacier flow ] #set page( paper: "us-letter", header: align(right + horizon, title), numbering: "(1/1)", ) #set par(justify: true) #set text( font: "Linux Libertine", size: 11pt, ) #align(center, text(17pt)[ *#title* ]) #grid( columns: (1fr, 1fr), align(center)[ <NAME> \ Artos Institute \ #link("mailto:<EMAIL>") ], align(center)[ Dr. <NAME> \ Artos Institute \ #link("mailto:<EMAIL>") ] ) #align(center)[ #set par(justify: false) *Abstract* \ #lorem(80) ] #show: rest => columns(2, rest) #show heading: it => [ #set heading(numbering: none) #set align(center) #set text(12pt, weight: "regular") #block(smallcaps(it.body)) ] #show heading.where(level: 1): it => block(width: 100%)[ #set align(center) #set text(12pt, weight: "regular") #smallcaps(it.body) ] #show heading.where(level: 2): it => text( size: 11pt, weight: "regular", style: "italic", it.body + [.], ) = Introduction #lorem(20) == Morivation #lorem(300) = Related Work #lorem(200) = LaTeX Look ```typst #set page(margin: 1.75in) #set par(leading: 0.55em, first-line-indent: 1.8em, justify: true) #set text(font: "New Computer Modern") #show raw: set text(font: "New Computer Modern Mono") #show par: set block(spacing: 0.55em) #show heading: set block(above: 1.4em, below: 1em) ```
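The tutorial above contrasts wrapping content in a function call with using set rules, and then moves on to show rules. As a compact recap (not part of the original file), the sketch below combines both mechanisms on a fresh document; everything used here is plain built-in Typst.

```typst
// Recap sketch: set rules establish defaults, a show rule transforms elements.
#set text(size: 10pt)
#set par(justify: true)

// Render level-1 headings centered and in small caps.
#show heading.where(level: 1): it => align(center, smallcaps(it.body))

= Background
#lorem(30)
```

The show rule rebuilds each matched heading from `it.body`, which is the same pattern the tutorial later applies to its two-column paper layout.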
https://github.com/maxgraw/bachelor
https://raw.githubusercontent.com/maxgraw/bachelor/main/apps/document/src/5-implementation/hosting.typ
typst
To validate the application and carry out the evaluation, it had to be published on a website. The JavaScript framework SvelteKit was used for this @svelte-kit. The application was published at the URL #link("https://ar.maxgraw.com"). Because the components were built as Web Components, the application could simply be imported into the SvelteKit project and used there.
https://github.com/An-314/Notes-of-DSA
https://raw.githubusercontent.com/An-314/Notes-of-DSA/main/binary_search_tree.typ
typst
= 二叉搜索树 BST(Binary Search Tree) 各数据项依所持关键码而彼此区分,循关键码访问:call-by-KEY。关键码之间必须同时支持比较(大小)与比对(相等)。数据集中的数据项,统一地表示和实现为词条(entry)形式。 词条 ```cpp template <typename K, typename V> struct Entry { //词条模板类 K key; V value; //关键码、数值 Entry( K k = K(), V v = V() ) : key(k), value(v) {}; //默认构造函数 Entry( Entry<K, V> const & e ) : key(e.key), value(e.value) {}; //克隆 // 比较器、判等器(从此,不必严格区分词条及其对应的关键码) bool operator< ( Entry<K, V> const & e ) { return key < e.key; } //小于 bool operator> ( Entry<K, V> const & e ) { return key > e.key; } //大于 bool operator==( Entry<K, V> const & e ) { return key == e.key; } //等于 bool operator!=( Entry<K, V> const & e ) { return key != e.key; } //不等 }; ``` == 顺序性——BST的中序遍历 BST的存储需要保证: - 任一节点均不小/大于其左/右*后代* 与 任一节点均不小于/不大于其左/右*孩子*并不等效 三位一体:节点 $~$ 词条 $~$ 关键码 #figure( image("fig\BST\1.png",width: 80%), caption:"BST的顺序性" ) 顺序性虽只是对局部特征的刻画,却可导出BST的整体特征:*BST的中序遍历序列,必然单调非降*。 BST留出这样的接口: ```cpp template <typename T> class BST : public BinTree<T> { public: //以virtual修饰,以便派生类重写 virtual BinNodePosi<T> & search( const T & ); //查找 virtual BinNodePosi<T> insert( const T & ); //插入 virtual bool remove( const T & ); //删除 protected: BinNodePosi<T> _hot; //命中节点的父亲 BinNodePosi<T> connect34( //3+4重构 BinNodePosi<T>, BinNodePosi<T>, BinNodePosi<T>, BinNodePosi<T>, BinNodePosi<T>, BinNodePosi<T>, BinNodePosi<T> ); BinNodePosi<T> rotateAt( BinNodePosi<T> ); //旋转调整 }; ``` == BST的基本算法与实现 === 查找`search()` 从根节点出发,逐步地缩小查找范围,直到发现目标(成功), 或抵达空树(失败)。本质上讲,就是有序向量的二分查找。 ```cpp template <typename T> BinNodePosi<T> & BST<T>::search( const T & e ) { if ( !_root || e == _root->data ) //空树,或恰在树根命中 { _hot = NULL; return _root; } for ( _hot = _root; ; ) { //否则,自顶而下 BinNodePosi<T> & v = ( e < _hot->data ) ? _hot->lc : _hot->rc; //深入一层 if ( !v || e == v->data ) return v; _hot = v; //一旦命中或抵达叶子,随即返回 } //返回目标节点位置的引用,以便后续插入、删除操作 } //无论命中或失败, _hot均指向v之父亲(v是根时, hot为NULL) ``` 复杂度是$O(h)$,其中$h$是BST的高度。若BST退化为链,则复杂度退化为$O(n)$。 === 插入`insert()` 先借助`search(e)`确定插入位置及方向。 若`e`尚不存在, 则再将新节点作为叶子插入 - `_hot`为新节点的父亲 - `v = search(e)`为`_hot`对新孩子的引用 令`_hot`通过`v`指向新节点 ```cpp template <typename T> BinNodePosi<T> BST<T>::insert( const T & e ) { BinNodePosi<T> & x = search( e ); //查找目标(留意_hot的设置) if ( ! x ) { //既禁止雷同元素,故仅在查找失败时才实施插入操作 x = new BinNode<T>( e, _hot ); //在x处创建新节点,以_hot为父亲 _size++; updateHeightAbove( x ); //更新全树规模,更新x及其历代祖先的高度 } return x; //无论e是否存在于原树中,至此总有x->data == e } //验证:对于首个节点插入之类的边界情况,均可正确处置 ``` 时间主要消耗耗于`search(e)`和`updateHeightAbove(x)`,均为$O(h)$,其中$h$是BST的高度。 === 删除`remove()` ```cpp template <typename T> bool BST<T>::remove( const T & e ) { BinNodePosi<T> & x = search( e ); //定位目标节点 if ( !x ) return false; //确认目标存在(此时_hot为x的父亲) removeAt( x, _hot ); _size--; //分两大类情况实施删除 _size--; updateHeightAbove( _hot ); //更新全树规模,更新_hot及其历代祖先的高度 return true; } //删除成功与否,由返回值指示 ``` 这样,时间主要消耗于`search(e)`和`updateHeightAbove(x)`,后面证明`removeAt(x, _hot)`也为$O(h)$,其中$h$是BST的高度。 删除将分为两种情况: 1. 单分支 该节点只有一个孩子,直接将其孩子接入其父亲即可。 ```cpp template <typename T> static BinNodePosi<T> removeAt( BinNodePosi<T> & x, BinNodePosi<T> & hot ) { BinNodePosi<T> w = x; //实际被摘除的节点,初值同x BinNodePosi<T> succ = NULL; //实际被删除节点的接替者 if ( ! HasLChild( *x ) ) succ = x = x->rc; //左子树为空 else if ( ! HasRChild( *x ) ) succ = x = x->lc; //右子树为空 else { /* ...左、右子树并存的情况,略微复杂些... */ } hot = w->parent; //记录实际被删除节点的父亲 if ( succ ) succ->parent = hot; //将被删除节点的接替者与hot相联 release( w->data ); release( w ); return succ; //释放被摘除节点,返回接替者 } //此类情况仅需O(1)时间 ``` 2. 
双分支 该节点有两个孩子,需要找到其直接后继(或直接前驱)节点,将其值替换到该节点,然后删除直接后继(或直接前驱)节点。由于直接后继一定没有左儿子,从而转化为单分支情况。 ```cpp template <typename T> static BinNodePosi<T> removeAt( BinNodePosi<T> & x, BinNodePosi<T> & hot ) { /* ...... */ else { //若x的左、右子树并存,则 w = w->succ(); swap( x->data, w->data ); //令*x与其后继*w互换数据 BinNodePosi<T> u = w->parent; //原问题即转化为,摘除非二度的节点w ( u == x ? u->rc : u->lc ) = succ = w->rc; //兼顾特殊情况: u可能就是x } /* ...... */ } //时间主要消耗于succ(),正比于x的高度——更精确地, search()与succ()总共不过O(h) ``` == 平衡二叉搜索树BBST === 平衡 若不能有效地控制树高,就无法体现出BST相对于向量、列表等数据结构的明显优势,比如在最(较)坏情况下,二叉搜索树可能彻底地(接近地) 退化为列表,此时的性能不仅没有提高,而且因为结构更为复杂,反而会(在常系数意义上)下降。 用两种统计学口径分析平衡性: 1. 随机生成:将$n$个词条${e_i}$随机插入一棵空树按随机排列$sigma = (i_1, i_2, ..., i_n)$,得到一棵随机生成的BST$T$,其高度$h_T$是一个随机变量,假定所有BST等概率地出现,其期望值为$E(h_T) = O(log n)$ 2. 随即组成:将一样的拓扑结构视作一类,随机生成的BST$T$的高度$h_T$是一个随机变量,假定所有BST等概率地出现,其期望值为$E(h_T) = O(sqrt(n))$ $n$个节点组成的BST的个数为$S(n)$则 $ S(n) = sum_(i=1)^(n)S(i-1)S(n-i) = "catalan"(n) = (2n)!/(n!(n+1)!) $ 理想随机在实际中绝难出现:局部性、关联性、(分段)单调性、(近似)周期性、 ...较高甚至极高的BST频繁出现;平衡化处理很有必要。 由$n$个节点组成的二叉树,高度不致低于$floor(log_2(n+1))$ 。达到这一下界时,称作*理想平衡*。 而*渐近平衡*在渐近的意义下,高度不致超过$O(log n)$。满足这样的BST称为*平衡二叉树*(Balanced Binary Search Tree,BBST)。 === 平衡等价变换 #figure( image("fig\BST\3.png",width: 70%), caption:"等价BST" ) 限制条件 + 局部性: 各种BBST都可视作BST的某一子集,相应地满足精心设计的限制条件 - 单次动态修改操作后,至多$O(log n)$处局部不再满足限制条件(可能相继违反,未必同时) - 可在$O(log n)$时间内,使这些局部(以至全树)重新满足 等价变换 + 旋转调整: *序齿不序爵* 刚刚失衡的BST,必可速转换为一棵等价的BBST。 #figure( image("fig\BST\4.png",width: 80%), caption:"等价变换" ) `zig`和`zag`:仅涉及常数个节点,只需调整其间的联接关系;均属于局部的基本操作。调整之后: `v`/`c`深度加/减1,子(全)树高度的变化幅度,上下差异不超过1。 实际上,经过不超过$O(n)$次旋转,等价的BST均可相互转化。 == AVL树 <NAME> & <NAME> (1962) 提出的平衡二叉搜索树,以其发明者的名字命名。 === AVL树的定义 AVL的核心是:*平衡因子*(Balance Factor,BF)。 $ "BF"(v) = "height"(v->l c) - "height"(v->r c) $ AVL在每次操作后要进行维护,保证: $ forall v in T, |"BF"(v)| <= 1 $ AVL树未必理想平衡,但必然渐近平衡。 ==== AVL渐近平衡 对于固定高度$h$的AVL树,其最少节点数$S(h)$满足递推关系: $ S(h) = S(h-1) + S(h-2) + 1 $ 从而$S(h)="fib"(h+3)-1$,从而对于$n$个节点构成的AVL树,其高度不会超过$O(log n)$。 ==== Fibonacci Tree 高度为$h$,规模恰好为$S(h)$的AVL树,称为*Fibonacci树*(Fibonacci Tree)。 是最“瘦”的、临界的AVL树。 ==== AVL接口 ```cpp #define Balanced(x) ( stature( (x).lc ) == stature( (x).rc ) ) //理想平衡 #define BalFac(x) (stature( (x).lc ) - stature( (x).rc ) ) //平衡因子 #define AvlBalanced(x) ( ( -2 < BalFac(x) ) && (BalFac(x) < 2 ) ) //AVL平衡条件 template <typename T> class AVL : public BST<T> { //由BST派生 public: //BST::search()等接口,可直接沿用 BinNodePosi<T> insert( const T & ); //插入(重写) bool remove( const T & ); //删除(重写) }; ``` === 重平衡 AVL树的插入和删除操作,都可能导致局部失衡,需要通过旋转调整来重平衡。 #figure( image("fig\BST\5.png",width: 80%), caption:"AVL树的重平衡" ) - 插入:从祖父开始,每个祖先都有可能失衡,且可能同时失衡。 - 删除:从父亲开始,每个祖先都有可能失衡,但至多一个。 利用旋转变换进行重平衡: - 局部性:所有的旋转都在局部进行,每次只需$O(1)$时间 - 快速性:在每一深度只需检查并旋转至多一次,共$O(log n)$次 ==== 插入 插入分为两种情况: *单旋*:黄色方块恰好存在其一 只需要经过一次`zag`或者`zig`,并且旋转后的子树高度不变严格变回插入之前,即可恢复平衡,不需要再向上探;并且该子树的父亲的`BF`不变,不会导致更高层的失衡。 #figure( image("fig\BST\6.png",width: 80%), caption:"AVL树的插入" ) *双旋* 需要经过两次`zag`或者`zig`,并且旋转后的子树高度不变严格变回插入之前,即可恢复平衡,不需要再向上探;并且该子树的父亲的`BF`不变,不会导致更高层的失衡。 #figure( image("fig\BST\7.png",width: 80%), caption:"AVL树的插入" ) 注意:即便g未失衡,高度亦可能增加。 ```cpp template <typename T> BinNodePosi<T> AVL<T>::insert( const T & e ) { BinNodePosi<T> & x = search( e ); if ( x ) return x; //若目标尚不存在 BinNodePosi<T> xx = x = new BinNode<T>( e, _hot ); _size++; //则创建新节点 // 此时,若x的父亲_hot增高,则祖父有可能失衡 for ( BinNodePosi<T> g = _hot; g; g = g->parent ) //从_hot起,逐层检查各代祖先g if ( ! 
AvlBalanced( *g ) ) { //一旦发现g失衡,则通过调整恢复平衡 FromParentTo(*g) = rotateAt( tallerChild( tallerChild( g ) ) ); break; //局部子树复衡后,高度必然复原;其祖先亦必如此,故调整结束 } else //否则(g仍平衡) updateHeight( g ); //只需更新其高度(注意:即便g未失衡,高度亦可能增加) return xx; //返回新节点位置 } ``` 插入的时间主要在`search(e)`上,为$O(log n)$,其余操作均为$O(1)$,故总体复杂度为$O(log n)$。 ==== 删除 删除分为两种情况: *单旋*:黄色方块至少存在其一;红色方块可有可无 经过一次`zag`或者`zig`后,可能失衡,需要向上到根部,进行调整。 #figure( image("fig\BST\8.png",width: 80%), caption:"AVL树的删除" ) *双旋* 经过两次`zag`或者`zig`后,可能失衡,需要向上到根部,进行调整。 #figure( image("fig\BST\9.png",width: 80%), caption:"AVL树的删除" ) ```cpp template <typename T> bool AVL<T>::remove( const T & e ) { BinNodePosi<T> & x = search( e ); if ( !x ) return false; //若目标的确存在 removeAt( x, _hot ); _size--; //则在按BST规则删除之后, _hot及祖先均有可能失衡 // 以下,从_hot出发逐层向上,依次检查各代祖先g for ( BinNodePosi<T> g = _hot; g; g = g->parent ) { if ( ! AvlBalanced( *g ) ) //一旦发现g失衡,则通过调整恢复平衡 g = FromParentTo( *g ) = rotateAt( tallerChild( tallerChild( g ) ) ); updateHeight( g ); //更新高度(注意:即便g未曾失衡或已恢复平衡,高度均可能降低) } //可能需做过Ω(logn)次调整;无论是否做过调整,全树高度均可能下降 return true; //删除成功 } ``` ==== (3+4)-重构 `zig`和`zag`的最终是通过(3+4)-重构来实现的。 设`g`为最低的失衡节点,沿最长分支考察祖孙三代: `g ~ p ~ v` 按中序遍历次序,重命名为: `a < b < c`; 它们总共拥有四棵子树(或为空),按中序遍历次序,重命名为:`T0 < T1 < T2 < T3`。 #figure( image("fig\BST\10.png",width: 80%), caption:"(3+4)-重构" ) ```cpp template <typename T> BinNodePosi<T> BST<T>::connect34( BinNodePosi<T> a, BinNodePosi<T> b, BinNodePosi<T> c, BinNodePosi<T> T0, BinNodePosi<T> T1, BinNodePosi<T> T2, BinNodePosi<T> T3) { a->lc = T0; if (T0) T0->parent = a; a->rc = T1; if (T1) T1->parent = a; c->lc = T2; if (T2) T2->parent = c; c->rc = T3; if (T3) T3->parent = c; b->lc = a; a->parent = b; b->rc = c; c->parent = b; updateHeight(a); updateHeight(c); updateHeight(b); return b; } ``` 利用3+4重构,实现`zag`和`zig`: ```cpp template<typename T> BinNodePosi<T> BST<T>::rotateAt( BinNodePosi<T> v ) { BinNodePosi<T> p = v->parent, g = p->parent; if ( IsLChild( * p ) ) //zig if ( IsLChild( * v ) ) { //zig-zig p->parent = g->parent; return connect34( v, p, g, v->lc, v->rc, p->rc, g->rc ); } else { //zig-zag v->parent = g->parent; return connect34( p, v, g, p->lc, v->lc, v->rc, g->rc ); } else //zag if ( IsRChild( * v ) ) { //zag-zag p->parent = g->parent; return connect34( g, p, v, g->lc, p->lc, v->lc, v->rc ); } else { //zag-zig v->parent = g->parent; return connect34( g, v, p, g->lc, v->lc, v->rc, p->rc ); } } ``` === AVL综合评价 优点: - 无论查找、插入或删除,最坏情况下的复杂度均为$O(log n)$ - $O(n)$的存储空间 缺点: - 借助高度或平衡因子,为此需改造元素结构,或额外封装;实测复杂度与理论值尚有差距 - 插入/删除后的旋转,成本不菲 - 删除操作后,最多需旋转$Omega(log n)$次(Knuth:平均仅0.21次) - 若需频繁进行插入/删除操作,未免得不偿失 - 单次动态调整后,全树拓扑结构的变化量可能高达$Omega(log n)$
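The AVL height bound quoted in the notes above rests on the recurrence S(h) = S(h-1) + S(h-2) + 1 and the identity S(h) = fib(h+3) - 1. The short derivation below is a supplement, not part of the original notes; the base cases S(0) = 1 and S(1) = 2 are an assumption chosen to match that identity.

```latex
% Supplementary derivation (not from the notes); base cases S(0)=1, S(1)=2 assumed.
\begin{aligned}
S(h) + 1 &= \bigl(S(h-1) + 1\bigr) + \bigl(S(h-2) + 1\bigr)
  \;\Longrightarrow\; S(h) + 1 = \operatorname{fib}(h+3) \ge \varphi^{\,h+1},
  \qquad \varphi = \tfrac{1+\sqrt{5}}{2}, \\
n \;\ge\; S(h) &\;\ge\; \varphi^{\,h+1} - 1
  \;\Longrightarrow\; h \;\le\; \log_{\varphi}(n+1) - 1 \;=\; O(\log n).
\end{aligned}
```

This is exactly why the Fibonacci tree is the "thinnest" AVL tree of its height.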
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/break-continue-11.typ
typst
Other
// Test second block during break flow.
// Ref: true
#for i in range(10) {
  table(
    { [A]; break },
    for _ in range(3) [B]
  )
}
https://github.com/VisualFP/docs
https://raw.githubusercontent.com/VisualFP/docs/main/SA/project_documentation/project_documentation.typ
typst
#import "../style.typ": * #import "../acronyms.typ": * #set text(..sa_text_style) #set heading(..sa_header_style) #show heading.where(level: 1): sa_heading1_show #show heading.where(level: 2): sa_heading2_show #show heading.where(level: 3): sa_heading3_show #show heading.where(level: 4): sa_heading4_show #let metadata = ( title: [VisualFP], description: [Project documentation], organization: [Department of Computer Science \ OST - Eastern Switzerland University of Applied Sciences \ Campus Rapperswil-Jona], thesis: [Student Research Project], term: [Autumn Term 2023], authors: [<NAME>, <NAME>], authors-short: [<NAME>, <NAME>], advisor: [Prof. Dr. <NAME>], partner: [IFS Institute for Software], external-co-examiner: [], internal-co-examiner: [] ); #sa_title_page(metadata) #sa_table_of_contents() #set page(..sa_page_style(metadata)) #part("Project Documentation") #include_section("project_documentation/content/introduction.typ") #include_section("project_documentation/content/project_method.typ") #include_section("project_documentation/content/project_plan.typ") #include_section("project_documentation/content/used_tools.typ") #include_section("project_documentation/content/quality_assurance.typ") #include_section("project_documentation/content/risk_management.typ") #include_section("project_documentation/content/time_tracking.typ") #include_section("project_documentation/content/personal_reports.typ") #include_section("project_documentation/content/meeting_minutes/meeting_minutes.typ") #part("Appendix") #sa_list_of_acronyms() <list_of_acronyms> #sa_bibliography() #sa_list_of_figures() #sa_list_of_tables() #sa_disclaimer()
https://github.com/Jo-Eck/burgers
https://raw.githubusercontent.com/Jo-Eck/burgers/main/paper.typ
typst
#import "template.typ": * #show: ieee_conference.with( title: "Classification of Sandwich Types: A Set-Theoretic Analysis", abstract: [ This paper provides a set-theoretic analysis of the relationships between cheeseburgers, hamburgers, and double hamburgers, offering a comprehensive understanding of their categorization in the context of culinary classification. We examine the subsets and intersections of these sandwich types based on their ingredients and patty counts, emphasizing the variability in their classification under different culinary viewpoints. ], authors: ( ( name: "satoqz", organization: [Sandwich Science Innovations, Inc.], location: [Stuttgart, Germany], email: "<EMAIL>", ), ), ) = Introduction The classification of cheeseburgers, hamburgers, and double hamburgers has long been a subject of debate, with varying perspectives on their categorization. This study aims to clarify these culinary terms and their hierarchical relationships while recognizing the potential for differing viewpoints. = Methods We apply set theory to categorize and define the relationships between these sandwich types: 1. *Cheeseburgers (C)*: A subset of hamburgers, characterized by the inclusion of cheese as a primary ingredient. 2. *Hamburgers (H)*: The broader category encompassing all meat patty sandwiches. This term is applicable to sandwiches both with and without cheese, making it inclusive of cheeseburgers. 3. *Double Hamburgers (D)*: A category defined by the presence of two meat patties, with single hamburgers being a subset of this category. = Results Based on our rigorous set-theoretic analysis, an unexpected paradox emerges regarding the relationships between these sandwich types: - *C #sym.subset H:* Cheeseburgers are a subset of hamburgers, as they are a specific type of hamburger that includes cheese. This implies that cheeseburgers are inherently linked to the broader category of hamburgers. - *C #sym.supset H:* Paradoxically, hamburgers, encompassing all meat patty sandwiches regardless of cheese, are also a subset of cheeseburgers. This indicates that cheeseburgers encompass the more inclusive category of hamburgers. - *H #sym.subset D:* Double hamburgers, defying conventional expectations, encompass single hamburgers as they share the common element of meat patties in a sandwich. This suggests that double hamburgers represent a broader category that includes the traditional single hamburgers. - *H #sym.supset D:* Paradoxically, hamburgers, inclusive of all meat patty sandwiches, are also a superset of double hamburgers. This unexpected relationship implies that double hamburgers, with their two patties, can be seen as a subset of the conventional hamburger category. == Discussion It is important to acknowledge that the classification of these sandwich types presents a bewildering paradox. Our set-theoretic analysis defies traditional categorization by concluding that cheeseburgers, hamburgers, and double hamburgers have overlapping relationships. The unexpected results challenge conventional assumptions and highlight the complexity of culinary classification. == Conclusion In this peculiar study, we employed set theory to examine the relationships between cheeseburgers, hamburgers, and double hamburgers. Our unexpected findings reveal an intriguing oxymoron: cheeseburgers are both a subset and a superset of hamburgers, while hamburgers are likewise both a subset and a superset of cheeseburgers. 
Moreover, double hamburgers encompass single hamburgers, just as single hamburgers can be seen as a subset of double hamburgers. = Acknowledgments We would like to acknowledge the complex nature of culinary taxonomy and the unforeseen paradoxes that arise through systematic analysis.
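The subset claims above can be made mechanical. The Typst sketch below is not part of the paper, and the ingredient lists are invented purely for illustration; under ordinary containment only one direction holds, which is precisely the conventional reading the paper plays against.

```typst
// Illustration only: invented ingredient sets and a plain subset test.
#let hamburger = ("bun", "patty")
#let cheeseburger = ("bun", "patty", "cheese")

// subset(a, b) is true when every element of a also occurs in b.
#let subset(a, b) = a.all(x => x in b)

#subset(hamburger, cheeseburger) // true: every hamburger ingredient appears in the cheeseburger list
\ #subset(cheeseburger, hamburger) // false: cheese has no counterpart in the hamburger list
```

Note the inversion: containment of ingredient lists runs opposite to containment of the categories themselves, since adding an ingredient narrows the category.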
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/let-14.typ
typst
Other
// Error: 6-20 not enough elements to destructure
#let (..a, b, c, d) = (1, 2)
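For contrast with the failing pattern in the test above, here is a spread binding that does match a two-element tuple (a minimal sketch, not part of the test suite):

```typst
// `..rest` may absorb zero or more trailing elements, so this destructuring succeeds.
#let (a, ..rest) = (1, 2)
#assert(a == 1 and rest == (2,))
```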
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/postercise/0.1.0/postercise.typ
typst
Apache License 2.0
#import "/themes/themes.typ" #import "/utils/scripts.typ"
https://github.com/Myriad-Dreamin/typst.ts
https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/fuzzers/corpora/meta/cite-footnote_00.typ
typst
Apache License 2.0
#import "/contrib/templates/std-tests/preset.typ": * #show: test-page Hello @netwok And again: @netwok #pagebreak() #bibliography("/assets/files/works.bib", style: "chicago-notes")
https://github.com/KaiserY/mdbook-typst-pdf
https://raw.githubusercontent.com/KaiserY/mdbook-typst-pdf/main/README-cn.md
markdown
Apache License 2.0
# mdbook-typst-pdf

Converts [mdBook](https://github.com/rust-lang/mdBook) books to PDF.

It is currently used mainly to convert [Rust 程序设计语言 简体中文版](https://kaisery.github.io/trpl-zh-cn) (the Simplified Chinese translation of *The Rust Programming Language*) to PDF.

## Demo

[Rust 程序设计语言 简体中文版.pdf](https://kaisery.github.io/trpl-zh-cn/Rust%20%E7%A8%8B%E5%BA%8F%E8%AE%BE%E8%AE%A1%E8%AF%AD%E8%A8%80%20%E7%AE%80%E4%BD%93%E4%B8%AD%E6%96%87%E7%89%88.pdf)

## Reference projects

- https://github.com/typst/typst
- https://github.com/rust-lang/mdBook
- https://github.com/lbeckman314/mdbook-latex
- https://github.com/LegNeato/mdbook-typst
https://github.com/TypstApp-team/typst
https://raw.githubusercontent.com/TypstApp-team/typst/master/tests/typ/layout/flow-orphan.typ
typst
Apache License 2.0
// Test that lines and headings don't become orphans.

---
#set page(height: 100pt)
#lorem(12)

= Introduction
This is the start and it goes on.

---
#set page("a8", height: 140pt)
#set text(weight: 700)

// Fits fully onto the first page.
#set text(blue)
#lorem(27)

// The first line would fit, but is moved to the second page.
#lorem(20)

// The second-to-last line is moved to the third page so that the last
// one isn't as lonely.
#set text(maroon)
#lorem(11)
#lorem(13)

// All three lines go to the next page.
#set text(olive)
#lorem(10)
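The test above exercises Typst's built-in orphan handling for lines and headings. When an author wants to force a heading to stay with its following paragraph regardless, a common hand-rolled pattern is an unbreakable block (a sketch, not part of the test file):

```typst
// Keep the heading and its first paragraph on the same page.
#block(breakable: false)[
  = Introduction
  #lorem(20)
]
```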
https://github.com/rabotaem-incorporated/algebra-conspect-1course
https://raw.githubusercontent.com/rabotaem-incorporated/algebra-conspect-1course/master/sections/04-linear-algebra/16-eigen-values-vectors.typ
typst
Other
#import "../../utils/core.typ": * == Собственные значения и собственные векторы линейного отображения #def[ Оператор $Aa in End(V)$ называется _диагонализируемым_, если $exists E:$ $ [Aa]_E = diag(lambda_1, ..., lambda_n) = mat( lambda_1, ..., 0; dots.v, dots.down, dots.v; 0, ..., lambda_n; ), space "где" lambda_1, ..., lambda_n in K $ ] #notice[ Матрицы _часто_ диагонализируемые. Правда, пока не понятно, что это значит. ] #ticket[Собственные значения. Линейная независимость собственных векторов, принадлежащих разным собственным значениям. Следствия] #def[ Пусть $Aa$ --- линейный оператор. Вектор $v != 0$ называется _собственным вектором_ оператора $Aa$ если существует скаляр $lambda in K$ такой, что $ Aa v = lambda v. $ Такие скаляры $lambda$ называют _собственными значениями_, или _собственными числами_ оператора $Aa$. То есть $lambda$ является собственным значением, если найдется ненулевой $v$ такой, что $Aa v = lambda v$. Или, $lambda$ является собственным значением, если $V_lambda = {v in V bar Aa v = lambda v} != 0.$ Или, если $(Aa - lambda epsilon_V)(v) != 0$. Или, если $Ker (Aa - lambda epsilon_V) != 0$. $V_lambda$ называется _собственным подпространтством_, принадлежащим собственному значению $lambda$. Собственные вектора являются элементами таких пространств. ] #notice[ Тогда матрица отображения $Aa$ является диагонализируемой, тогда и только тогда, когда найдется базис из собственных векторов $Aa$. ] #pr[ Пусть $v_i in V_lambda_i \\ {0}$ --- собственные вектора, $i = 1,..., m$, $lambda_1, ..., lambda_m$ --- различные собственные значения. Тогда $v_1, ..., v_m$ --- ЛНС. ] #proof[ Индукция по $m:$ \ - "База": $m = 1$: $v_1 != 0$ по определению, значит $v_1$ ЛНС - "Переход $m-1 ~~> m$": предположим, что $exists alpha_1, .., alpha_m in K$, не все 0, такие что $ alpha_1 v_1 + ... + alpha_m v_m = 0 quad (*) \ alpha_m = 0 ==> "противоречие с ИП" $ $ (*) ==> Aa(alpha_1 v_1 + ... + alpha_m v_m) = alpha_1 lambda_1 v_1 + ... + alpha_m lambda_m v_m = 0 $ Домножили на $lambda_m$: $ (*) ==> alpha_1 lambda_m v_1 + alpha_2 lambda_m v_2 + ... + alpha_m lambda_m v_m = 0 $ $ alpha_1(lambda_1 - lambda_m)v_1 + ... + alpha_(m-1)(lambda_(m-1) - lambda_m)v_(m-1) = 0, space v_1, ..., v_(m-1) space #[ --- ЛНС по ИП] $ $ ==> alpha_1(underbrace(lambda_1 - lambda_m, != 0)) = ... = alpha_(m-1)(underbrace(lambda_(m-1) - lambda_m, != 0)) = 0 $ $ ==> alpha_1 = ... = alpha_(m-1) = 0 ==> alpha_m v_m = 0 ==> v_m = 0 space #[ --- противоречие условию] $ ] #follow[ Собственных значений $Aa$ не более, чем $dim V$. ] #let pc = sym.plus.circle #follow[ Пусть $lambda_1, ..., lambda_m$ --- собственные значения $V$. Тогда $V_lambda_1 + ... + V_lambda_m = V_lambda_1 pc ... pc V_lambda_m$. ] #proof[ $ forall v_1 in V_1, ..., v_m in V_m, space v_1, ..., v_m - "ЛНС" <==> \ forall i: space V_i sect V_1 + ... + hat(V_i) + ... + V_m = 0 $ ] #ticket[Диагонализируемые операторы. Критерий диагонализируемости в терминах геометрических кратностей (+см. определение выше)] #def[ _Геометрической кратностью_ собственного значения $lambda$ называется $ g_lambda = dim V_lambda. $ ] #pr[ Пусть $lambda_1, ..., lambda_m$ --- все собственные значение оператора $Aa$; $n = dim V$. + $g_(lambda_1) + ... + g_(lambda_m) <= n$ + $Aa$ --- диагонализируем $<==> g_(lambda_1) + ...+ g_(lambda_m) = n$ ] #proof[ + #h(1fr) $ V > V_lambda_1 pc ... pc V_lambda_m ==> dim V &>= dim(V_lambda_1 pc ... pc V_lambda_m) &=\ = dim V_lambda_1 + ... + dim V_lambda_m &= g_lambda_1 + ... + g_lambda_m. 
$ + #[Можно считать $ [Aa]_E = diag(underbrace((lambda_1, ..., lambda_1), r_1), ..., underbrace((lambda_m, ..., lambda_m), r_m)) \ ==> underbrace(dim(V_(lambda_j)), = g_(lambda_j)) >= r_j, #[так как $V_(lambda_j)$ содержит $r_j$ базисных $==>$ линейно независимых векторов] \ g_(lambda_1) + ... + g_(lambda_m) >= r_1 + ... + r_m = n ==> g_(lambda_1) + ... + g_(lambda_m) = n $ Обратно: пусть $sum g_lambda_j = n$, $E_j$ --- любой базис $V_lambda_j$. $ dim(V_lambda_1 pc ... pc V_lambda_n) = sum g_lambda_j = n ==> V_lambda_1 pc ... pc V_lambda_m = V imply^#[лемма]\ E_1, ..., E_m #[--- базис $V$]. $ Базис из собственных векторов --- диагонализирующий. ] ] #ticket[Характеристический многочлен линейного оператора. Алгебраическая кратность собственного значения] #notice[ $ lambda - "собственное значение" Aa <==> Ker(Aa - lambda epsilon) != 0 <==>\ Aa - lambda epsilon in.not GL(V) <==> [Aa - lambda epsilon]_E in.not GL_n (V) <==> det([Aa - lambda epsilon]_E) = 0 iff^((*)) det((A - lambda E_n)) = 0 $ $ (*): quad [Aa - lambda epsilon_V]_E = [Aa]_E - [lambda epsilon_V]_E = [Aa]_E - lambda E_n. $ ] #def[ Для какого-то $Aa = End V$ можно записать собственные числа как корни _характеристического многочлена матрицы A_, $chi_A = det((A - x mul E_n)) in K[x] subset K(x)$. $ abs(A - x mul E_n) &= (a_(1 1) - x)...(a_(n n) - x) + #[многочлены степени $<= n-2$] \ &= (-1)^n x^n + (-1)^(n-1)(underbrace(a_(1 1) + a_(2 2) + ... + a_(n n), #[$Tr A$ --- След матрицы $A$]))x^(n-1) + ... + det(A) $ ] #lemma[ Пусть $A' = C^(-1)A C, C in GL_n (K)$. Тогда $chi_(A') = chi_A$. ] #proof[ $ chi_(A') = abs(A' - x mul E_n) = abs(C^(-1) A C - x mul C C^(-1)) = abs(C^(-1) A C - C^(-1)( x C)) = \ abs(C^(-1)(A - x E_n)C) = abs(C)^(-1) mul abs(A - x E_n) mul abs(C) = chi_A. $ ] #follow[ Характеристический многочлен оператора $chi_([Aa]_E)$ не зависит от выбора базиса $E$. ] #def[ _Характеристический многочлен оператора $Aa$_ --- $chi_Aa = chi_([Aa]_E)$, в каком-то базисе $E$. ] #pr[ $lambda$ --- собственное значение $Aa$ тогда, и только тогда, когда $chi_Aa(lambda) = 0$. ] #proof[ $ Ker (A - lambda E_n) != 0 <==> abs(A - lambda E_n) = 0 <==> chi_Aa (lambda) = 0. $ ] #def[ _Алгебраической крастностью_ собственного значения $lambda$ называют кратностью $lambda$ как корня $chi_(Aa)$ ] #notice(name: [Не особо важно, просто для понимания])[ Многие матрицы диагонализируемы, потому что корни характеристических многочленов редко совпадают, а когда они совпадают, то довольно часто геометрические кратности равны алгебраическим. ] #ticket[Связь между алгебраической и геометрической кратностью собственного значения (?)] #pr[ $g_lambda <= a_lambda$ ] #let line_end(ident, ..args) = { let end_label = label("line_end_" + ident) show end_label: locate(loc => { let end = loc.position() let begin_label = label("line_begin_" + ident) let start = query(begin_label, loc).last().location().position() box(place(line(end: (start.x - end.x, start.y - end.y), ..args))) }) [#box()#end_label] } #let line_begin(ident) = [#box()#label("line_begin_" + ident)] #proof[ Пусть $e_1, ..., e_g$ --- базис $V_lambda$, $e_(g+1), ..., e_n$ --- дополнение до базиса $E$ пространства $V$. 
$ [A]_E = mat( lambda, 0, ..., 0, limits(#hide("A"))^#line_begin("B"), dots.v, dots.v; 0, lambda, ..., 0, space, dots.v, dots.v; dots.v, dots.v, dots.down, lambda, space, dots.v, dots.v; #move(dx: -5pt)[#line_begin("A")], #box(), #box(), #box(), #box(), #box(), #move(dx: 5pt)[#line_end("A")] ; 0, 0, ..., 0, space, dots.v, dots.v; 0, 0, ..., 0, limits(#hide("A"))_#line_end("B"), dots.v, dots.v; ) $ $ chi_(Aa) = ... = abs((lambda - x) E_g) mul abs(*) = (lambda - x)^g mul (...) ==> a_lambda >= g_lambda. $ ] #ticket[Критерий диагонализируемости в терминах геометрических и алгебраических кратностей. Примеры недиагонализируемых операторов] #pr[ Для оператора $Aa in End(V)$ эквивалентны: + $Aa$ -- диагонализируем + $chi_(Aa)$ расскладывается на линейные множители, и для всех собственных значений $lambda$ выполняется $g_lambda = a_lambda$ ] #proof[ \ $1 ==> 2$ $ g_(lambda_1) + ... + g_(lambda_m) = n, chi_(diag(alpha_1, ..., alpha_n)) = plus.minus product_(i=1)^n (x - alpha_i) \ a_(lambda_1) + ... + a_(lambda_m) \ g_(lambda_j) <= a_(lambda_j) ==> forall j: g_(lambda_j) = a_(lambda_j) $ \ $2 ==> 1$ $ a_(lambda_1) + ... + a_(lambda_m) = n ==> g_(lambda_1) + ... + g_(lambda_m) = n ==> #[$Aa$ диагонализируем] $ ] #example[ Этот оператор недиагонализируем: $Aa = display(mat(0, 0; 1, 0))$ $ chi_Aa = mat(delim: "|", A - x dot E_n) = mat(-x, 0; 1, -x) = x^2. $ У этой матрицы алгебраическая кратность $a_0$ собственного числа 0 это кратность корня $x^2$, то есть 2, а геометрическая, $1$ так как $dim Lin(e_2) = 1$. ] #ticket[Жорданова нормальная форма (формулировка теоремы)] #th(name: [Жорданова нормальная форма])[ $m in NN, space lambda in L$ $ J_m (lambda) = mat( lambda, 1, 0, ..., 0, 0; 0, lambda, 1, ..., 0, 0; 0, 0, lambda, ..., 0, 0; dots.v, dots.v, dots.v, dots.down, dots.v, dots.v; 0, 0, 0, ..., lambda, 1; 0, 0, 0, ..., 0, lambda; ) in M_m (K) $ Такая матрица называется _Жордановой клеткой_. Жорданова матрица --- блочная диагональная матрица, в которой каждый блок --- Жорданова клетка. Пусть $chi_Aa$ расскладывается на линейные множители. Тогда в $V$ существует базис $E$ такой, что $[Aa]_E$ --- Жорданова матрица. При этом, матрица $[Aa]_E$ определена однозначно, с точностью до порядка блоков. ] #proof[ Без доказательства. ]
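As a small worked illustration of the diagonalizability criterion stated in the notes above (this example is a supplement, not part of the original), take a concrete 2x2 real matrix:

```latex
% Worked example: characteristic polynomial, eigenvalues, eigenspaces.
A = \begin{pmatrix} 2 & 1 \\ 1 & 2 \end{pmatrix}, \qquad
\chi_A(x) = \det(A - x E_2) = (2-x)^2 - 1 = (x-1)(x-3),

\lambda_1 = 1:\; V_1 = \operatorname{Lin}\begin{pmatrix} 1 \\ -1 \end{pmatrix}, \qquad
\lambda_2 = 3:\; V_3 = \operatorname{Lin}\begin{pmatrix} 1 \\ 1 \end{pmatrix}.
```

Here g_lambda = a_lambda = 1 for both eigenvalues, so A is diagonalizable and equals diag(1, 3) in the eigenbasis.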
https://github.com/kdog3682/mathematical
https://raw.githubusercontent.com/kdog3682/mathematical/main/0.1.0/src/expand.typ
typst
#import "@local/typkit:0.1.0": * #let expand-string(s, evaluate: false, show-original: false, fill: none) = { if is-factorial(s) { let m = int(match(s, "\d+")) let numbers = range(m, 0, step: -1) let a = numbers.map(resolve-content).join(marks.math.times) if evaluate == true { let b = math.bold(str(numbers.product())) return math.equation((a, b).join(marks.math.equals)) } else { return math.equation(a) } } else if is-multiplication(s) { let (a, b) = get-integers(s) let ans = a * b let store = () let smaller = text.with(size: sizes.small) let items = map(b, (x) => $#colored(a, fill)$).intersperse(sym.plus).map(smaller) let attrs = ( stroke: none, align: horizon + center, columns: b, inset: 2pt, ) let content = table(..items, ..attrs) let repeated = boxy(content, stroke: strokes.soft) store.push(repeated) store.push(marks.math.equals) store.push(boxy($#ans$)) if show-original == true { store.insert(0, marks.math.arrow) store.insert(0, mathup(s)) } return math.equation(store.join()) } } #let expand-content(c) = { if is-exponent-content(c) { // base and body } } #let expand(s, ..sink) = { if is-string(s) { expand-string(s, ..sink) } else { expand-content(s, ..sink) } } #let expansion(..sink) = { return expand(..sink) } // #expand("4 times 8") // #panic(expand("3!"))
https://github.com/JeyRunner/tuda-typst-templates
https://raw.githubusercontent.com/JeyRunner/tuda-typst-templates/main/example_tudapub.typ
typst
MIT License
// imports #import "@preview/cetz:0.2.2": canvas, plot #import "@preview/glossarium:0.4.0": make-glossary, print-glossary, gls, glspl #import "@preview/mitex:0.2.3": * // add // - subpar for sub-figures #import "@preview/equate:0.1.0": equate #show: make-glossary //#import "templates/tudapub/tudapub.typ": tudapub //#import "templates/tudapub/tudacolors.typ": tuda_colors #import "templates/lib.typ": * // equation sub numbering #show: equate.with(sub-numbering: true, number-mode: "label") // setup the template #show: tudapub.with( title: [ TUDa Thesis With Typst ], author: "<NAME>", // to deactivate the sub logo text set logo_sub_content_text: none, logo_sub_content_text: [ field of study: \ Some Field of Study \ \ Institute ABC ], accentcolor: "9c", abstract: [ This is a template to write your thesis with the corporate design of #link("https://www.tu-darmstadt.de/")[TU Darmstadt]. For instructions on how to set up this template see @sec_usage. ], bib: bibliography("tests/latex_ref/DEMO-TUDaBibliography.bib", full: true), //, style: "spie") logo_tuda: image("templates/tudapub/logos/tuda_logo.svg"), // logo_institute: image("templates/tudapub/logos/iasLogo.jpeg"), // logo_institute_sizeing_type: "width", // Set the margins of the content pages. // The title page is not affected by this. // Some example margins are defined in 'common/props.typ': // - tud_page_margin_small // same as title page margin // - tud_page_margin_big // E.g. margin: tud_page_margin_small, // E.g. margin: ( // top: 30mm, // left: 31.5mm, // right: 31.5mm, // bottom: 56mm // ), //outline_table_of_contents_style: "adapted", //reduce_heading_space_when_first_on_page: false //figure_numbering_per_chapter: false // Which pages to insert // Pages can be disabled individually. show_pages: ( title_page: true, outline_table_of_contents: true, // "Erklärung zur Abschlussarbeit" thesis_statement_pursuant: true ), thesis_statement_pursuant_include_english_translation: false, // pages after outline that will not be included in the outline additional_pages_after_outline_table_of_contents: [ == List of Symbols - $t$ - time == List of Figures ] ) // test content = Demo of the Template Style This chapter contains lots of demo content to see how the template looks. For usage instructions go to to @sec_usage. == Demo Paragraphs Here is some demo text. #lorem(50) #lorem(110) #lorem(60) == Some Basic Elements This text contains two#footnote[The number two can also be written as 2.] footnotes#footnote[This is a first footnote. \ It has a second line.]. === Figures The following @fig_test represents a demo Figure. #figure( rect(inset: 20pt, fill: gray)[ Here should be an Image ], caption: [The figure caption.] ) <fig_test> We can also make tables, as in @fig_tab_test. #figure( table( columns: 2, [A], [B], [1], [2] ), caption: [This is the table title.] ) <fig_tab_test> The text continues normally after the Figures. #pagebreak() == Test Coding Let's autogenerate some stuff: //#let x = (1, 2, 3) #let x = range(0, 3) #for (i, el) in x.map(el => el*2).enumerate() [ - Element Nr. #i has value #el #circle(fill: color.linear-rgb(100, 100, el*20), width: 12pt) //$circle$ ] == Lists This is a list: + an item + another item This is another list - an item - another item - yet another item #pagebreak() == Let's do some math Bla _blub_ *bold*. Math: $x + y (a+b)/2$. $ "Align:"& \ & x+y^2 && != 27 sum_(n=0)^N e^(i dot pi dot n) \ & "s.t. 
" && b c \ \ & mat( 1,3 ; 3, 4 )^T && = alpha mat( x ,y ; x_2, y_2 )^T \ \ & underbrace( cal(B) >= B , "This is fancy!") \ x &= y^2 + 12 & "(This does A)" \ y &= z \/ 2 = z / 2 & "(This does B)" #<eq.last> $ In @eq.last we can see cool stuff. Sub equations: $ a &= "with line number" #<eq.second.sub> \ b &= "no line number" \ b &= "with line number" #<eq.second.sub2> $ === Math in Latex This is possible with the package #link("https://github.com/mitex-rs/mitex")[mitex]: You can include the package at the beginning of your document via //```typst #raw(lang: "typst", "#import \"@preview/mitex:0.1.0\": *") //``` . Usage: #block(breakable: false)[ #table( columns: 2, ```latex mitex(` \begin{pmatrix} \dot{r}_x + \omega r_x - \omega p_x \\ \dot{r}_x - \omega r_x + \omega p_x \end{pmatrix} = \begin{pmatrix} +\omega \xi_x - \omega p_x \\ -\omega s_x + \omega p_x \end{pmatrix} `) ```, mitex(` \begin{pmatrix} \dot{r}_x + \omega r_x - \omega p_x \\ \dot{r}_x - \omega r_x + \omega p_x \end{pmatrix} = \begin{pmatrix} +\omega \xi_x - \omega p_x \\ -\omega s_x + \omega p_x \end{pmatrix} `) ) ] === #strike[Adjust Equation spacing] To reduce the spacing above and below block equations use: ```typst #show math.equation: set block(spacing: 0.1em) // does not work! ``` #table( columns: 2, [With default spacing], [With reduced spacing], [ This is Text. $ x^2 = y^2 $ This is Text. ], [ #show math.equation: set block(spacing: 0.5em) This is Text. $ x^2 = y^2 $ This is Text. ] ) == Another Section Some graphics: \ #box(stroke: black, inset: 5mm)[ test in a box #circle(width: 2.2cm, inset: 2mm)[ And in the circle ] ] Some more text here. #lorem(20) In @fig.myfig we can see things. #figure( [ #rect(inset: 20.9pt)[Dummy Test] ], caption: [ This is a figure ] )<fig.myfig> #lorem(100) = Usage of this Template <sec_usage> To use the template write the following in your `main.typ` file (also see the `README.md` of the repository for more details): ```typst #import "templates/tuda-typst-templates/templates/tudapub/tudapub.typ": tudapub #show: tudapub.with( title: [ My Thesis ], author: "<NAME>", accentcolor: "3d" ) = My First Chapter Some Text ``` For the list of possible accent colors to select from see @sec_usage_accentcolors. == Template Options In the following, we show the show-command of this template with all doc and default options. Note that this may not be up to date, thus always also look at the file `templates/tudapub/tudapub.typ`. ```typst #show: tudapub.with( title: [Title], title_german: [Title German], // Adds an abstract page after the title page with the corresponding content. // E.g. abstract: [My abstract text...] abstract: none, // "master" or "bachelor" thesis thesis_type: "master", // The code of the accentcolor. // A list of all available accentcolors is in the list tuda_colors accentcolor: "9c", // Size of the main text font fontsize: 10.909pt, //11pt, // Currently just a4 is supported paper: "a4", // Author name as text, e.g "<NAME>" author: "<NAME>", // Date of submission as string date_of_submission: datetime( year: 2023, month: 10, day: 4, ), location: "Darmstadt", // array of the names of the reviewers reviewer_names: ("SuperSupervisor 1", "SuperSupervisor 2"), // language for correct hyphenation language: "eng", // Set the margins of the content pages. // The title page is not affected by this. // Some example margins are defined in 'common/props.typ': // - tud_page_margin_small // same as title page margin // - tud_page_margin_big // E.g. 
margin: ( // top: 30mm, // left: 31.5mm, // right: 31.5mm, // bottom: 56mm // ), margin: tud_page_margin_big, // tuda logo - has to be a svg. E.g. image("PATH/TO/LOGO") logo_tuda: image("logos/tuda_logo.svg"), // optional sub-logo of an institute. // E.g. image("logos/iasLogo.jpeg") logo_institute: none, // How to set the size of the optional sub-logo // either "width": use tud_logo_width*(2/3) // or "height": use tud_logo_height*(2/3) logo_institute_sizeing_type: "width", // Move the optional sub-logo horizontally logo_institute_offset_right: 0mm, // An additional white box with content e.g. the institute, ... below the tud logo. // Disable it by setting its value to none. // E.g. logo_sub_content_text: [ Institute A \ filed of study: \ B] logo_sub_content_text: [ field of study: \ Some Field of Study \ \ Institute A ], // The bibliography created with the bibliography(...) function. // When this is not none a references section will appear at the end of the document. // E.g. bib: bibliography("my_references.bib") bib: none, // Add an English translation to the "Erklärung zur Abschlussarbeit". thesis_statement_pursuant_include_english_translation: false, // Which pages to insert // Pages can be disabled individually. show_pages: ( title_page: true, outline_table_of_contents: true, // "Erklärung zur Abschlussarbeit" thesis_statement_pursuant: true ), // Insert additional pages directly after the title page. // E.g. additional_pages_after_title_page: [ // = Notes // #pagebreak() // = Another Page // ] additional_pages_after_title_page: none, // Insert additional pages directly after the title page. // E.g. additional_pages_after_title_page: [ // = Notes // #pagebreak() // = Another Page // ] additional_pages_before_outline_table_of_contents: none, // Insert additional pages directly after the title page. // E.g. additional_pages_after_title_page: [ // = Notes // #pagebreak() // = Another Page // ] additional_pages_after_outline_table_of_contents: none, // For headings with a height level than this number no number will be shown. // The heading with the lowest level has level 1. // Note that the numbers of the first two levels will always be shown. heading_numbering_max_level: 3, // In the outline the max heading level will be shown. // The heading with the lowest level has level 1. outline_table_of_contents_max_level: 3, // Set space above the heading to zero if it's the first element on a page. // This is currently implemented as a hack (check the y pos of the heading). // Thus when you experience compilation problems (slow, no convergence) set this to false. reduce_heading_space_when_first_on_page: true, // How the table of contents outline is displayed. // Either "adapted": use the default typst outline and adapt the style // or "rewritten": use own custom outline implementation which better reproduces the look of the original latex template. // Note that this may be less stable than "adapted", thus when you notice visual problems with the outline switch to "adapted". outline_table_of_contents_style: "rewritten", // Use own rewritten footnote display implementation. // This may be less stable than the built-in footnote display impl. // Thus when having problems with the rendering of footnote disable this option. footnote_rewritten_fix_alignment: true, // When footnote_rewritten_fix_alignment is true, add a hanging intent to multiline footnotes. footnote_rewritten_fix_alignment_hanging_indent: true, // Use 'Roboto Slab' instead of 'Robot' font for figure captions. 
figure_caption_font_roboto_slab: true, // Figures have the numbering <chapter-nr>.<figure-nr> figure_numbering_per_chapter: true, // Equations have the numbering <chapter-nr>.<equation-nr> // @todo This seems to increase the equation number in steps of 2 instead of one equation_numbering_per_chapter: false, ) ``` == TUDa Accent Color List <sec_usage_accentcolors> The list of colors that can be used in the template argument `accentcolor`: #grid( columns: auto, rows: auto, for (key, color) in tuda_colors { box( inset: 3pt, width: 100% / 3, box( height: auto, inset: 4pt, outset: 0pt, width: 100%, fill: rgb(color) )[ #set align(center) #key ] ) } ) = Glossary #print-glossary(( // minimal term (key: "kuleuven", short: "KU Leuven"), // a term with a long form (key: "unamur", short: "UNamur", long: "Université de Namur"), // no long form here (key: "kdecom", short: "KDE Community", desc:"An international team developing and distributing Open Source software."), // a full term with description containing markup ( key: "oidc", short: "OIDC", long: "OpenID Connect", desc: [OpenID is an open standard and decentralized authentication protocol promoted by the non-profit #link("https://en.wikipedia.org/wiki/OpenID#OpenID_Foundation")[OpenID Foundation].]), ), show-all: true )
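To guard against a typo in the `accentcolor` code before compiling, one can look the code up in the `tuda_colors` dictionary that the example above already iterates over. This is a sketch under the assumption that it is compiled from the same directory layout as the example.

```typst
// Sketch: validate an accent color code against the template's color table.
#import "templates/tudapub/tudacolors.typ": tuda_colors
#let accent = "9c"
#assert(accent in tuda_colors, message: "unknown accent color code")
#box(width: 2cm, height: 1cm, fill: rgb(tuda_colors.at(accent)))
```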
https://github.com/tony-rsa/thonifho.muhali.cv
https://raw.githubusercontent.com/tony-rsa/thonifho.muhali.cv/main/src/en.typ
typst
MIT License
#import "template.typ": * #show: layout #let lang = "en" #cvHeader(hasPhoto: true, align: left, lang) #autoImport("experience", lang) #autoImport("education", lang) #autoImport("projects", lang) #autoImport("skills", lang)
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/keyle/0.1.1/README.md
markdown
Apache License 2.0
# keyle <p align="center"> <a href="https://github.com/magicwenli/keyle/blob/main/doc/keyle.pdf"> <img alt="Documentation" src="https://img.shields.io/website?down_message=offline&label=manual&up_color=007aff&up_message=online&url=https://github.com/magicwenli/keyle/blob/main/doc/keyle.pdf" /> </a> <a href="https://github.com/magicwenli/keyle/blob/main/LICENSE"> <img alt="MIT License" src="https://img.shields.io/badge/license-MIT-brightgreen"> </a> </p> A simple way to style keyboard shortcuts in your documentation. This package was inspired by [auth0/kbd](https://auth0.github.io/kbd/) and [dogezen/badgery](https://github.com/dogezen/badgery). Send them respect and love. ## Usage Please see the [keyle.pdf](https://github.com/magicwenli/keyle/blob/main/doc/keyle.pdf) for more documentation. `keyle` is imported using: ```typst #import "@preview/keyle:0.1.1" ``` ### Example ![About](doc/keyle.png) ## License MIT
https://github.com/kdog3682/typkit
https://raw.githubusercontent.com/kdog3682/typkit/main/0.1.0/src/eval.typ
typst
#import "validation.typ": is-content #import "ao.typ": create-scope #import "str-utils.typ": has-extension, sub #import "marks.typ" #let markup(x, ..modules) = { let scope = create-scope(..modules) if is-content(x) { return x } let s = if has-extension(x) { read(x) } else { x } return eval(str(s), mode: "markup", scope: scope) } #let markup-factory(..modules) = { let scope = create-scope(..modules) let markup(x) = { if is-content(x) { return x } let s = if has-extension(x) { read(x) } else { str(x) } return eval(s, mode: "markup", scope: scope) } return markup } #let fix-math(s) = { let ref = ( "*": "dot", "=": "equals", ) let callback(m) = { return " " + ref.at(m.captures.at(0)) + " " } return sub(str(s), " +([*=]) +", callback) } #let mathup(s) = { if is-content(s) { return s } return eval(fix-math(s), mode: "math", scope: dictionary(marks.math)) }
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/text/quotes-02.typ
typst
Other
// Test sentences with numbers and apostrophes. The 5'11" 'quick' brown fox jumps over the "lazy" dog's ear. He said "I'm a big fella."
https://github.com/SkiFire13/typst-prooftree
https://raw.githubusercontent.com/SkiFire13/typst-prooftree/master/prooftree.typ
typst
MIT License
#let prooftree( spacing: ( horizontal: 1em, vertical: 0.5em, lateral: 0.5em, ), label: ( // TODO: split offset into horizontal and vertical offset: -0.1em, side: left, padding: 0.2em, ), line-stroke: 0.5pt, ..rules ) = context { // Check parameters and compute normalized settings let settings = { // Check basic validity of `rules`. if rules.pos().len() == 0 { panic("The `rules` argument cannot be empty.") } // Check the types of the parameters. assert( type(spacing) == "dictionary", message: "The value `" + repr(spacing) + "` of the `spacing` argument was expected" + "to have type `dictionary` but instead had type `" + type(spacing) + "`." ) assert( type(label) == "dictionary", message: "The value `" + repr(label) + "` of the `label` argument was expected" + "to have type `dictionary` but instead had type `" + type(label) + "`." ) assert( type(line-stroke) == "length", message: "The value `" + repr(line-stroke) + "` of the `line-stroke` argument was expected" + "to have type `length` but instead had type `" + type(line-stroke) + "`." ) // Check validity of `spacing`'s keys. for (key, value) in spacing { if key not in ("horizontal", "vertical", "lateral", "h", "v", "l") { panic("The key `" + key + "` in the `spacing` argument `" + repr(spacing) + "` was not expected.") } if type(value) != "length" { panic( "The value `" + repr(value) + "` of the key `" + key + "` in the `spacing` argument `" + repr(spacing) + "` was expected to have type `length` but instead had type `" + type(value) + "`." ) } } // Check exclusivity of `spacing`'s keys. let mutually_exclusive(key1, key2, keys) = { assert( key1 not in keys or key2 not in keys, message: "The keys `" + key1 + "` and `" + key2 + "` in the `spacing` argument `" + repr(spacing) + "` are mutually exclusive." ) } mutually_exclusive("horizontal", "h", spacing.keys()) mutually_exclusive("vertical", "v", spacing.keys()) mutually_exclusive("lateral", "l", spacing.keys()) // Check validity of `label`'s keys. let expected = ("offset": "length", "side": "alignment", "padding": "length") for (key, value) in label { if key not in expected { panic("The key `" + key + "` in the `label` argument `" + repr(label) + "` was not expected.") } if type(value) != expected.at(key) { panic( "The value `" + repr(value) + "` of the key `" + key + "` in the `label` argument `" + repr(label) + "` was expected to have type `" + type.at(key) + "` but instead had type `" + type(value) + "`." ) } } if "side" in label { assert( label.side == left or label.side == right, message: "The value for the key `side` in the argument `label` can only be either " + "`left` (default) or `right`, but instead was `" + repr(label.side) + "`." ) } ( spacing: ( horizontal: spacing.at("horizontal", default: spacing.at("h", default: 1.5em)).to-absolute(), vertical: spacing.at("vertical", default: spacing.at("v", default: 0.5em)).to-absolute(), lateral: spacing.at("lateral", default: spacing.at("l", default: 0.5em)).to-absolute(), ), label: ( offset: label.at("offset", default: -0.1em).to-absolute(), side: label.at("side", default: left), padding: label.at("padding", default: 0.2em).to-absolute(), ), line-stroke: line-stroke.to-absolute(), ) } // Holds the current "pending" rules, i.e. 
those without a parent let stack = () // Holds all the measures let layouts = () // First pass: compute the layout of each rule given the one of its children for (i, rule) in rules.pos().enumerate() { let to_pop = rule.__prooftree_to_pop let measure_func = rule.__prooftree_measure_func assert( to_pop <= stack.len(), message: "The rule `" + repr(rule.__prooftree_raw) + "` was expecting at least " + str(to_pop) + " rules in the stack, but only " + str(stack.len()) + " were present." ) // Remove the children from the stack let children = stack.slice(stack.len() - to_pop) stack = stack.slice(0, stack.len() - to_pop) // Compute the layout and push let layout = measure_func(i, settings, children) stack.push(layout) layouts.push(layout) } assert( stack.len() == 1, message: "Some rule remained unmatched: " + str(stack.len()) + " roots were found but only 1 was expected." ) let last = stack.pop() let content = { let offsets = range(rules.pos().len()).map(_ => (0pt, 0pt)) // Second pass: backward draw each rule and compute offset of children for (i, rule) in rules.pos().enumerate().rev() { let (dx, dy) = offsets.at(i) let layout = layouts.at(i) // Update the offsets of the children for (j, cdx, cdy) in layout.at("children_offsets", default: ()) { offsets.at(j) = (dx + cdx, dy + cdy) } // Draw at the correct offset let draw_func = rule.__prooftree_draw_func place(left + bottom, dx: dx, dy: -dy, draw_func(settings, layout)) } } block(width: last.width, height: last.height, content) } #let axiom(label: none, body) = { // Check arguments { // Check the type of `label`. assert( type(label) in ("string", "content", "none"), message: "The type of the `label` argument `" + repr(label) + "` was expected to be " + "`none`, `string` or `content` but was instead `" + type(label) + "`." ) } // TODO: allow the label to be aligned on left, right or center (default and current). 
( __prooftree_raw: body, __prooftree_to_pop: 0, __prooftree_measure_func: (i, settings, children) => { // Compute the size of the body let body_size = measure(body) let body_width = body_size.width.to-absolute() let body_height = body_size.height.to-absolute() // Compute width of the base (including space) let base_width = body_width + 2 * settings.spacing.lateral // Update layout if a label is present let (width, height) = (base_width, body_height) let base_side = 0pt let (label_left, label_bottom) = (0pt, 0pt) if label != none { // Compute the size of the label let label_size = measure(label) let label_width = label_size.width let label_height = label_size.height // Update width and offsets from the left width = calc.max(base_width, label_width) base_side = (width - base_width) / 2 label_left = (width - label_width) / 2 // Compute bottom offset and update height label_bottom = height + 1.5 * settings.spacing.vertical height = label_bottom + label_height } return ( index: i, width: width, height: height, base_left: base_side, base_right: base_side, main_left: base_side, main_right: base_side, // Extra for draw body_left: base_side + settings.spacing.lateral, label_left: label_left, label_bottom: label_bottom, ) }, __prooftree_draw_func: (settings, l) => { // Draw body place(left + bottom, dx: l.body_left, body) // Draw label if label != none { place(left + bottom, dx: l.label_left, dy: -l.label_bottom, label) } } ) } #let rule( n: 1, label: none, root ) = { // Check arguments { // Check validity of the `n` parameter assert( type(n) == "integer", message: "The type of the `n` argument `" + repr(n) + "` was expected to be " + "`integer` but was instead `" + type(n) + "`." ) // Check the type of `label`. assert( type(label) in ("string", "dictionary", "content", "none"), message: "The type of the `label` argument `" + repr(label) + "` was expected to be " + "`none`, `string`, `content` or `dictionary` but was instead `" + type(label) + "`." ) // If the type of `label` was string then it's good, otherwise we need to check its keys. if type(label) == "dictionary" { for (key, value) in label { // TODO: maybe consider allowing `top`, `top-left` and `top-right` if `rule(n: 0)` gets changed. if key not in ("left", "right") { panic("The key `" + key + "` in the `label` argument `" + repr(label) + "` was not expected.") } if type(value) not in ("string", "content") { panic( "The value `" + repr(value) + "` of the key `" + key + "` in the `label` argument `" + repr(label) + "` was expected to have type `string` or `content` but instead had type `" + type(value) + "`." 
) } } } } ( __prooftree_raw: root, __prooftree_to_pop: n, __prooftree_measure_func: (i, settings, children) => { let width(it) = measure(it).width.to-absolute() let height(it) = measure(it).height.to-absolute() let label = label if type(label) == "none" { label = (left: none, right: none) } if type(label) in ("string", "content") { label = ( left: if settings.label.side == left { label } else { none }, right: if settings.label.side == right { label } else { none } ) } label = ( left: label.at("left", default: none), right: label.at("right", default: none), ) // Size of root let root_width = width(root) let root_height = height(root) // Width of base, which includes spacing as well let base_width = 2 * settings.spacing.lateral + root_width // Bottom offset of the line and children let line_bottom = root_height + settings.spacing.vertical let children_bottom = line_bottom + settings.spacing.vertical // Left/right offset of bases of extreme children let (child_base_left, child_base_right) = (0pt, 0pt) if n != 0 { child_base_left = children.first().base_left child_base_right = children.last().base_right } // Width and height of children, and width of their combined bases let children_width = children .map(c => c.width) .intersperse(settings.spacing.horizontal) .sum() let children_height = children.map(c => c.height).fold(0pt, calc.max) let children_base_width = children_width - child_base_left - child_base_right // Width of the line let line_width = calc.max(children_base_width, base_width) // Left/right offsets of lateral children main let (child_main_left, child_main_right) = (0pt, 0pt) if n != 0 { child_main_left = children.first().main_left child_main_right = children.last().main_right } // Offset of bases from line start (same for left/right) let base_from_line = (line_width - base_width) / 2 let children_base_from_line = (line_width - children_base_width) / 2 // Space for labels let (label_left_width, label_right_width) = (0pt, 0pt) let (label_left_height, label_right_height) = (0pt, 0pt) if label.left != none { label_left_width = width(label.left) + settings.label.padding label_left_height = height(label.left) } if label.right != none { label_right_width = width(label.right) + settings.label.padding label_right_height = height(label.right) } // Left/right offsets of line = max of labels and children main let line_left = calc.max(label_left_width, child_base_left - children_base_from_line) let line_right = calc.max(label_right_width, child_base_right - children_base_from_line) // Left/right offsets of base let base_left = line_left + base_from_line let base_right = line_right + base_from_line // Left/right offsets of children let children_left = line_left + children_base_from_line - child_base_left let children_right = line_right + children_base_from_line - child_base_right // Left/right offsets of main let main_left = calc.min(line_left, children_left + child_main_left) let main_right = calc.min(line_right, children_right + child_main_right) // Full width and height let width = line_left + line_width + line_right let height = children_bottom + children_height // Incrementally compute the relative offset of each child let children_offsets = () for c in children { children_offsets.push((c.index, children_left, children_bottom)) children_left += c.width + settings.spacing.horizontal } ( index: i, width: width, height: height, base_left: base_left, base_right: base_right, main_left: main_left, main_right: main_right, children_offsets: children_offsets, // Extra for draw label: label, 
root_left: base_left + settings.spacing.lateral, line_left: line_left, line_bottom: line_bottom, line_width: line_width, label_left: line_left - label_left_width, label_right: line_left + line_width + settings.label.padding, label_left_bottom: root_height + settings.spacing.vertical + settings.line-stroke / 2 - label_left_height / 2 - settings.label.offset, label_right_bottom: root_height + settings.spacing.vertical + settings.line-stroke / 2 - label_right_height / 2 - settings.label.offset, ) }, __prooftree_draw_func: (settings, l) => { // Draw root content place(left + bottom, dx: l.root_left, root) // Draw line place(left + bottom, dx: l.line_left, dy: -l.line_bottom, line(length: l.line_width, stroke: settings.line-stroke)) // Draw labels if l.label.left != none { place(left + bottom, dx: l.label_left, dy: -l.label_left_bottom, l.label.left) } if l.label.right != none { place(left + bottom, dx: l.label_right, dy: -l.label_right_bottom, l.label.right) } } ) }
https://github.com/typst-community/harbinger
https://raw.githubusercontent.com/typst-community/harbinger/main/manual.typ
typst
MIT License
#import "@preview/tidy:0.2.0" #import "@preview/harbinger:1.0.0" #set text(font: "New Computer Modern Sans") #align(center)[ #text(24pt)[*harbinger*] #v(1em, weak:true) #text(12pt)[A package for shadow boxes in Typst.] ] #show terms.item: it => [- #par(hanging-indent: 1em)[*#it.term:* #it.description]] #show heading: it => [ #if it.level==1 and it.numbering!=none { pagebreak(weak: true) } #if it.level > 2 { it.body }else { it } ] #let docs = tidy.parse-module((read("src/shadow-box.typ"),read("src/fast-shadow-box.typ")).join(), scope: (harbinger:harbinger) ) #tidy.show-module(docs, show-outline:false, sort-functions:none)
https://github.com/Lslightly/TypstTemplates
https://raw.githubusercontent.com/Lslightly/TypstTemplates/main/templates/assignment.typ
typst
MIT License
// The project function defines how your document looks. // It takes your content and some metadata and formats it. // Go ahead and customize it to your liking! #import "font.typ": * #import "code.typ": * #let project(title: "", authors: (), body) = { // Set the document's basic properties. set document(author: authors.map(a => a.name), title: title) set page(numbering: "1", number-align: center) set text(font: 字体.楷体, lang: "zh") set heading(numbering: (..nums) => if nums.pos().len() == 1 { [题] } else { (nums.pos().slice(1).map(str).join(".")+")") } ) // Title row. align(center)[ #block(text(weight: 700, 1.75em, title)) ] // Author information. pad( top: 0.5em, bottom: 0.5em, x: 2em, grid( columns: (1fr,) * calc.min(3, authors.len()), gutter: 1em, ..authors.map(author => align(center)[ *#author.name* \ #author.email ]), ), ) // Main body. set par(justify: true) body }
https://github.com/liuguangxi/fractusist
https://raw.githubusercontent.com/liuguangxi/fractusist/main/tests/test-hilbert-curve.typ
typst
MIT License
#set document(date: none) #import "/src/lib.typ": * #set page(margin: 1cm) = n = 1 #align(center)[ #hilbert-curve(1, step-size: 50) ] = n = 2 #align(center)[ #hilbert-curve(2, step-size: 30, stroke-style: blue) ] = n = 3 #align(center)[ #hilbert-curve(3, step-size: 30, stroke-style: red + 4pt) ] = n = 4 #align(center)[ #hilbert-curve(4, step-size: 15, stroke-style: stroke(paint: gray, thickness: 5pt, cap: "round", join: "round")) ] #pagebreak(weak: true) = n = 5 #align(center)[ #hilbert-curve(5, step-size: 8, stroke-style: stroke(paint: purple, thickness: 2pt, cap: "square")) ] = n = 6 #align(center)[ #hilbert-curve(6, step-size: 6, stroke-style: stroke(paint: gradient.linear(..color.map.crest, angle: 45deg), thickness: 4pt, cap: "square")) ] #pagebreak(weak: true)
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/metro/0.1.0/src/impl/qty.typ
typst
Apache License 2.0
#import "num.typ": num #import "unit.typ": unit #let qty( number, unt, e: none, pm: none, allow-quantity-breaks: false, ..options ) = { let result = { num(number, e: e, pm: pm, ..options) $space.thin$ unit(unt, ..options) } return if allow-quantity-breaks { result } else { box(result) } }
https://github.com/LDemetrios/Typst4k
https://raw.githubusercontent.com/LDemetrios/Typst4k/master/src/test/resources/suite/foundations/str.typ
typst
// Test the string methods. --- str-constructor --- // Test conversion to string. #test(str(123), "123") #test(str(123, base: 3), "11120") #test(str(-123, base: 16), "−7b") #test(str(9223372036854775807, base: 36), "1y2p0ij32e8e7") #test(str(50.14), "50.14") #test(str(10 / 3).len() > 10, true) --- str-from-float --- // Test the `str` function with floats. #test(str(12.0), "12") #test(str(3.14), "3.14") #test(str(1234567890.0), "1234567890") #test(str(0123456789.0), "123456789") #test(str(0.0), "0") #test(str(-0.0), "0") #test(str(-1.0), "−1") #test(str(-9876543210.0), "−9876543210") #test(str(-0987654321.0), "−987654321") #test(str(-3.14), "−3.14") #test(str(4.0 - 8.0), "−4") --- str-from-decimal --- // Test the `str` function with decimals. #test(str(decimal("12")), "12") #test(str(decimal("12.0")), "12.0") #test(str(decimal("3.14")), "3.14") #test(str(decimal("1234567890.0")), "1234567890.0") #test(str(decimal("0123456789.0")), "123456789.0") #test(str(decimal("0.0")), "0.0") #test(str(decimal("-0.0")), "0.0") #test(str(decimal("-1.0")), "−1.0") #test(str(decimal("-9876543210.0")), "−9876543210.0") #test(str(decimal("-0987654321.0")), "−987654321.0") #test(str(decimal("-3.14")), "−3.14") #test(str(decimal("-3.9191919191919191919191919195")), "−3.9191919191919191919191919195") #test(str(decimal("5.0000000000")), "5.0000000000") #test(str(decimal("4.0") - decimal("8.0")), "−4.0") #test(str(decimal("4") - decimal("8")), "−4") --- str-from-int --- // Test the `str` function with integers. #test(str(12), "12") #test(str(1234567890), "1234567890") #test(str(0123456789), "123456789") #test(str(0), "0") #test(str(-0), "0") #test(str(-1), "−1") #test(str(-9876543210), "−9876543210") #test(str(-0987654321), "−987654321") #test(str(4 - 8), "−4") --- str-constructor-bad-type --- // Error: 6-8 expected integer, float, decimal, version, bytes, label, type, or string, found content #str([]) --- str-constructor-bad-base --- // Error: 17-19 base must be between 2 and 36 #str(123, base: 99) --- str-constructor-unsupported-base --- // Error: 18-19 base is only supported for integers #str(1.23, base: 2) --- str-from-and-to-unicode --- // Test the unicode function. #test(str.from-unicode(97), "a") #test(str.to-unicode("a"), 97) --- str-from-unicode-bad-type --- // Error: 19-22 expected integer, found content #str.from-unicode([a]) --- str-to-unicode-bad-type --- // Error: 17-21 expected exactly one character #str.to-unicode("ab") --- str-from-unicode-negative --- // Error: 19-21 number must be at least zero #str.from-unicode(-1) --- str-from-unicode-bad-value --- // Error: 2-28 0x110000 is not a valid codepoint #str.from-unicode(0x110000) // 0x10ffff is the highest valid code point --- string-len --- // Test the `len` method. #test("Hello World!".len(), 12) --- string-first-and-last --- // Test the `first` and `last` methods. #test("Hello".first(), "H") #test("Hello".last(), "o") #test("🏳️‍🌈A🏳️‍⚧️".first(), "🏳️‍🌈") #test("🏳️‍🌈A🏳️‍⚧️".last(), "🏳️‍⚧️") --- string-first-empty --- // Error: 2-12 string is empty #"".first() --- string-last-empty --- // Error: 2-11 string is empty #"".last() --- string-at --- // Test the `at` method. #test("Hello".at(1), "e") #test("Hello".at(4), "o") #test("Hello".at(-1), "o") #test("Hello".at(-2), "l") #test("Hey: 🏳️‍🌈 there!".at(5), "🏳️‍🌈") --- string-at-default --- // Test `at`'s 'default' parameter. 
#test("z", "Hello".at(5, default: "z")) --- string-at-not-a-char-boundary --- // Error: 2-14 string index 2 is not a character boundary #"🏳️‍🌈".at(2) --- string-at-out-of-bounds --- // Error: 2-15 no default value was specified and string index out of bounds (index: 5, len: 5) #"Hello".at(5) --- string-at-at-default-other-type --- #test("Hello".at(5, default: (a: 10)), (a: 10)) --- string-slice --- // Test the `slice` method. #test("abc".slice(1, 2), "b") #test("abc🏡def".slice(2, 7), "c🏡") #test("abc🏡def".slice(2, -2), "c🏡d") #test("abc🏡def".slice(-3, -1), "de") --- string-slice-not-a-char-boundary --- // Error: 2-21 string index -1 is not a character boundary #"🏳️‍🌈".slice(0, -1) --- string-clusters --- // Test the `clusters` and `codepoints` methods. #test("abc".clusters(), ("a", "b", "c")) #test("abc".clusters(), ("a", "b", "c")) #test("🏳️‍🌈!".clusters(), ("🏳️‍🌈", "!")) --- string-codepoints --- #test("🏳️‍🌈!".codepoints(), ("🏳", "\u{fe0f}", "\u{200d}", "🌈", "!")) --- string-contains --- // Test the `contains` method. #test("abc".contains("b"), true) #test("b" in "abc", true) #test("1234f".contains(regex("\d")), true) #test(regex("\d") in "1234f", true) #test("abc".contains("d"), false) #test("1234g" in "1234f", false) #test("abc".contains(regex("^[abc]$")), false) #test("abc".contains(regex("^[abc]+$")), true) --- string-starts-with --- // Test the `starts-with` and `ends-with` methods. #test("Typst".starts-with("Ty"), true) #test("Typst".starts-with(regex("[Tt]ys")), false) #test("Typst".starts-with("st"), false) --- string-ends-with --- #test("Typst".ends-with("st"), true) #test("Typst".ends-with(regex("\d*")), true) #test("Typst".ends-with(regex("\d+")), false) #test("Typ12".ends-with(regex("\d+")), true) #test("typst13".ends-with(regex("1[0-9]")), true) #test("typst113".ends-with(regex("1[0-9]")), true) #test("typst23".ends-with(regex("1[0-9]")), false) --- string-find-and-position --- // Test the `find` and `position` methods. #let date = regex("\d{2}:\d{2}") #test("Hello World".find("World"), "World") #test("Hello World".position("World"), 6) #test("It's 12:13 now".find(date), "12:13") #test("It's 12:13 now".position(date), 5) --- string-match --- // Test the `match` method. #test("Is there a".match("for this?"), none) #test( "The time of my life.".match(regex("[mit]+e")), (start: 4, end: 8, text: "time", captures: ()), ) --- string-matches --- // Test the `matches` method. #test("Hello there".matches("\d"), ()) #test("Day by Day.".matches("Day"), ( (start: 0, end: 3, text: "Day", captures: ()), (start: 7, end: 10, text: "Day", captures: ()), )) // Compute the sum of all timestamps in the text. #let timesum(text) = { let time = 0 for match in text.matches(regex("(\d+):(\d+)")) { let caps = match.captures time += 60 * int(caps.at(0)) + int(caps.at(1)) } str(int(time / 60)) + ":" + str(calc.rem(time, 60)) } #test(timesum(""), "0:0") #test(timesum("2:70"), "3:10") #test(timesum("1:20, 2:10, 0:40"), "4:10") --- string-replace --- // Test the `replace` method with `Str` replacements. 
#test("ABC".replace("", "-"), "-A-B-C-") #test("Ok".replace("Ok", "Nope", count: 0), "Ok") #test("to add?".replace("", "How ", count: 1), "How to add?") #test("AB C DEF GH J".replace(" ", ",", count: 2), "AB,C,DEF GH J") #test("Walcemo" .replace("o", "k") .replace("e", "o") .replace("k", "e") .replace("a", "e"), "Welcome" ) #test("123".replace(regex("\d$"), "_"), "12_") #test("123".replace(regex("\d{1,2}$"), "__"), "1__") --- string-replace-function --- // Test the `replace` method with `Func` replacements. #test("abc".replace(regex("[a-z]"), m => { str(m.start) + m.text + str(m.end) }), "0a11b22c3") #test("abcd, efgh".replace(regex("\w+"), m => { upper(m.text) }), "ABCD, EFGH") #test("hello : world".replace(regex("^(.+)\s*(:)\s*(.+)$"), m => { upper(m.captures.at(0)) + m.captures.at(1) + " " + upper(m.captures.at(2)) }), "HELLO : WORLD") #test("hello world, lorem ipsum".replace(regex("(\w+) (\w+)"), m => { m.captures.at(1) + " " + m.captures.at(0) }), "world hello, ipsum lorem") #test("hello world, lorem ipsum".replace(regex("(\w+) (\w+)"), count: 1, m => { m.captures.at(1) + " " + m.captures.at(0) }), "world hello, lorem ipsum") #test("123 456".replace(regex("[a-z]+"), "a"), "123 456") #test("abc".replace("", m => "-"), "-a-b-c-") #test("abc".replace("", m => "-", count: 1), "-abc") #test("123".replace("abc", m => ""), "123") #test("123".replace("abc", m => "", count: 2), "123") #test("a123b123c".replace("123", m => { str(m.start) + "-" + str(m.end) }), "a1-4b5-8c") #test("halla warld".replace("a", m => { if m.start == 1 { "e" } else if m.start == 4 or m.start == 7 { "o" } }), "hello world") #test("aaa".replace("a", m => str(m.captures.len())), "000") --- string-replace-function-bad-type --- // Error: 23-24 expected string, found integer #"123".replace("123", m => 1) --- string-replace-bad-type --- // Error: 23-32 expected string or function, found array #"123".replace("123", (1, 2, 3)) --- string-trim-basic --- // Test the `trim` method; the pattern is not provided. #let str = "Typst, LaTeX, Word, InDesign" #let array = ("Typst", "LaTeX", "Word", "InDesign") #test(str.split(",").map(s => s.trim()), array) #test("".trim(), "") #test(" ".trim(), "") #test("\t".trim(), "") #test("\n".trim(), "") #test("\t \n".trim(), "") #test(" abc ".trim(at: start), "abc ") #test("\tabc ".trim(at: start), "abc ") #test("abc\n".trim(at: end), "abc") #test(" abc ".trim(at: end, repeat: true), " abc") #test(" abc".trim(at: start, repeat: false), "abc") --- string-trim-pattern-str --- // Test the `trim` method; the pattern is a string. #test("aabcaa".trim("a", repeat: false), "abca") #test("aabca".trim("a", at: start), "bca") #test("aabcaa".trim("a", at: end, repeat: false), "aabca") #test(" abc\n".trim("\n"), " abc") #test("whole".trim("whole", at: start), "") --- string-trim-pattern-regex --- // Test the `trim` method; the pattern is a regex. 
#test("".trim(regex(".")), "") #test("123abc456".trim(regex("\d")), "abc") #test("123abc456".trim(regex("\d"), repeat: false), "23abc45") #test("123a4b5c678".trim(regex("\d"), repeat: true), "a4b5c") #test("123a4b5c678".trim(regex("\d"), repeat: false), "23a4b5c67") #test("123abc456".trim(regex("\d"), at: start), "abc456") #test("123abc456".trim(regex("\d"), at: end), "123abc") #test("123abc456".trim(regex("\d+"), at: end, repeat: false), "123abc") #test("123abc456".trim(regex("\d{1,2}$"), repeat: false), "123abc4") #test("hello world".trim(regex(".")), "") #test("12306".trim(regex("\d"), at: start), "") #test("12306abc".trim(regex("\d"), at: start), "abc") #test("whole".trim(regex("whole"), at: start), "") #test("12306".trim(regex("\d"), at: end), "") #test("abc12306".trim(regex("\d"), at: end), "abc") #test("whole".trim(regex("whole"), at: end), "") --- string-trim-at-bad-alignment --- // Error: 17-21 expected either `start` or `end` #"abc".trim(at: left) --- string-split --- // Test the `split` method. #test("abc".split(""), ("", "a", "b", "c", "")) #test("abc".split("b"), ("a", "c")) #test("a123c".split(regex("\d")), ("a", "", "", "c")) #test("a123c".split(regex("\d+")), ("a", "c")) --- string-rev --- // Test the `rev` method. #test("abc".rev(), "cba") #test("ax̂e".rev(), "ex̂a") --- string-unclosed --- // Error: 2-2:1 unclosed string #"hello\"
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/unichar/0.1.0/ucd/block-D7B0.typ
typst
Apache License 2.0
#let data = ( ("<NAME> O-YEO", "Lo", 0), ("<NAME> O-O-I", "Lo", 0), ("<NAME>-A", "Lo", 0), ("<NAME> YO-AE", "Lo", 0), ("<NAME> YO-EO", "Lo", 0), ("<NAME> U-YEO", "Lo", 0), ("<NAME> U-I-I", "Lo", 0), ("<NAME>U-AE", "Lo", 0), ("<NAME>-O", "Lo", 0), ("<NAME> EU-A", "Lo", 0), ("<NAME> EU-EO", "Lo", 0), ("<NAME>ONG EU-E", "Lo", 0), ("<NAME> EU-O", "Lo", 0), ("<NAME> I-YA-O", "Lo", 0), ("<NAME> I-YAE", "Lo", 0), ("<NAME> I-YEO", "Lo", 0), ("<NAME> I-YE", "Lo", 0), ("<NAME> I-O-I", "Lo", 0), ("<NAME> I-YO", "Lo", 0), ("<NAME>-YU", "Lo", 0), ("<NAME> I-I", "Lo", 0), ("<NAME>A-A", "Lo", 0), ("<NAME>A-E", "Lo", 0), (), (), (), (), ("<NAME>-RIEUL", "Lo", 0), ("<NAME>-CHIEUCH", "Lo", 0), ("<NAME>", "Lo", 0), ("<NAME> SSANGTIKEUT-PIEUP", "Lo", 0), ("<NAME> TIKEUT-PIEUP", "Lo", 0), ("<NAME> TIKEUT-SIOS", "Lo", 0), ("<NAME> TIKEUT-SIOS-KIYEOK", "Lo", 0), ("<NAME> TIKEUT-CIEUC", "Lo", 0), ("<NAME> TIKEUT-CHIEUCH", "Lo", 0), ("<NAME> TIKEUT-THIEUTH", "Lo", 0), ("<NAME>-SSANGKIYEOK", "Lo", 0), ("<NAME>UL-KIYEOK-HIEUH", "Lo", 0), ("<NAME>-KHIEUKH", "Lo", 0), ("<NAME>UL-MIEUM-HIEUH", "Lo", 0), ("<NAME> RIEUL-PIEUP-TIKEUT", "Lo", 0), ("<NAME> RIEUL-PIEUP-PHIEUPH", "Lo", 0), ("<NAME> RIEUL-YESIEUNG", "Lo", 0), ("<NAME> RIEUL-YEORINHIEUH-HIEUH", "Lo", 0), ("<NAME>", "Lo", 0), ("<NAME>-NIEUN", "Lo", 0), ("<NAME>-SSANGNIEUN", "Lo", 0), ("<NAME>", "Lo", 0), ("<NAME> MIEUM-PIEUP-SIOS", "Lo", 0), ("<NAME> MIEUM-CIEUC", "Lo", 0), ("H<NAME>ONGSEONG PIEUP-TIKEUT", "Lo", 0), ("H<NAME>ONG PIEUP-RIEUL-PHIEUPH", "Lo", 0), ("H<NAME> PIEUP-MIEUM", "Lo", 0), ("<NAME> SSANGPIEUP", "Lo", 0), ("H<NAME>ONG PIEUP-SIOS-TIKEUT", "Lo", 0), ("H<NAME> PIEUP-CIEUC", "Lo", 0), ("H<NAME> PIEUP-CHIEUCH", "Lo", 0), ("<NAME> SIOS-MIEUM", "Lo", 0), ("HANGUL JONGSEONG SIOS-KAPYEOUNPIEUP", "Lo", 0), ("H<NAME>ONG SSANGSIOS-KIYEOK", "Lo", 0), ("H<NAME>ONG SSANGSIOS-TIKEUT", "Lo", 0), ("H<NAME>ONG SIOS-PANSIOS", "Lo", 0), ("HANGUL JONGSEONG SIOS-CIEUC", "Lo", 0), ("HANGUL JONGSEONG SIOS-CHIEUCH", "Lo", 0), ("HANGUL JONGSEONG SIOS-THIEUTH", "Lo", 0), ("H<NAME>ONG SIOS-HIEUH", "Lo", 0), ("H<NAME> PANSIOS-PIEUP", "Lo", 0), ("H<NAME> PANSIOS-KAPYEOUNPIEUP", "Lo", 0), ("<NAME> YESIEUNG-MIEUM", "Lo", 0), ("H<NAME> YESIEUNG-HIEUH", "Lo", 0), ("<NAME>ONG CIEUC-PIEUP", "Lo", 0), ("<NAME>UC-SSANGPIEUP", "Lo", 0), ("<NAME>", "Lo", 0), ("<NAME>", "Lo", 0), ("<NAME>", "Lo", 0), )
https://github.com/pascalguttmann/typst-template-report-lab
https://raw.githubusercontent.com/pascalguttmann/typst-template-report-lab/main/template-report-lab.typ
typst
MIT License
#let date = datetime.today().display( "[day padding:space] [month repr:short] [year repr:full]" ) #let titlepage-lange( title: "titlepage(title: \"title\")", authors: ( ( name: "titlepage(authors: ((name: \"name\")))", affiliation: "titlepage(authors: ((affiliation: \"affiliation\")))", email: "titlepage(authors: ((email: \"email\")))", ), ( name: "titlepage(authors: ((name: \"name\")))", affiliation: "titlepage(authors: ((affiliation: \"affiliation\")))", email: "titlepage(authors: ((email: \"email\")))", ), ), course: "titlepage(course: \"course\")", lecture: "titlepage(lecture: \"lecture\")", semester: "titlepage(semester: \"semester\")", group: "titlepage(group: \"group\")", date: "titlepage(date: \"date\")", ) = { set text(size: 20pt) set page( header: none, footer: none, columns: 1, fill: none, ) grid( columns: 100%, rows: auto, gutter: 1fr, align: center + horizon, grid.cell( align: right + top, image(width: 5cm, "hfu-logo.png") ), course, lecture, [Semester: #semester], title, [Group No.: #group], for author in authors [ #author.name (Matr. No.: #author.affiliation)\ ], date, [], ) pagebreak(weak: true) } #let outline-contents() = { show outline.entry.where( level: 1 ): it => { v(12pt, weak: true) strong(it) } outline( depth: 3, indent: auto, ) pagebreak(weak: true) } #let conf( title: "Title", authors: (), group: 0, course: "Studycourse", semester: 0, lecture: "Lecture", date: date, titlepage: titlepage-lange, appendix: [], doc ) = { set page( paper: "a4", flipped: false, margin: auto, columns: 1, header: none, footer: none, number-align: center, ) set text( font: "New Computer Modern", size: 11pt, ) set heading( numbering: "1." ) show heading.where(level: 1): it => [ #pagebreak(weak: true) #it ] show raw: set text(font: "New Computer Modern Mono") show raw.where(block: false): box.with( fill: luma(240), inset: (x: 3pt, y: 0pt), outset: (y: 3pt), radius: 2pt, ) show raw.where(block: true): block.with( width: 100%, fill: luma(240), inset: 1em, radius: 4pt, ) titlepage( title: title, authors: authors, group: group, course: course, semester: semester, lecture: lecture, date: date, ) outline-contents() set par( leading: 0.55em, first-line-indent: 1.8em, justify: true, ) set math.equation(numbering: "(1)") doc [= Literature] bibliography( "bibliography.bib", title: none, full: false, style: "ieee", ) set heading( numbering: "A.1.", ) counter(heading).update(0) appendix } #show: doc => conf( title: [Layout Demonstration], authors: ( ( name: "<NAME>", affiliation: "275358", email: "<EMAIL>", ), ( name: "<NAME>", affiliation: "275358", email: "<EMAIL>", ), ), group: -1, course: "Smart Systems", lecture: "Optical Systems Laboratory", semester: -1, date: date, doc, ) = Introduction #lorem(500) = First Heading #lorem(50) #lorem(50) ```bash git commit -m "my msg" # commit ``` There are also inline code blocks: `inline`. == FS Heading #lorem(200) === third level heading ==== fourth level ==== fourth level second time == SS Heading #lorem(150) = Second Heading #lorem(50)
https://github.com/japrozs/resume
https://raw.githubusercontent.com/japrozs/resume/master/resume.typ
typst
#import "template.typ": * // Load CV data from YAML #let cvdata = yaml("resume.yml") #let uservars = ( headingfont: "Linux Libertine", // Set font for headings bodyfont: "EB Garamond", // Set font for body fontsize: 10pt, // 10pt, 11pt, 12pt linespacing: 6pt, showAddress: true, // true/false Show address in contact info showNumber: true, // true/false Show phone number in contact info ) // setrules and showrules can be overridden by re-declaring it here // #let setrules(doc) = { // // Add custom document style rules here // // doc // } #let customrules(doc) = { // Add custom document style rules here set page( paper: "us-letter", // a4, us-letter numbering: "1", number-align: center, // left, center, right margin: 1.25cm, // 1.25cm, 1.87cm, 2.5cm ) // set text(font: "New Computer Modern") doc } #let cvinit(doc) = { doc = setrules(uservars, doc) doc = showrules(uservars, doc) doc = customrules(doc) doc } // Each section function can be overridden by re-declaring it here // #let cveducation = [] // Content #show: doc => cvinit(doc) // #show heading.where(level: 2): it => { // // underline(it.body) // // underline(stroke: 0.5pt, offset: 5pt, )[#upper(text(it.body))] // it // } #cvheading(cvdata, uservars) #grid( columns: (1fr, 1fr), column-gutter: 1.5em, [ #cveducation(cvdata) \ #cvwork(cvdata) \ // #cvaffiliations(cvdata) \ #cvskills(cvdata, isbreakable:true) \ #cvcertificates(cvdata) \ #cvawards(cvdata) ], [ #cvprojects(cvdata) \ #cvcoursework(cvdata) \ #cvopensource(cvdata) // #cvpublications(cvdata) \ // #cvreferences(cvdata) ] ) // Single column layout // #cveducation(cvdata) \ // #cvwork(cvdata) \ // // #cvaffiliations(cvdata) \ // #cvskills(cvdata, isbreakable:true) \ // #cvcertificates(cvdata) \ // #cvawards(cvdata) \ // #cvprojects(cvdata) \ // #cvcoursework(cvdata) \ // #cvopensource(cvdata) // // #cvpublications(cvdata) \ // // #cvreferences(cvdata) #footnote[Code for this resume hosted at https://github.com/japrozs/resume]
https://github.com/Research-Team-Fcode/NodeJS
https://raw.githubusercontent.com/Research-Team-Fcode/NodeJS/main/main.typ
typst
#import "@preview/fletcher:0.4.3" as fletcher: diagram, node, edge #let title = [ NodeJS ] #set heading(numbering: "1.") #show par: set block(spacing: 0.65em) #set par(first-line-indent: 1em, justify: true) #align(center + horizon, text(size: 32pt, weight: 400)[ *#title* ]) #pagebreak() #outline(indent: auto) #pagebreak() = Definition - *Node.js* is an open-source server environment, server-side JavaScript runtime environment. It allows developers to run JavaScript code outside of a web browser and on the server. - *Node.js* runs the V8 JavaScript engine, the core of Google Chrome, outside of the browser. - *Node.js* is free. - *Node.js* uses an event-driven, non-blocking I/O model. - *Node.js* runs on various platforms (Windows, Linux, Unix, Mac OS X, ...) - *Node.js* provides a large ecosystem of modules and libraries, making it easier for developers to build server-side applications with JavaScript. = Use cases *Node.js* is a versatile technology that can be used for a wide range of use cases and there are various libraries and frameworks available to support each use case. Here are some common *Node.js* use cases along with associated libraries or frameworks: == Web servers *Node.js* is widely used in web servers because of its non-blocking I/O model and event-driven architecture. It allows developers to build scalable and efficient web servers that can handle a large number of concurrent connections. - Express.js: A fast and minimalist web framework that allows you to build web applications and APIs. - Koa.js: A modern, lightweight web framework designed for high-performance web applications. - Electronjs: The Electron framework lets you write cross-platform desktop applications using JavaScript, HTML, and CSS. It is based on *Node.js* and Chromium and is used by Visual Studio Code and many other apps. == Real-time applications Due to its event-driven architecture, *Node.js* is well-suited for building real-time applications such as chat applications, collaborative editing tools, and multiplayer games. - Socket.io: A library that enables real-time, bidirectional communication between clients and servers. - Sails.js: A full-featured MVC _(Model-View-Controller)_ framework that includes real-time capabilities. == API development: *Node.js* is commonly used to build RESTful APIs and microservices, providing a lightweight and efficient backend for front-end applications or mobile apps. - Restify: A framework specifically designed for building REST APIs. - Hapi.js: A powerful framework for building APIs and websites that includes support for developer-friendly features like input validation and authentication. == Command-line tools: *Node.js* provides a rich set of APIs for interacting with the file system, network, and operating system, making it an excellent choice for building command-line tools and scripts. - Commander.js: A feature-rich library for building command-line interfaces (CLIs) with *Node.js*. - Inquirer.js: A library for creating interactive command-line interfaces with a wide range of user prompts. == Data streaming: *Node.js* is particularly effective in handling streaming data, such as real-time analytics, file uploads/downloads, and audio/video processing. - Async.js: A utility library that provides powerful functions for handling asynchronous operations. - Fastify: A performant and low-overhead web framework suitable for building efficient applications, including data processing tasks. 
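As a minimal, illustrative sketch of the streaming use case above (the file name and port are placeholders, not taken from any project), a file can be piped to HTTP clients chunk by chunk instead of being buffered in memory:

```js
// Minimal sketch: stream a file over HTTP using only built-in modules.
const fs = require('fs');
const http = require('http');

const server = http.createServer((req, res) => {
  const stream = fs.createReadStream('./access.log'); // placeholder file name

  stream.on('error', () => {
    // Fires e.g. when the file cannot be opened.
    res.statusCode = 500;
    res.end('could not read file');
  });

  res.setHeader('Content-Type', 'text/plain');
  // pipe() forwards chunks as they are read and handles backpressure,
  // so memory use stays flat regardless of the file size.
  stream.pipe(res);
});

server.listen(3000); // placeholder port
```

Because `pipe()` respects backpressure, the same pattern scales from log files to large media downloads without changing the code.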
== IoT applications:
With its lightweight footprint, event-driven architecture, and support for asynchronous programming, *Node.js* is well-suited for building IoT _(Internet of Things)_ applications and controlling embedded devices.
- Johnny-Five: A JavaScript robotics framework for *Node.js* that supports a wide range of devices and platforms.
- Cylon.js: A web-based JavaScript robotics framework for *Node.js* that provides a simple, unified API for interacting with various physical devices.

= Special things about Node.js
Node.js is a powerful, open-source, server-side runtime environment that allows developers to build scalable applications using JavaScript. Here are some special things about Node.js:

== Event-driven architecture and Non-blocking I/O model
One of the most remarkable and standout features of Node.js is its event-driven architecture and non-blocking I/O model. These features offer several notable benefits, such as scalability and responsiveness, that significantly enhance the efficiency and performance of applications.

== Single Language for Frontend and Backend
With Node.js, developers can use JavaScript both on the server side and the client side, which leads to code reusability, reduced complexity, and faster development.

== NPM (Node Package Manager)
Node.js has a built-in package manager called NPM, which hosts thousands of open-source packages and modules. NPM makes it easy for developers to find, install, and manage dependencies for their projects, greatly accelerating the development process.

== Large Ecosystem and Active Community
Node.js has a vibrant and active community that constantly contributes to its growth. This has resulted in a wide array of libraries, frameworks, and toolsets that enhance the capabilities of Node.js. This large ecosystem greatly reduces the development time and effort required for building applications.

== Cross-platform Compatibility
Node.js can run on various platforms, including Windows, macOS, and Linux, making it highly flexible and versatile.

#pagebreak()
= Functionality
== File system:
To handle file operations like creating, reading, deleting, etc., Node.js provides a built-in module called FS (File System).
- Common uses for the File System module:
   - The *fs.readFile()* method is used to read files:
        ```js
        const fs = require('fs');
        fs.readFile('Fcode.html', (err, data) => { if (err) throw err; console.log(data.toString()); });
        ```
   - The *fs.appendFile()* method appends the specified content to a file. If the file does not exist, the file will be created:
        ```js
        const fs = require('fs');
        fs.appendFile('Fcode.txt', 'Adding text to file', (err) => { if (err) throw err; });
        ```
   - The *fs.open()* method takes a FileSystem flag as the second argument; with the flag `'w'` the specified file is opened for writing. If the file does not exist, an empty file is created:
        ```js
        const fs = require('fs');
        fs.open('Fcode.txt', 'w', (err, fd) => { if (err) throw err; });
        ```
   - The *fs.writeFile()* method replaces the specified file and content if it exists. If the file does not exist, a new file, containing the specified content, will be created:
        ```js
        const fs = require('fs');
        fs.writeFile('Fcode.txt', 'Hello and smile', (err) => { if (err) throw err; });
        ```
   - Calling *fs.writeFile()* again overwrites the existing file with the new content:
        ```js
        const fs = require('fs');
        fs.writeFile('Fcode.txt', 'Hello to my crew', (err) => { if (err) throw err; });
        ```
   - The *fs.rename()* method renames the specified file:
        ```js
        const fs = require('fs');
        fs.rename('FIn4.txt', 'Fcode.txt', (err) => { if (err) throw err; });
        ```
   - The *fs.unlink()* method deletes the specified file:
        ```js
        const fs = require('fs');
        fs.unlink('Membername.txt', (err) => { if (err) throw err; });
        ```

#pagebreak()
== Net, HTTP/HTTPS
=== HTTP _(Hypertext Transfer Protocol)_
- HTTP is like a language that your web browser and the website's server use to talk to each other. It's how you get information from the server onto your browser.
- Imagine if everyone spoke English, and a hacker who knows English could easily understand any information you send. That's how HTTP works: everything is in plain text.
- When you visit a website, your browser sends a request to the server, and the server responds with the page you see.
- Features:
  - Plain-text communication.
  - Used for sending HTML documents, images, and videos to your browser.
  - Operates at the application layer of networking.

=== HTTPS _(Hypertext Transfer Protocol Secure)_
- HTTPS is like a secret language. It encrypts the communication between your browser and the server so that hackers (hopefully) can't understand it.
- When you access a bank's website using HTTPS, your data is protected. Even if a hacker intercepts it, they won't understand the encrypted conversation.
- When you visit a secure site (like your online banking), the URL starts with "https://" (e.g., your bank's website).
- Features:
  - Encrypted communication.
  - Boosts your site's ranking on Google.
  - Protects against phishing attacks.
  - Uses SSL certificates for security.

=== Main Differences:
- Encryption:
  - HTTP: No encryption layer.
  - HTTPS: Encryption enabled.
- Data Protection:
  - HTTP: Data is not secure.
  - HTTPS: Data is protected.
- Google Ranking:
  - HTTP: No ranking boost.
  - HTTPS: Boosts your ranking.
- Phishing Protection:
  - HTTP: No protection.
  - HTTPS: Guards against phishing.

#pagebreak()
== Promise, async, await
=== Event loop
- The Node.js event loop is a semi-infinite loop, polling and blocking on the OS until some of a set of file descriptors are ready.
The loop exits when it no longer has any event to wait for - The event loop uses epoll on Linux, kqueue on MacOS and BSD for polling - The ways Nodejs handles polling could be categorized into three cases: - Pollable file descriptors: can be directly waited on, including sockets (net, dgram, http, https, tls, child process pipes, stdin, stdout, stderr) - Time: the next timeout can be directly waited on - Others: using uv thread pool to facilitate polling #figure( caption: "Event loop diagram", )[ #diagram( node-defocus: 0, spacing: (1cm, 1.5cm), edge-stroke: 1pt, crossing-thickness: 5, mark-scale: 70%, node-fill: luma(97%), node-outset: 3pt, { let blob(pos, label, tint: white, ..args) = node( pos, align(center, label), fill: tint.lighten(60%), stroke: 1pt + tint.darken(20%), ..args, ) blob((0, 0), "timers") blob((0, 1), "pending callbacks") blob((0, 2), "idle, repair") blob((0, 3), "poll") blob((1, 3), "incoming: \n connections, data, ...") blob((0, 4), "check") blob((0, 5), "close callbacks") }, { edge((0, 0), (0, 1), "-|>") edge((0, 1), (0, 2), "-|>") edge((0, 2), (0, 3), "-|>") edge((0, 3), (0, 4), "-|>") edge((1, 3), (0, 3), "-|>") edge((0, 4), (0, 5), "-|>") }, ) ] === Promise Nodejs promise provides high-level APIs to add functions to be executed when events occur in the event loop === Async, await - The async function declaration creates a binding of a new async function to a given name. The await keyword is permitted within the function body, enabling asynchronous, promise-based behavior to be written in a cleaner style and avoiding the need to configure promise chains explicitly. - Async, await enables the use of ordinary try/catch blocks around asynchronous code instead of .catch in promise chains #pagebreak() == Worker threads - The worker thread module implements a form of threading that provides parallelism in nodejs - Worker threads are not OS threads. They are distinct child processes, which means they can't directly access the execution context of their parents. - Communication between the main application and worker threads is facilitated by an event-based messaging system - Worker threads are most suitable for CPU-bound operations, consisting of image editing, video editing, cryptography, and complex mathematical operations,… - Example use cases of worker thread module: ```js const { Worker, isMainThread, parentPort, workerData, } = require('node:worker_threads'); if (isMainThread) { const data = { "editor.suggest.snippetsPreventQuickSuggestions": false, "editor.suggest.matchOnWordStartOnly": false, "editor.foldingImportsByDefault": true, "editor.inlineSuggest.enabled": true, "editor.suggest.localityBonus": true, "editor.suggestSelection": "first", "editor.accessibilitySupport": "off", "editor.stickyScroll.enabled": true, "editor.smoothScrolling": true, } const worker = new Worker(__filename, { workerData: data, }); let result; worker.on('message', (data) => { result = data; }) worker.on('exit', () => console.log(result)) } else { const data = workerData; parentPort.postMessage(JSON.stringify(data)); } ``` #pagebreak() == C/C++ addons === V8 Engine - V8 is Google’s open-source high-performance JavaScript and WebAssembly engine, written in C++. It is used in Chrome and in Node.js, among others. - V8 is at the core of Node.js. === Why do we need C++? - You can use existing, proven, and efficient algorithms or libraries already written for C/C++. - You can develop applications that need hardware-level or OS-level operations. 
- You can run CPU-intensive operations much faster in C++ than JavaScript. === C++ addons - Addons are dynamically linked shared objects written in C++. The require() function can load addons as ordinary Node.js modules. Addons provide an interface between JavaScript and C/C++ libraries. - There are three options for implementing addons: Node-API, nan, or direct use of internal V8, libuv, and Node.js libraries. #pagebreak() == WASI - WebAssembly _(abbreviated Wasm)_ is a binary instruction format for a stack-based virtual machine. Wasm is designed as a portable compilation target for programming languages, enabling deployment on the web for client and server applications. - WebAssembly System Interface _(WASI)_ - WASI is a modular system interface for WebAssembly. As described in the initial announcement, it’s focused on security and portability. - Source Code: _lib/wasi.js_ - The WASI API provides an implementation of the WebAssembly System Interface specification. WASI gives WebAssembly applications access to the underlying operating system via a collection of POSIX-like functions. #pagebreak() = Comparison == Javascript/NodeJs vs Golang - Programming Language’s type: - JavaScript: scripting language, object-oriented programming (OOP). - Go compiled language, procedural programming. - Ability to run: - JavaScript/Node.js: runs on many platforms through the JS engine. - Go: compiles directly to machine code for multiple platforms. - Performance: - JavaScript/Node.js: slower because it requires interpretation. - Go: faster thanks to direct compilation. - Multi-threading: - JavaScript/Node.js: js is single-threaded, Node.js supports multi-threading but handles inefficiently. - Go: supports efficient multi-threading in m:n model. - Frontend – Backend - JavaScript/Node.js: developing a true client-server system works really well. - Go: focuses more on the backend, especially for developing high-performance concurrent services on the server. ⇒ In general, Go is specifically designed for server-side, higher performance but more complex. JavaScript is simple and easy for beginners to use. == Javascript/NodeJs vs C\# - Programming Languages's type - JavaScript is a scripting language designed for the web environment. - C\# is a fully-featured object-oriented programming language. - Execution Environment - JavaScript/Node.js runs on the JavaScript V8 Engine. - C\# runs on Microsoft's .NET Framework/Core. - Performance - JavaScript/Node.js is slower because it requires code interpretation. - C\# is faster because it compiles directly to machine code. - Application Scope - JavaScript/Node.js is popular for web/server-side programming. - C\#: More versatile, can be used to develop many different types of applications. ⇒ In general, C\# is widely used to develop applications on Windows such as desktop, web, mobile, games, and business applications. Js is strong in the web field.
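To close with a concrete illustration of the non-blocking I/O model and event loop discussed above (a minimal sketch; it simply reads this script's own file via `__filename`):

```js
const fs = require('fs');

console.log('before');
fs.readFile(__filename, (err, data) => {
  // Runs later, once the read completes and the event loop
  // gets around to invoking the callback.
  if (err) throw err;
  console.log('read finished:', data.length, 'bytes');
});
console.log('after'); // printed before 'read finished'
```

The two synchronous `console.log` calls run to completion first; the callback is queued and executed by the event loop only after the file read finishes, which is exactly why a single thread can keep serving other work while I/O is in flight.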
https://github.com/tlsnotary/docs-mdbook
https://raw.githubusercontent.com/tlsnotary/docs-mdbook/main/research/ghash.typ
typst
#set page(paper: "a4") #set par(justify: true) #set text(size: 12pt) #show link: underline = GHASH We want to compute GHASH MAC in 2PC which is of the form $sum_(k=1)^l H^k dot b_k$, where $H^k, b_k in "GF"(2^128)$. $H$ is split into additive shares for parties $P_A$ and $P_B$, such that $P_A$ knows $H_1$ and $P_B$ knows $H_2$ and $H = H_1 + H_2$. We now need to compute additive shares of powers of $H$. == Functionality $cal(F)_(H^l)$ On input $(H_1)$ from $P_A$ and $H_2$ from $P_B$, the functionality returns all the $H_(1,k)$ to $P_A$ and $H_(2,k)$ to $P_B$ for $k = 2...l$, such that $H_(1,k) + H_(2,k) = (H_1 + H_2)^k$. == Protocols The following protocols all implement the functionality $cal(F)_(H^l)$. All protocols guarantee privacy for $H_1$ and $H_2$, i.e. there is no leakage to the other party. All protocols are implementations with unpredictable errors, that means correctness is *not* guaranteed in the presence of a malicious adversary deviating from the protocol. This is tolerable in the context of TLSNotary. We will assume that $l$, which determines the highest power $H^l$ both parties want to compute is a compile-time constant, so that it does not complicate protocol and performance analysis. When computing bandwidths of protocols, we assume that both parties have access to a sufficient number of pre-distributed random OTs. In order to simplify the computation of rounds, we assume that there is a sufficient number of pre-distributed ROLEs available. This means we ignore rounds for setting up ROLEs, because this can be batched and is needed for every protocol discussed here. The following table gives an overview about the different protocols: #align(center)[ #table( columns: (auto, auto, auto, auto), inset: 10pt, align: horizon + center, [*Protocol*], [*0 Issue*], [*Rounds*], [*Bandwidth*], $Pi_"A2M"$, "yes", [ Off: 0\ On: 1.5\ ], [ Off: 0\ On: 2.1 MB\ ], $Pi_"ROLE + OLE"$, "yes", [ Off: 0.5\ On: 0.5\ ], [ Off: 2.1 MB\ On: 128 bit\ ], $Pi_"ROLE + OLE + Zero"$, "no", [ Off: 0.5\ On: 0.5\ ], [ Off: 6.3 MB\ On: 256 bit\ ], $Pi_"Beaver"$, "no", [ Off: 2\ On: 0.5\ ], [ Off: 4.2 MB\ On: 128 bit\ ], ) ] === A2M Protocol This protocol converts the additive shares $H_"1/2"$ into multiplicative shares $H_"1/2"^*$. Then both parties can locally computer higher powers $H_(1"/"2)^k^*$. Afterwards they convert these higher powers back into additive shares $H_("1/2", k)$. ==== Protocol $Pi_"A2M"^l$ + $P_A$ samples a random field element $r arrow.l "GF"(2^128)$. + Both parties call $cal(F)_"OLE" (r, H_2) -> (x, y)$. So $P_A$ knows $(r, x)$ and $P_B$ knows $(H_2, y)$ and it holds that $r dot H_2 = x + y$. + $P_A$ defines $m = r dot H_1 + x$ and sends $m$ to $P_B$. + $P_A$ defines $H_1^* = r^(-1)$ and $P_B$ defines $H_2^* = m + y$. + Both parties locally compute $H_"1/2"^k^*$ for $k = 2...l$. + Both parties call $cal(F)_"OLE" (H_1^k^*, H_2^k^*) arrow.r (H_"1,k", H_"2,k")$ for $k = 2...l$. + $P_A$ outputs $H_"1,k"$ and $P_B$ outputs $H_"2,k"$. ==== Performance Analysis The protocol has no offline communication, all the communication takes place online with 1.5 rounds (steps 2, 3, 6). The bandwidth of the protocol is $1026 dot (128 + 128^2) + 1026 dot 128 + 128 approx 2.1 "MB"$. === ROLE + OLE Protocol This protocol is nearly identical to the original GHASH construction from #link("https://eprint.iacr.org/2023/964")[XYWY23]. It only addresses the leakage of $H_(1"/"2)$ in the presence of a malicious adversary using $0$ as an input for $cal(F)_"OLE"$. 
Instead of using $cal(F)_"OLE"$ for all powers $k = 1...l$, we replace the first invocation of $cal(F)_"OLE"$ with $cal(F)_"ROLE"$ and then only use $cal(F)_"OLE"$ for $k = 2...l$. The 0 issue is still present for higher powers of $H$, but it can be fixed with the zero check.

==== Protocol $Pi_"ROLE + OLE"^l$
+ Both parties call $cal(F)_"ROLE"$, so that $P_A$ gets $(a_1, x_1)$ and $P_B$ gets $(b_1, y_1)$.
+ $P_A$ defines $(r_A, r_1) := (a_1, x_1)$ and $P_B$ defines $(r_B, r_2) := (b_1, y_1)$.
+ $P_A$ locally computes $r_A^k$ and $P_B$ locally computes $r_B^k$, for $k=2...l$.
+ Both parties call $cal(F)_"OLE" (r_A^k, r_B^k) arrow.r (r_(1,k), r_(2,k))$, so that $P_A$ gets $r_(1,k)$ and $P_B$ gets $r_(2,k)$ for $k = 2...l$.
+ $P_A$ opens $d_1 = H_1 - r_1$ and $P_B$ opens $d_2 = H_2 - r_2$, so that both parties know $d = d_1 + d_2 = (H_1 + H_2) - (r_1 + r_2)$.
+ Define the polynomials $f_k$ over $"GF"(2^128)$, with $f_k (x) := (d + x)^k = sum_(j=0)^k f_(j,k) dot x^j$. $P_A$ locally evaluates and outputs $H_(1,k) = f_k (r_(1,k))$ and $P_B$ locally evaluates and outputs $H_(2,k) = f_k (r_(2,k))$ for $k = 1...l$.

==== Analysis of 0 issue
The OLEs of step 4 are still vulnerable to the 0 issue. This allows a malicious $P_A$ to learn all the $r_(2,k), k = 2...l$ and by that also all the $H_(2,k)$. $P_A$ can then output some arbitrary $s_k in bb(F)$ in step 6, which allows him to completely set all the $H^k$ for $k = 2...l$. However, he will not be able to set $r_(2,1)$, which means he cannot set $H^1$. He is also not able to remove it from $"MAC" = sum_(k=1)^l H^k dot b_k$, if for example some $b_k = b_(k')$, because he would need to know $r_(2,1)$ for that. So in other words, if $"MAC" = "MAC"_1 + "MAC"_2$, then $"MAC"_2$ always contains some private, uncontrollable mask $H_2^1 dot b_1$, which prevents $P_A$ from completely controlling the $"MAC"$. Thus, fixing the 0 issue is optional.

==== Performance Analysis
- The protocol only needs 0.5 offline round (step 4) and 0.5 online round (step 5). This holds even if the zero-check is applied.
- The protocol has an upload/download size of
  - *Offline*:
    - *Without zero-check*: $1026 dot (128 + 128^2) + 1025 dot 128 approx 2.1 "MB"$
    - *With zero-check*: Approximately 2-times overhead, so $approx 6.3 "MB"$
  - *Online*:
    - *Without zero-check*: $128 "bit"$
    - *With zero-check*: $256 "bit"$

=== Beaver Protocol
This protocol is nearly identical to the original GHASH construction from #link("https://eprint.iacr.org/2023/964")[XYWY23]. It only addresses the leakage of $H_(1"/"2)$ in the presence of a malicious adversary using $0$ as an input for $cal(F)_"OLE"$.

Instead of using $cal(F)_"OLE"$, we sample $r = r_1 + r_2$ randomly and compute the higher powers of additive shares with $cal(F)_"Beaver"$. This protocol does not suffer from the 0 issue.

==== Protocol $Pi_"Beaver"^l$
+ Both parties sample a random field element. $P_A$ samples $r_1 arrow.l "GF"(2^128)$ and $P_B$ samples $r_2 arrow.l "GF"(2^128)$.
+ Both parties repeatedly call $cal(F)_"Beaver" (r_(1,k - 1), r_1, r_(2,k - 1), r_2) -> (r_(1, k), r_(2, k))$ for $k = 2...l$.
+ $P_A$ opens $d_1 = H_1 - r_1$ and $P_B$ opens $d_2 = H_2 - r_2$, so that both parties know $d = d_1 + d_2 = (H_1 + H_2) - (r_1 + r_2)$.
+ Define the polynomials $f_k$ over $"GF"(2^128)$, with $f_k (x) := (d + x)^k = sum_(j=0)^k f_(j,k) dot x^j$. $P_A$ locally evaluates and outputs $H_(1,k) = f_k (r_(1,k))$ and $P_B$ locally evaluates and outputs $H_(2,k) = f_k (r_(2,k))$ for $k = 1...l$.
==== Performance Analysis - By using free-squaring in $"GF"(2^128)$ and batching calls to $cal(F)_"Beaver"$, the protocol needs 2 offline rounds (step 2, repeated) and 0.5 online round (step 3). - The protocol has an upload/download size of - *Offline*: $1025 dot (128 + 128^2) + 1025 dot 128 approx 2.1 "MB"$ - *Online*: $128 "bit"$
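The A2M conversion above can also be checked numerically. The sketch below is again a standalone illustration rather than an implementation of the protocol: the OLE is replaced by a dealer that hands $P_A$ a random $x$ and $P_B$ the matching $y$, and $"GF"(2^128)$ is represented in plain (non-reflected) bit order.

```python
import secrets

# GF(2^128) with modulus x^128 + x^7 + x^2 + x + 1, plain (non-reflected) bit order.
_MOD = (1 << 7) | (1 << 2) | (1 << 1) | 1
_MASK = (1 << 128) - 1

def gf_mul(a: int, b: int) -> int:
    res = 0
    while b:
        if b & 1:
            res ^= a
        b >>= 1
        a <<= 1
        if a >> 128:
            a = (a & _MASK) ^ _MOD
    return res

def gf_pow(a: int, e: int) -> int:
    res = 1
    while e:
        if e & 1:
            res = gf_mul(res, a)
        a = gf_mul(a, a)
        e >>= 1
    return res

def gf_inv(a: int) -> int:
    """Fermat inversion: a^(2^128 - 2) for nonzero a."""
    return gf_pow(a, (1 << 128) - 2)

if __name__ == "__main__":
    H1, H2 = secrets.randbits(128), secrets.randbits(128)
    H = H1 ^ H2

    # Steps 1-2: P_A samples a nonzero r; F_OLE(r, H2) outputs x to P_A and y to P_B
    # with r*H2 = x + y. Here a dealer plays the role of F_OLE.
    r = secrets.randbits(128) or 1
    x = secrets.randbits(128)
    y = gf_mul(r, H2) ^ x

    # Steps 3-4: P_A sends m = r*H1 + x; the multiplicative shares are
    # H1* = r^(-1) and H2* = m + y.
    m = gf_mul(r, H1) ^ x
    H1_star = gf_inv(r)
    H2_star = m ^ y
    assert gf_mul(H1_star, H2_star) == H

    # Step 5: higher powers can now be formed locally on each side.
    k = 3
    assert gf_mul(gf_pow(H1_star, k), gf_pow(H2_star, k)) == gf_pow(H, k)
    print("A2M shares recombine to H and to H^k")
```

The check makes the key identity explicit: $H_1^* dot H_2^* = r^(-1) dot (r dot H_1 + x + y) = r^(-1) dot r dot (H_1 + H_2) = H$.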
https://github.com/leesum1/brilliant-cv
https://raw.githubusercontent.com/leesum1/brilliant-cv/master/modules_en/education.typ
typst
// Imports #import "@preview/brilliant-cv:2.0.2": cvSection, cvEntry, hBar #let metadata = toml("../metadata.toml") #let cvSection = cvSection.with(metadata: metadata) #let cvEntry = cvEntry.with(metadata: metadata) #cvSection("Education") #cvEntry( title: [Master of Computer Science], society: [Hangzhou Dianzi University], date: [2022 - 2025], location: [Hangzhou, Zhejiang], logo: image("../src/logos/hdu.png"), description: list([Courses: Operating Systems #hBar() Data Structures #hBar() Computer Organization #hBar() Computer Networks ]), ) #cvEntry( title: [Bachelor of Electronic Science and Technology], society: [Central South University of Forestry and Technology], date: [2018 - 2022], location: [Changsha, Hunan], logo: image("../src/logos/csuft.png"), description: list([Courses: Analog Circuit Design #hBar() Digital Circuit Design #hBar() Embedded Programming ]), )
https://github.com/soul667/typst_template
https://raw.githubusercontent.com/soul667/typst_template/main/基本模板/use.typ
typst
#set text(top-edge: 0.7em, bottom-edge: -0.3em) #set par(leading: 2em) #lorem(6) #lorem(7)
https://github.com/kdog3682/2024-typst
https://raw.githubusercontent.com/kdog3682/2024-typst/main/src/today.typ
typst
#let o = (0,0) #let p34 = (3, 4) #import "@preview/cetz:0.2.0" #cetz.canvas({ import cetz.draw: * let red-circle(pos) = { circle(pos, radius: 5pt, fill: red, stroke: none) } let grid-line(a, b) = { line(a, b, name: "holder", stroke: none) } let arrow(a, b) = { let arrow-attrs = ( mark: ( start: ">", fill: black, length: 3pt, ), stroke: ( paint: black, dash: "dotted" ) ) line(a, b, ..arrow-attrs) } rect( name: "rec", o, p34, stroke: (thickness: 0.5pt, dash: "dotted"), ) rect( name: "half", fill: yellow.lighten(80%), stroke: (thickness: 0.5pt, dash: "dotted"), "rec.north-west", "rec.south" ) grid-line("rec.north", "rec.center") red-circle("rec.north-east") red-circle("rec.center") red-circle("holder.mid") arrow("rec.center", "holder.mid") content(o, [hi]) })
https://github.com/jonatchoum/typst-thesis-template
https://raw.githubusercontent.com/jonatchoum/typst-thesis-template/main/README.md
markdown
# typst-thesis-template Template to write thesis based on the amazing work of eduardz1 [UniTO-typst-template](https://github.com/eduardz1/unito-typst-template) Add support for french, and my needs at CESI and TotalEnergies as an apprentice.
https://github.com/cadojo/correspondence
https://raw.githubusercontent.com/cadojo/correspondence/main/src/vita/src/skills.typ
typst
MIT License
#let skillslist = state("skillslist", ()) #let skill( name, notes, ) = { let title = [ #heading(level: 3, name) #notes ] skillslist.update(current => current + (title,)) } #let skills(header: "Technical Skills") = { locate( loc => { let skillslist = skillslist.final(loc) if skillslist.len() > 0 { heading(level: 2, header) line(length: 100%, stroke: 1pt + black) skillslist.join() } } ) }
https://github.com/Enter-tainer/typstyle
https://raw.githubusercontent.com/Enter-tainer/typstyle/master/docs/limitations.md
markdown
Apache License 2.0
To keep source code valid, typstyle will give up formatting in certain cases. This is a list of what typstyle will not format. ## Overall ### Markup lines Typstyle only formats the code, it does not format the markup lines. It will keep the markup lines as is. Specifically, if a line contains text(`ast::Expr::Text`), the whole line will be kept as is. ### Math mode Overall there is very few formatting in math mode. It is not well implemented. ### @typstyle off Why: It is a directive to turn off formatting. ### When there are block comment child, gives up formatting the whole node. Why: We currently cannot handle block comment in all places. It is hard to handle. ```typst #let f(a, /* they */ b) = if /* are */ a > b { a } /* everywhere */ else { b } ``` ### Multiline raw with single backtick. Why: it is white space dependent ```typst `a b` is not `a b` ``` ### When a child contains # in math mode, gives up formatting the whole node. Why: hash can appear anywhere, we cannot handle it very well. ```typst $f(a+b, size: #1em)$ ``` ### Args in math mode. Why: it works very different. like 2d args, trailing commas ```typst $mat(a,,;,b,;,,c)$ ``` ## Special ### Table Typstyle currently tries to format tables into a rectangular shape. However, it only do such formatting when the table is simple enough. Namely: 1. no comments 2. no spread args 3. no named args, or named args appears before all pos args 4. no `table/grid.vline/hline/cell` 5. `columns` is int or array
https://github.com/mariunaise/HDA-Thesis
https://raw.githubusercontent.com/mariunaise/HDA-Thesis/master/content/SMHD.typ
typst
#import "@preview/drafting:0.2.0": * #import "@preview/glossarium:0.4.1": * = S-Metric Helper Data Method <chap:smhd> A metric based @hda generates helper data at PUF enrollment to provide more reliable results at the reconstruction stage. Each of these metrics correspond to a quantizer with different bounds to lower the risk of bit or symbol errors during reconstruction. For this kind of @hda, the generated metric is used as helper data and thus does not have to be kept secret. == Background Before we turn to a concrete realization of the S-Metric method, let's take a look at its predecessor, the Two-Metric Helper Data Method. /*=== Distribution Independency <sect:dist_independency> The publications for the Two-Metric approach @tmhd1 and @tmhd2, as well as the generalized S-Metric approach @smhd make the assumption, that the PUF readout is zero-mean Gaussian distributed @smhd. We propose, that a Gaussian distributed input for S-Metric quantization is not required for the operation of this quantizing algorithm. Instead, any distribution can be used for input values given, that a CDF exists for that distribution and its parameters are known. As already mentioned in @tilde-domain, this transformation will result in uniformly distributed values, where equi-probable areas in the real domain correspond to equi-distant areas in the Tilde-Domain. Contrary to @tmhd1, @tmhd2 and @smhd, which display relevant areas as equi-probable in a normal distribution, we will use equi-distant areas in a uniform distribution for better understandability. It has to be mentioned, that instead of transforming all values of the PUF readout into the Tilde-Domain, we could also use an inverse CDF to transform the bounds of our evenly spaced areas into the real domain with (normal) distributed values, which can be assessed as remarkably less computationally complex.#margin-note[Das erst später] */ === Two-Metric Helper Data Method <sect:tmhd> The simplest form of a metric-based @hda is the Two-Metric Helper Data Method. Its quantization only yields symbols of 1-bit width and it only uses a single bit of helper data to store the choice of metric. @fig:tmhd_example_enroll and @fig:tmhd_example_reconstruct illustrate an example enrollment and reconstruction process. Consider the marked point the value of the initial measurement and the marked range our margin of error. If we now were to use the original quantizer shown in @fig:tmhd_example_enroll during both the enrollment and the reconstruction phases, we would risk a bit error, because the margin of error overlaps with the lower quantization bound $-a$, which we can call a point of uncertainty. To alleviate this we generated helper data during enrollment as depicted in @fig:tmhd_enroll, we can make use of a different quantizer $cal(R)(1, 2, x)$ whose boundaries do not overlap with the error margin. #scale(x: 90%, y: 90%)[ #figure( grid( columns: (1fr, 1fr), [#figure( include("../graphics/quantizers/two-metric/example_enroll.typ"), caption: [Example enrollment]) <fig:tmhd_example_enroll>], [#figure( include("../graphics/quantizers/two-metric/example_reconstruct.typ"), caption: [Example reconstruction]) <fig:tmhd_example_reconstruct>] ), caption: [Example enrollment and reconstruction of @tmhdt. The window function describes the quantizer used to define the resulting bit. 
The red dot shows a possible @puf readout measurement with its blue marked strip as margin of error.])] Publications @tmhd1 and @tmhd2 find all the relevant bounds for the enrollment and reconstruction phases under the assumption that the PUF readout (our input value $x$) is zero-mean Gaussian distributed. //Because the parameters for symbol width and number of metrics always stays the same, it is easier to calculate #m//argin-note[assume WLOG here] the bounds for 8 equi-probable areas with a standard deviation of $sigma = 1$ first and then multiplying them with the estimated standard deviation of the PUF readout. Because the parameters for symbol width and number of metrics always stay the same, we can -- without loss of generality -- assume the standard deviation as $sigma = 1$ and calculate the bounds for 8 equi-probable areas for this distribution. This is done by finding two bounds $a$ and $b$ such that $ integral_a^b f_X(x) \dx = 1/8 $ This operation yields 9 bounds defining these areas $-infinity$, $-\T1$, $-a$, $-\T2$, $0$, $\T2$, $a$, $\T1$ and $+infinity$. During the enrollment phase, we will use $plus.minus a$ as our quantizing bounds, returning $0$ if the absolute value of $x$ is smaller than $a$ and $1$ otherwise. The corresponding metric is chosen based on the following conditions: $ M = cases( \M1\, x < -a or 0 < x < a, \M2\, -a < x < 0 or a < x )space.en. $ @fig:tmhd_enroll shows the curve of a quantizer $cal(Q)$ that would be used during the Two-Metric enrollment phase. #scale(x: 90%, y: 90%)[ #grid( columns: (1fr, 1fr), [#figure( include("../graphics/quantizers/two-metric/enrollment.typ"), caption: [Two-Metric enrollment]) <fig:tmhd_enroll>], [#figure( include("../graphics/quantizers/two-metric/reconstruction.typ"), caption: [Two-Metric reconstruction]) <fig:tmhd_reconstruct>] ) ] As previously described, each of these metrics corresponds to a different quantizer. In the reconstruction phase, we can use the generated helper data and define a reconstructed bit based on the chosen metric as follows: $ #grid( columns: (1fr, 1fr), align: (center, center), math.equation($\M1: k = cases(0\, x < -\T1 or \T2 < x, 1\, -\T1 < x < \T2),$, block: true, numbering: none), math.equation($\M2: k = cases(0\, x < -\T2 or \T1 < x, 1\, -\T2 < x < \T1).$, block: true, numbering: none) ) $ @fig:tmhd_reconstruct illustrates the basic idea behind the Two-Metric method. Using the helper data, we will move the bounds of the original quantizer (@fig:tmhd_example_enroll) one octile to each side, yielding two new quantizers. The advantage of this method comes from moving the point of uncertainty away from our enrollment-time readout. === #gls("smhdt", long: true) Going on, the Two-Metric Helper Data Method can be generalized as shown in @smhd. This generalization allows for higher-order bit quantization and the use of more than two metrics. A key difference to the Two-Metric approach is the alignment of quantization areas. Methods described in @tmhd1 and @tmhd2 use two bounds for 1-bit quantization, namely $plus.minus a$. In contrast, the method introduced by Fischer in @smhd would look more like a sign-based quantizer if the configuration $cal(Q)(2, 1)$ is used, using only one quantization bound at $x=0$. @fig:smhd_compar1 and @fig:smhd_compar2 illustrate this difference.
#grid( columns: (1fr, 1fr), [#figure( include("../graphics/quantizers/s-metric/s-metric-compar1.typ"), caption: [Two-Metric enrollment] )<fig:smhd_compar1>], [#figure( include("../graphics/quantizers/s-metric/s-metric-compar2.typ"), caption: [S-Metric enrollment with 1-bit configuration] )<fig:smhd_compar2>] ) The generalization consists of two components: - *Higher-order bit quantization* \ We can introduce more steps to our quantizer and use them to extract more than one bit out of our PUF readout. - *More than two metrics* \ Instead of splitting each quantizer into only two equi-probable parts, we can increase the number of metrics at the cost of generating more helper data to increase reliability. == Realization<sect:smhd_implementation> We will now propose a specific realization of the S-Metric Helper Data Method. \ Instead of using the @puf readout directly for @smhdt, we can use a @cdf to transform these values into the tilde domain. The only requirement we would need to meet here is that the @cdf of the probability distribution used is known. This allows us to use equi-distant bounds for the quantizer instead of equi-probable ones. From now on we will use the following syntax for quantizers that use the S-Metric Helper Data Method: $ cal(Q)(S, M, tilde(x)), $ where $S$ defines the number of metrics, $M$ the number of bits and $tilde(x)$ a Tilde-Domain transformed PUF measurement. === Enrollment To enroll our PUF key, we will first need to define the quantizer for higher order bit quantization and helper data generation. Because our transformed PUF readout $tilde(x)$ can be interpreted as a realization of a uniformly distributed variable $tilde(X)$, we can define the width $Delta$ of our quantizer bins as follows: $ Delta = frac(1, 2^M) . $<eq:delta> For example, if we were to extract a symbol with the width of 2 bits from our PUF readout, we would need to evenly space $2^2 = 4$ bins. Using equation @eq:delta, the step size for a 2-bit quantizer would result to: $ Delta' = lr(frac(1, 2^M) mid(|))_(M=2)= frac(1, 4) . $ @fig:smhd_two_bit shows a plot of the resulting quantizer function that would yield symbols with two bits for one measurement $tilde(x)$. #figure( include("../graphics/quantizers/two-bit-enroll.typ"), caption: [2-bit quantizer] )<fig:smhd_two_bit> Right now, this quantizer wouldn't help us generating any helper data. To achieve that, we will need to divide a symbol step -- one, that returns the corresponding quantized symbol - into multiple sub-steps. Using $S$, we can define the step size $Delta_S$ as the division of $Delta$ by $S$: $ Delta_S = frac(Delta, S) = frac(1, 2^M dot S) $<eq:delta_s> /*After this definition #margin-note[Absatz nochmal neu], we need to make an adjustment to our previously defined quantizer function, because we cannot simply return the quantized value based on a quantizer with step size $Delta_s$. That would just increase the amounts of bits we will extract out of one measurement. Instead, we will need to return a tuple, consisting of the quantized symbol and the metric ascertained that we will save as helper data for later. */ We can now redefine our previously defined quantizer function to not only return the quantized symbol, but a tuple consisting of the quantized symbol and the metric ascertained that we will save as helper data for later. Going on in our example, we could choose the amount of our metrics to be 2. 
According to @eq:delta_s, we would then halve our step size: $ Delta'_S = lr(frac(Delta', S)mid(|))_(S=2) = frac(1, 4 dot 2) = frac(1, 8) $ This means we can update our quantizer function with the new step size $Delta'_S = frac(1, 8)$ and redefine its output as a tuple consisting of bit value and helper data. We can visualize the quantizer that we will use during the enrollment phase of a 2-bit 2-metric configuration as depicted in @fig:smhd_2_2_en. #grid( columns: (1fr, 1fr), [#scale(x: 80%, y: 80%)[ #figure( include("../graphics/quantizers/s-metric/2_2_en.typ"), caption: [2-bit 2-metric enrollment] ) <fig:smhd_2_2_en>]], [#scale(x: 80%, y: 80%)[ #figure( include("../graphics/quantizers/s-metric/3_2_en.typ"), caption: [2-bit 3-metric enrollment] ) <fig:smhd_3_2_en>]]) To better demonstrate the generalization to $S$-metrics, @fig:smhd_3_2_en shows a 2-bit quantizer that generates helper data based on three metrics instead of two. In that sense, increasing the number of metrics will increase the number of sub-steps for each symbol. We can now perform the enrollment of a full PUF readout. Each measurement will be quantized with our quantizer $cal(E)$, returning a tuple consisting of the quantized symbol and helper data. $ kappa_i = cal(E)(s, m, tilde(x_i)) = (k, h)_i space.en. $ <eq:smhd_quant> Performing the operation of @eq:smhd_quant for our whole set of measurements will yield a vector of tuples $bold(kappa)$. === Reconstruction We already demonstrated the basic principle of the reconstruction phase in section @sect:tmhd, which showed the advantage of using more than one quantizer during reconstruction. We will denote our repeated measurement of $tilde(x)$, which is subject to a certain error, by $tilde(x^*)$. To perform reconstruction with $tilde(x^*)$, we will first need to find all $S$ quantizers for which we generated the helper data in the previous step and then choose the one corresponding to the saved metric. We have to distinguish the two cases that $S$ is either even or odd:\ If $S$ is even, we need to define $S$ quantizers offset by multiples of $phi$. We can define the ideal positions of the quantizer bounds for a given metric as centered around the centers of that metric. We can find these new bounds graphically as depicted in @fig:smhd_find_bound_graph. We first determine the x-values of the centers of a metric (here M1, as shown with the arrows). We can then place the quantizer steps with step size $Delta$ (@eq:delta) evenly spaced around these points. If the resulting quantizer bound is smaller than $0$ or bigger than $1$, we will either add or subtract $1$ from its value so it stays in the defined range of the tilde domain. With these new points for the vertical steps of $cal(Q)$, we can draw the new quantizer for the first metric in @fig:smhd_found_bound_graph. #grid( columns: (1fr, 0.1fr, 1fr), [#scale(x: 70%, y: 70%)[ #figure( include("../graphics/quantizers/s-metric/2_2_find_quantizer.typ"), caption: [Ideal centers and bounds for the M1 quantizer] )<fig:smhd_find_bound_graph>]], [#align(center)[#align(horizon)[#text(25pt)[$arrow.r.double$]]]], [#scale(x: 70%, y: 70%)[ #figure( include("../graphics/quantizers/s-metric/2_2_found_quantizer1.typ"), caption: [Quantizer for the first metric] )<fig:smhd_found_bound_graph>]] ) As for metric 2, we can apply the same strategy and find the points for the vertical steps to be at $1/16, 5/16, 9/16$ and $13/16$.
This quantizer is shown together with the first-metric quantizer in @fig:smhd_2_2_reconstruction, forming the complete quantizer for the reconstruction phase of a 2-bit 2-metric configuration $cal(R)(2,2,tilde(x))$. #grid( columns: (1fr, 1fr), [ #scale(x: 80%, y: 80%)[ #figure( include("../graphics/quantizers/s-metric/2_2_reconstruction.typ"), caption: [2-bit 2-metric reconstruction quantizer] )<fig:smhd_2_2_reconstruction> ] ], [ #scale(x: 80%, y: 80%)[ #figure( include("../graphics/quantizers/s-metric/3_2_reconstruction.typ"), caption: [2-bit 3-metric reconstruction quantizer], )<fig:smhd_3_2_reconstruction> ] ] ) Analytically, the offset we are applying to $cal(E)(2, 2, tilde(x))$ can be defined as $ Phi = lr(frac(1, 2^M dot S dot 2)mid(|))_(M=2, S=2) = 1 / 16 space.en. $<eq:offset> $Phi$ is the constant that we will multiply by a certain metric index $i in [- S/2, ..., S/2]$ to obtain the metric offset $phi$, which is used to define each of the $S$ different quantizers for reconstruction. //This is also shown in @fig:smhd_2_2_reconstruction, as our quantizer curve is moved $1/16$ to the left and the right. In @fig:smhd_2_2_reconstruction, the two metric indices $i = plus.minus 1$ will be multiplied by $Phi$, yielding two quantizers, one moved $1/16$ to the left and one moved $1/16$ to the right. If an odd number of metrics is given, the offset can still be calculated using @eq:offset. Additionally, we will keep the original quantizer used during enrollment as the quantizer for metric $(S-1)/2$ (@fig:smhd_3_2_reconstruction). To find all metric offsets for values of $S > 3$, we can use @alg:find_offsets. We can calculate $phi$ based on $S$ and $M$ using @eq:offset. The resulting list of offsets is correctly ordered and can be mapped to the corresponding metrics in ascending order.// as we will show in @fig:4_2_offsets and @fig:6_2_offsets. #figure( kind: "algorithm", supplement: [Algorithm], include("../pseudocode/offsets.typ") )<alg:find_offsets> ==== Offset properties<par:offset_props> //#inline-note[This section feels a bit out of place here, I am just not sure where else to put it. It is also written somewhat chaotically.] Before we go on and experimentally test this realization of the S-Metric method, let's look deeper into the properties of the metric offset value $phi$. Comparing @fig:smhd_2_2_reconstruction, @fig:smhd_3_2_reconstruction and their respective values of @eq:offset, we can observe that the offset $Phi$ gets smaller the more metrics we use. #figure( table( columns: (11), inset: 7pt, align: center + horizon, [$S$], [1],[2],[3],[4],[5],[6],[7],[8],[9],[10], [$Phi$],[$1/8$],table.cell(fill: gray)[$1/16$], [$1/24$], table.cell(fill:gray)[$1/32$], [$1/40$], table.cell(fill:gray)[$1/48$], [$1/56$], table.cell(fill:gray)[$1/64$], [$1/72$], table.cell(fill:gray)[$1/80$] ), caption: [Offset values for 2-bit configurations] )<tab:offsets> As previously stated, we will need to define $S$ quantizers, shifted $S/2$ times to the left and $S/2$ times to the right. For example, setting the parameter $S$ to $4$ means we will need to move the enrollment quantizer $2$ times to the left and right. As we can see in @fig:4_2_offsets, the offsets $phi$ for the maximum metric indices $i = plus.minus 2$ are identical to the offsets of a 2-bit 2-metric configuration. In fact, this property carries on for higher even numbers of metrics, as shown in @fig:6_2_offsets.
#grid( columns: (1fr, 1fr), [#figure( table( columns: (5), inset: 7pt, align: center + horizon, [$bold(i)$], [$-2$], [$-1$], [$1$], [$2$], [*Metric*], [M1], [M2], [M3], [M4], [$bold(phi)$], [$-frac(1, 16)$], [$-frac(1, 32)$], [$frac(1, 32)$], [$frac(1, 16)$] ), caption: [2-bit 4-metric offsets] )<fig:4_2_offsets> ], [#figure( table( columns: (7), align: center + horizon, inset: 7pt, [$bold(i)$], [$-3$], [$-2$], [$-1$], [$1$], [$2$], [$3$], [*Metric*], [M1], [M2], [M3], [M4], [M5], [M6], [$bold(phi)$], [$-frac(1, 16)$], [$-frac(1, 24)$], [$-frac(1, 48)$], [$frac(1, 48)$], [$frac(1, 24)$], [$frac(1, 16)$] ), caption: [2-bit 6-metric offsets] )<fig:6_2_offsets> ] ) At $s=6$ metrics, the biggest metric offset we encounter is $phi = 1/16$ at $i = plus.minus 3$.\ This biggest (or maximum) offset is of particular interest to us, as it tells us how far we deviate from the original quantizer used during enrollment. The maximum offset for a 2-bit configuration $phi$ is $1/16$ and we only introduce smaller offsets in between if we use a higher even number of metrics. More formally, we can define the maximum metric offset as follows: $ phi_"max" = frac(floor(frac(S,2)), 2^M dot S dot 2) $ /*More formally, we can define the maximum metric offset for an even number of metrics as follows: $ phi_("max,even") = frac(frac(S,2), 2^M dot S dot 2) = frac(1, 2^M dot 4) $<eq:max_offset_even> Here, we multiply $phi$ from @eq:offset by the maximum metric index $i_"max" = S/2$. Now, if we want to find the maximum offset for a odd number of metrics, we need to modify @eq:max_offset_even, more specifically its numerator. For that reason, we will decrease the parameter $m$ by $1$, that way we will still perform a division without remainder: $ phi_"max,odd" &= frac(frac(S-1, 2), 2^n dot S dot 2)\ &= lr(frac(S-1, 2^M dot S dot 4)mid(|))_(M=2, S=3) = 1/24 $ */ //It is important to note, that $phi_"max,odd"$, unlike $phi_"max,even"$, is dependent on the parameter $S$ as we can see in @tb:odd_offsets. It is important to note, that $phi_"max"$ is dependent on the parameter $S$ if $S$ is an odd number. #figure( table( columns: (5), align: center + horizon, inset: 7pt, [*S*],[3],[5],[7],[9], [$bold(phi_"max,odd")$],[$1/24$],[$1/20$],[$3/56$],[$1/18$] ), caption: [2-bit maximum offsets, odd] )<tb:odd_offsets> The higher $S$ is chosen, the closer we approximate $phi_"max"$ for even choices of $S$, as shown in @eq:offset_limes. This means, while also keeping the original quantizer during the reconstruction phase, the maximum offset for an odd number of metrics will always be smaller than for an even number. $ lim_(S arrow.r infinity) phi_"max,odd" &= frac(floor(frac(S,2)), 2^M dot S dot 2) = frac(S-1, 2^M dot S dot 4) #<eq:offset_limes>\ &= frac(1, 2^M dot 4) = phi_"max,even" $ Because $phi_"max,odd"$ only approximates $phi_"max,even"$ if $S arrow.r infinity$ we can assume, that configurations with an even number of metrics will always perform marginally better than configurations with odd numbers of metrics because the bigger maximum offset allows for better reconstructing capabilities. //#margin-note[Sehr unglücklich mit der formulierung hier] == Improvements<sect:smhd_improvements> The S-Metric Helper Data Method proposed by Fischer in @smhd can be improved by using Gray-coded labels for the quantized symbols instead of naive labelling. 
#align(center)[ #scale(x: 80%, y: 80%)[ #figure( include("../graphics/quantizers/two-bit-enroll-gray.typ"), caption: [Gray Coded 2-bit quantizer] )<fig:2-bit-gray>]] @fig:2-bit-gray shows a 2-bit quantizer with gray-coded labelling. In this example, we have an advantage at $tilde(x) approx 0.5$, because a quantization error only returns one wrong bit instead of two. Furthermore, the transformation into the Tilde-Domain could also be performed using the @ecdf to achieve a more precise uniform distribution because we do not have to estimate a standard deviation of the input values. //#inline-note[Hier vielleicht noch eine Grafik zur Visualisierung?] == Experiments<sect:smhd_experiments> We tested the implementation of @sect:smhd_implementation with the dataset of @dataset. The dataset contains counts of positives edges of a ring oscillator at a set evaluation time $D$. Based on the count and the evaluation time, the frequency of a ring oscillator can be calculated using: $f = 2 dot frac(k, D)$. Because we want to analyze the performance of the S-Metric method over different temperatures, both during enrollment and reconstruction, we are limited to the experimental measurements of @dataset which varied the temperature during the FPGA operation. We will have measurements of $50$ FPGA boards available with $1600$ and $1696$ ring oscillators each. The two measurement sets are obtained from different slices of the FPGA board where the only difference to note is the number of ring oscillators available. To obtain the values to be processed, we subtract them in pairs, yielding $800$ and $848$ ring oscillator frequency differences _df_.\ Because we can assume that the frequencies _f_ are i.i.d., the difference _df_ can also be assumed to be i.i.d. To apply the values _df_ to our implementation of the S-Metric method, we will first transform them into the Tilde-Domain using an inverse CDF, resulting in uniform distributed values $tilde(x)$. Our resulting dataset consists of #glspl("ber") for quantization symbol widths of up to $6 "bits"$ evaluated with generated helper-data from up to $100 "metrics"$. In the following section, we will often set the maximum number of metrics to be $S=100$. This choice refers to the asymptotic behaviour of the @ber and can be equated with the choice $S arrow infinity$. //We chose not to perform simulations for bit widths higher than $6 "bits"$, as we will see later that we have already reached a bit error rate of approx. $10%$ for these configurations. #pagebreak() === Results & Discussion The bit error rate of different S-Metric configurations for naive labelling can be seen in @fig:global_errorrates. For this analysis, enrollment and reconstruction were both performed at room temperature. //and the quantizer was naively labelled. #figure( image("../graphics/25_25_all_error_rates_fixed.svg", width: 90%), caption: [Bit error rates for same-temperature execution. Here we can already observe the asymptotic #glspl("ber") for higher metric numbers. The error rate is scaled logarithmically here.] )<fig:global_errorrates> We can observe two key properties of the S-Metric method in @fig:global_errorrates. //The exponential growth of the error rate of classic 1-metric configurations can be observed through the increase of the error rates. The exponential growth of the @ber can be observed if we set $S=1$ and increase $M$ up to $6$. Also, as we expanded on in @par:offset_props, at some point using more metrics will no longer improve the bit error rate of the key. 
At a symbol width of $M >= 6$ bits, no further improvement through the S-Metric method can be observed. #figure( include("../graphics/plots/errorrates_changerate.typ"), caption: [Asymptotic performance of @smhdt] )<fig:errorrates_changerate> This tendency can also be shown through @fig:errorrates_changerate. Here, we calculated the quotient of the bit error rate using one metric and 100 metrics. From $M >= 6$ onwards, $(op("BER")(1, 2^M)) / (op("BER")(100, 2^M))$ approaches $~1$, which means, no real improvement is possible anymore through the S-Metric method. ==== Impact of helper data size The amount of helper data bits required by @smhdt is defined as a function of the number of metrics as $log_2(S)$. The overall extracted-bits to helper-data-bits ratio can be defined here as $cal(r) = frac(M, log_2(S))$ #figure( table( columns: (7), inset: 7pt, align: center + horizon, [$bold(M)$], [$1$], [$2$], [$3$], [$4$], [$5$], [$6$], [$bold(S)$], [$2$], [$4$], [$8$], [$16$], [$32$], [$64$], [*@ber*], [$0.012$], [$0.9 dot 10^(-4)$], [$0.002$], [$0.025$], [$0.857$], [$0.148$], ), caption: [S-Metric performance with same bit-to-metric ratios] )<fig:smhd_ratio_performance> If we take a look at the error rates of configurations for which $cal(r)$ is $800 dot 1$, we can observe a decline in performance of @smhdt for general higher-bit quantization processes. This behaviour is also shown in @fig:smhd_ratio_performance. ==== Impact of temperature<sect:impact_of_temperature> We will now take a look at the impact on the error rates of changing the temperature both during the enrollment and the reconstruction phase. The most common case to look at, is if we consider a fixed temperature during enrollment, most likely $25°C$. Since we wont always be able to recreate lab-like conditions during the reconstruction phase, it makes sense to look at the error rates at which reconstruction was performed at different temperatures. #figure( include("../graphics/plots/temperature/25_5_re.typ"), caption: [#glspl("ber") for reconstruction at different temperatures. Generally, the further we move away from the enrollment temperature, the worse the #gls("ber") gets. ] )<fig:smhd_tmp_reconstruction> @fig:smhd_tmp_reconstruction shows the results of this experiment conducted with a 2-bit configuration.\ As we can see, the further we move away from the temperature of enrollment, the higher the #glspl("ber"). We can observe this property well in detail in @fig:global_diffs. #scale(x: 90%, y: 90%)[ #figure( include("../graphics/plots/temperature/global_diffs/global_diffs.typ"), caption: [#glspl("ber") for different enrollment and reconstruction temperatures. The lower number in the operating configuration is assigned to the enrollment phase, the upper one to the reconstruction phase. The correlation between the #gls("ber") and the temperature is clearly visible here] )<fig:global_diffs>] Here, we compared the asymptotic performance of @smhdt for different temperatures both during enrollment and reconstruction. First we can observe that the optimum temperature for the operation of @smhdt in both phases for the dataset @dataset is $35°C$ instead of the expected $25°C$. Furthermore, the @ber seems to be almost directly determined by the absolute temperature difference, especially at higher temperature differences, showing that the further apart the temperatures of the two phases are, the higher the @ber. 
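Putting the rules of @sect:smhd_implementation together, the following standalone Python sketch restates the enrollment and reconstruction quantizers used throughout these experiments. It is a compact restatement under our own conventions (tilde-domain inputs in $[0, 1)$ and the offset ordering of @alg:find_offsets), not the evaluation code itself.

```python
from fractions import Fraction

def offsets(M, S):
    """Metric offsets phi = i * Phi with Phi = 1 / (2^M * S * 2).

    For even S the metric index i runs over {-S/2, ..., -1, 1, ..., S/2};
    for odd S it runs over {-(S-1)/2, ..., 0, ..., (S-1)/2}, so the middle
    metric keeps the enrollment quantizer (offset 0).
    """
    Phi = Fraction(1, 2**M * S * 2)
    if S % 2 == 0:
        idx = [i for i in range(-(S // 2), S // 2 + 1) if i != 0]
    else:
        idx = list(range(-(S // 2), S // 2 + 1))
    return [i * Phi for i in idx]

def enroll(x, M, S):
    """Return (symbol, metric) for a tilde-domain value x in [0, 1)."""
    delta = Fraction(1, 2**M)
    delta_s = delta / S
    symbol = int(x // delta)
    metric = int((x - symbol * delta) // delta_s)   # sub-step index 0 .. S-1
    return symbol, metric

def reconstruct(x_star, metric, M, S):
    """Quantize a noisy re-measurement with the quantizer chosen by the metric."""
    delta = Fraction(1, 2**M)
    phi = offsets(M, S)[metric]
    # Shifting the quantizer curve by phi equals shifting the input by -phi.
    shifted = (Fraction(x_star) - phi) % 1
    return int(shifted // delta)

if __name__ == "__main__":
    print(offsets(2, 2))   # [-1/16, 1/16], the 2-bit 2-metric offsets
    print(offsets(2, 4))   # [-1/16, -1/32, 1/32, 1/16], cf. the 4-metric table
    M, S = 2, 2
    x = Fraction(24, 100)            # enrollment measurement, close to the bound 1/4
    sym, met = enroll(x, M, S)
    noisy = x + Fraction(2, 100)     # drifted across the enrollment bound
    naive = int(noisy // Fraction(1, 2**M))
    assert naive != sym                          # plain re-quantization fails here
    assert reconstruct(noisy, met, M, S) == sym  # the helper data corrects it
```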
==== Gray coding In @sect:smhd_improvements, we discussed how a gray coded labelling for the quantizer could improve the bit error rates of the S-Metric method. Because we only change the labelling of the quantizing bins and do not make any changes to #gls("smhdt") itself, we can assume that the effects of temperature on the quantization process are directly translated to the gray-coded case. @fig:smhd_gray_coding shows the comparison of applying #gls("smhdt") at room temperature for both naive and gray-coded labels. There we can already observe the improvement of using gray-coded labelling, but the impact of this change of labels can really be seen in @tab:gray_coded_impact. As we can see, the improvement rises rapidly to a peak at a bit width of M=3 and then falls again slightly. This effect can be explained with the exponential rise of the #gls("ber") for higher bit widths $M$. For $M>3$ the rise of the #gls("ber") predominates the possible improvement by applying a gray-coded labelling. #figure( table( columns: (6), align: center + horizon, inset: 7pt, [1],[2],[3],[4], [5], [6], [$0%$], [$24.75%$], [$47.45%$], [$46.97%$], [$45.91%$], [$37.73%$] ), caption: [Improvement of using gray-coded instead of naive labelling, per bit width] )<tab:gray_coded_impact> #figure( image("./../graphics/plots/gray_coding/3dplot.svg"), caption: [Comparison between #glspl("ber") using naive labelling and gray-coded labelling] )<fig:smhd_gray_coding> Using the dataset, we can estimate the average improvement for using gray-coded labelling to be at $33%$.
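The effect of the labelling can be made concrete with a short, generic Python snippet (not tied to the evaluation code): under a reflected Gray code, adjacent quantizer bins differ in exactly one bit, so a quantization error into a neighbouring bin costs at most one bit, which is exactly the advantage described above for $tilde(x) approx 0.5$.

```python
def gray(n: int) -> int:
    """Reflected binary Gray code of n."""
    return n ^ (n >> 1)

def bit_errors(a: int, b: int, width: int) -> int:
    """Number of differing bits between two width-bit labels."""
    return bin((a ^ b) & ((1 << width) - 1)).count("1")

if __name__ == "__main__":
    M = 2
    # Naive labels of the four 2-bit bins: 00, 01, 10, 11
    # Gray labels:                         00, 01, 11, 10
    for sym in range(2**M - 1):
        naive = bit_errors(sym, sym + 1, M)
        coded = bit_errors(gray(sym), gray(sym + 1), M)
        print(f"bins {sym} -> {sym + 1}: naive {naive} bit(s), gray {coded} bit(s)")
    # The boundary near x ~ 0.5 separates bins 1 and 2: the naive labels 01 and 10
    # differ in two bits, while the Gray labels 01 and 11 differ in one.
```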
https://github.com/polarkac/MTG-Stories
https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/004%20-%20Dragon's%20Maze/005_The%20Pursuit%2C%20Part%201.typ
typst
#import "@local/mtgstory:0.2.0": conf #show: doc => conf( "The Pursuit, Part 1", set_name: "Dragon's Maze", story_date: datetime(day: 01, month: 05, year: 2013), author: "<NAME>", doc ) Alayer of tension permeated the laboratory that made even the signature Izzet hiss of steam and the creaking of countless moving parts seem distant. For the aging guildmage, Madarrak, nothing existed at that moment except anticipation. #figure(image("005_The Pursuit, Part 1/01.jpg", width: 100%), caption: [], supplement: none, numbering: none) Anticipation, and of course the experiment. There's always an experiment. This one loomed in the middle of the laboratory, an enormous mechanical construct that resembled a suit of armor that could have been fashioned for an ogre. But in place of head or helm was an array of control levers, pressure gauges, and a seat, upon which sat a nervous goblin. He watched the goblin go through a sequence of turning knobs and flipping switches. All Madarrak needed was a breakthrough. A faint whirring sound broke the silence. It started as slight hum but gained in intensity until it became a rumble, more felt than heard. The air became noticeably dry. "Here it comes!" said Castan, a vedalken attendant at Madarrak's side, who strained to be heard over the din. She flicked her goggles from her forehead to her eyes just as a bolt of brilliant blue electricity crackled into being, suspended between two conductor coils that protruded from the construct's shoulders precariously close to the goblin pilot. The strand of electricity danced frantically, and soon other bolts arced out into the lab, forcing Madarrak and his attendant to watch the display from behind a heavy wooden work table. The goblin cranked a lever. The hulk stepped forward, its heavy footfall ringing as the sound of success in Madarrak's ears. It took another step. And another. It was walking, and it was stable, working just as he had designed it to work. Then something unintended occurred. The construct picked up speed, and in an instant Madarrak saw it bound across the lab. Before being tossed from the his seat, the goblin pilot steered it toward the door. Bursting from the lab with increasing velocity, the construct hurtled down the stone basement corridor and out of sight. Madarrak winced with each crashing step, and Castan ran to the open doorway in time to see the construct smash straight through the stone wall at the other end of the corridor and tumble to the floor, unmoving, in the heap of rubble it created. Errant bolts of electrical energy sporadically crackled across its chassis. #figure(image("005_The Pursuit, Part 1/02.jpg", width: 100%), caption: [], supplement: none, numbering: none) #v(0.35em) #line(length: 100%, stroke: rgb(90%, 90%, 90%)) #v(0.35em) The cleanup went quickly, as destruction was routine in the testing facilities of the Izzet. With the strength of a dozen goblins and Yzaak, the hulking Cyclops in Madarrak's service, the construct was hauled back to the laboratory. "You were very close with that one, Mentor," Castan said, picking through the rubble of the room. "The Bi-Pedal Conveyance Apparatus will get the attention of the Izmagnus." "The Izmagnus means nothing to me. Only the attention of Niv-Mizzet matters. But we are close. We will simply have to, um..." Madarrak trailed off. Something in the debris caught his eye. In the gap formed by two fallen wall stones, a light was flashing. Castan tried to complete her mentor's thought, "Yes, we will fix it. 
I believe the issue is how the intake of mana is being regulated. Clearly the system could not handle it, but I have a few ideas about how this can be remedied. Mentor?" Madarrak held a plank of wood in his hands, which he used to leverage one of the stones aside. The light, emanating from a palm-sized disk, continued to flash in regular intervals, and Madarrak scooped it up from the splintered remains of a small wooden box that had been smashed to pieces in the collapse. "What did you find?" "I'm not familiar with it," said Madarrak. Mentor and attendant stood and observed the flashing light long enough to discern a simple pattern in its regular pulses, which it cycled through over and over again. Madarrak turned the object over in his hand, and he saw the Izzet dragon icon etched there. Below the dragon were etched three tiny circles, arranged so each one made the corner of an upside down triangle. As he intently studied the shapes, hoping to divine their meaning, or perhaps recall some distant bit of relevant information, the object jolted out of hand and smacked him on the forehead. "Mentor!" The old mage reeled backward, and with a force of its own, the object slid toward the hole in the wall. Before it could get far, Castan pinned it under her boot. #v(0.35em) #line(length: 100%, stroke: rgb(90%, 90%, 90%)) #v(0.35em) Madarrak sat on a stool, hunched forward, with his hands on his knees. This was the highest he'd been in Nivix, the towering guildhall of the Izzet League, in many years. Niv-Mizzet, the Firemind, the Dracogenius, was discussing the object he had found, perhaps even discussing him. The old man's fingers tapped as he waited. He heard a door creak open, then close. He rose to see a vedalken man, adorned in the signature Izzet blue and red stripes, leaning on a mizzium cane. The vedalken spoke as he closed the distance between him and Madarrak. "Have you ever heard of a hypermana focusing lens?" "I'm sorry, <NAME>?" "No matter. I didn't think so." The vedalken, Niv-Mizzet's chief attendant, reached Madarrak. "But you have heard of the chemister, <NAME>, correct?" "Of course," Madarrak said. "I was new to the League when he vanished while conducting an experiment. I heard he was talented." #figure(image("005_The Pursuit, Part 1/03.jpg", width: 100%), caption: [], supplement: none, numbering: none) "Quite true. He disappeared testing a hypermana focusing lens, and after some time, it was obviously assumed that something had gone terribly wrong." The chamberlain reached into his pocket and pulled out the blinking object. "As it turns out, it was not wrong, but an unintended success. You see, Niv-Mizzet recognized this. The Dracogenius explained it to be a homing mechanism, a receiver, created to locate something over even vast distances. You have seen this move, seemingly of its own will, correct?" Madarrak his touched bruised forehead and nodded. "You see," continued the chamberlain, "this one belonged to <NAME>." He indicated the three circles on the object. "Apparently, it was sealed away by his attendants along with much of his equipment, and it would have stayed hidden if not your boon of a catastrophe. Niv-Mizzet was transfixed by this for more than moment, Madarrak. He said that the only way this could continue to function was if something still existed to be found. <NAME> did not disappear. He teleported. Niv-Mizzet wants you to find <NAME> and whatever transported him away, and he wants you to bring them back." <NAME> handed the pulsing object back to Madarrak. 
#v(0.35em) #line(length: 100%, stroke: rgb(90%, 90%, 90%)) #v(0.35em) When Madarrak returned to his lab, he wasted no time preparing to depart on his mission for the guild leader. "Castan," he said, "prepare my things." Castan poked her head out from behind a colossal book. "Hmm? Oh, while you were speaking with Niv-Mizzet, I was able to fix—" "I did not speak with him directly, but we are now on his errand and we are to depart immediately. Quickly now!" #v(0.35em) #line(length: 100%, stroke: rgb(90%, 90%, 90%)) #v(0.35em) It was quiet outside the cacophony of the Nivix, but in the dank tunnels of Ravnica's Undercity and interconnecting sewers, the quiet was palpable. The walls were slick with algae that swallowed any noise, giving the air a certain thickness. With a great sense of purpose, Madarrak took long strides into the darkness. Both Castan, a lanky vedalken, and Yzaak the Cyclops, struggled to keep up. #figure(image("005_The Pursuit, Part 1/04.jpg", width: 100%), caption: [], supplement: none, numbering: none) The sewer stretched on for miles, moving ever downward, the trio's steps guided by the regular pulsing of the receiver that struggled against Madarrak's grip. They passed through the tunnels that occasionally opened up into vast caverns, where the darkness pressed in on them. Where the light of their lamps pierced the dark, the architecture resembled twisted reflections of Orzhov cathedrals. Moisture dripped from the unseen ceiling. A drop landed on Castan's head, slid down her neck to run down her back. She shuddered. "Mentor, you've said nothing for hours." "With good reason!" Madarrak lashed out in a whisper. "We are close now. All the more reason to remain silent." With a sudden burst of force, the receiver lurched out of the guildmage's hand, clattered to the floor. "Ahh!" He scrambled after it, but it slid out of his reach and across the stones in short bursts of motion. Yzaak leapt forward to protect his master, knocking Castan aside with his bulk. She stumbled, lost her footing, and the weight of her pack pulled her off balance and to the floor, which met her with a soft squish. She quickly moved to prop herself up with her hands, but when they pressed against the floor, a greasy gelatinous substance oozed between her fingers and covered her hands. Eyes widening, she inhaled sharply to scream, but she only sucked in a putrid fume that made her gag and double over in the filth. #figure(image("005_The Pursuit, Part 1/05.jpg", width: 100%), caption: [], supplement: none, numbering: none) "Get up!" rasped Madarrak. "Quickly now!" He had regained possession of the receiver, which he now gripped with both hands. Castan looked at her mentor, who stood, gazing out into the darkness. "We have to move." Her eyes followed the beam of light cast by her mentor's lamp. At the other end, eyes stared back. Castan rose to her feet. "A rot farmer. He may not be interested in us. Let's keep moving." Madarrak once again followed the lead of the receiver. The Cyclops fell in line behind. Castan lingered for a moment, just long enough for her to catch a second pair of eyes in the darkness. She quickened her pace. The insistent receiver brought them to a stone archway that was different from previous passages, in that carved upon the keystone was the insectoid symbol of the Golgari guild. Madarrak placed a hand on the Cyclops's shoulder, a habit clearly practiced many times, for Yzaak stooped without verbal instruction so that their faces were level. "Yzaak," Madarrak said, "wait here. 
Keep this way safe for us." "Mentor?" said Castan, "We do not know what lies ahead. Would it not be wiser to keep Yzaak close?" "Nonsense. We know what's ahead. It's what we came for, and it's very close now. I do not want that Golgari scum following us." Madarrak walked under the arch and again into unknown darkness that would have swallowed him whole if not from pulsing light of receiver that illuminated his frame. "Besides, we are mages of the Izzet League, not cowering pups," he called back. Castan set her jaw, took a deep breath, and followed the light of her lamp after her mentor, leaving Yzaak behind. The passage narrowed, though the ceiling remained high enough to be of unknown distance. The ground here was broken up by several crevasses from which thick gray-green vapor billowed. Following a narrow path between the gaps, Madarrak and Castan carefully picked their way across the uneven surface. #figure(image("005_The Pursuit, Part 1/06.jpg", width: 90%), caption: [], supplement: none, numbering: none) At the other end of the passage, where it diverged in opposite directions, the receiver's light suddenly intensified to an almost blinding luminance. It broke free from Madarrak's grip and took off down the path to the left. The old guildmage broke into a sprint. "Quickly now, Castan!" They ran after the glowing disk, which dizzyingly caromed off the walls, floor, and ceiling as it flew. Madarrak's breathing became labored, he struggled to keep the receiver in sight, but he did not break his stride. Castan was right behind him, and they were led through various twists and turns in their pursuit. There was no time to mark their path. The chase came to an abrupt end when the receiver came to rest on a pile of detritus and muck piled up in an alcove that had been cut into the stone wall of the chamber. The pursuers caught up to it. Hands on her knees, Castan took a moment to catch her breath. Her mentor, however, descended on the pile, digging into it, working furiously to unearth the contents beneath. "Mentor," Castan put her hand on Madarrak's shoulder. The word came to the old man as a distant murmur. "Mentor. Madarrak!" Cranky, as if woken in the middle of a dream, Madarrak turned to Castan, who was motioning to the wall where a series of crude markings had been scrawled. Madarrak was disinterested, dismissing the distraction with the wave of a muck-caked hand. Nothing else existed at that moment but his redemption, his acceptance back into the Izmagnus. It was buried beneath the filth. He just had to dig. His fingers scraped against metal. His eyes widened, and in a frenzy, he cleared away enough muck to reveal the contours of a helmet. It resembled Yzaak's, complete with a single, circular viewing glass which he wiped clear with a sleeve. Mesmerized, he stared into his own likeness that reflected off the glass. After a moment, a red glow began to swell from behind the pane. Gradually, it took on the shape of a human face that was distorted in anguish. The face spoke. Its voice was muted through the glass, but the words still clear. "You should not have come." "<NAME>?" "You should not have come."
https://github.com/goshakowska/Typstdiff
https://raw.githubusercontent.com/goshakowska/Typstdiff/main/tests/test_complex/para/para_deleted.typ
typst
In this report, we will explore the various factors that influence structures. It is the responsibility of the author to obtain any required government or company reviews for their papers in advance of publication. Start early to determine if the reviews are required; this process can take several weeks.
https://github.com/vaucher-leo/template-tb-typst
https://raw.githubusercontent.com/vaucher-leo/template-tb-typst/main/main.typ
typst
MIT License
// Main File #import "template-HEIG-tb-FR.typ": * // Replace FR with EN for EN Language #import "fonctions.typ": * #import "config.typ": * #show: project.with( title: title, authors: author, professor: professor, departement: departement, filiere: filiere, orientation: orientation, field: field, date: todayDate("FR"), confidential: confidential, ) // Main body. #include "page/Introduction.typ" #sys = Théorie #lorem(20) Exemple de bibliographie @Typst = Simulations #lorem(40) = Mesures #lorem(10) = Analyse #lorem(132) = Conclusion #lorem(200) #bibliography("bibliography.yml")
https://github.com/ntjess/typst-drafting
https://raw.githubusercontent.com/ntjess/typst-drafting/main/docs/manual.typ
typst
The Unlicense
#import "@preview/tidy:0.1.0" #import "utils.typ": * #import "../drafting.typ" #let module = tidy.parse-module(read("../drafting.typ"), scope: (drafting: drafting)) // Inspiration: https://github.com/typst/packages/blob/main/packages/preview/cetz/0.1.0/manual.typ // This is a wrapper around typst-doc show-module that // strips all but one function from the module first. // As soon as typst-doc supports examples, this is no longer // needed. #let show-module-fn(module, fn, ..args) = { module.functions = module.functions.filter(f => f.name == fn) tidy.show-module(module, ..args.pos(), ..args.named(), show-module-name: false) } #show raw.where(lang: "standalone"): text => { standalone-margin-note-example(raw(text.text, lang: "typ")) } #show raw.where(lang: "standalone-ttb"): text => { standalone-margin-note-example(raw(text.text, lang: "typ"), direction: ttb) } #show raw.where(lang: "example"): content => { set text(font: "Linux Libertine") example-with-source(content.text, drafting: drafting, direction: ltr) } #show raw.where(lang: "example-ttb"): content => { set text(font: "Linux Libertine") example-with-source(content.text, drafting: drafting) } #let show-mod-old = show-module-fn #let show-module-fn = show-mod-old.with(first-heading-level: 0) #show-module-fn(module, "margin-note-defaults") #show-module-fn(module, "margin-note") ```standalone = Document Title #lorem(3) #margin-note(side: left)[Left note] #margin-note[right note] #margin-note(stroke: green)[Green stroke, auto-offset] #lorem(10) #margin-note(side: left, dy: 10pt)[Manual offset] #lorem(10) ``` #show-module-fn(module, "inline-note") ```example = Document Title #lorem(7) #inline-note[An inline note that breaks the paragraph] #lorem(6) #inline-note(par-break: false)[A note with no paragraph break] #lorem(6) ``` #show-module-fn(module, "rule-grid")
https://github.com/LucaCiucci/tesi-triennale
https://raw.githubusercontent.com/LucaCiucci/tesi-triennale/main/img/slide_3/image.typ
typst
Creative Commons Zero v1.0 Universal
#set page(paper: "a4", margin: 10pt) #import "@preview/cetz:0.2.2" #let fig(step) = cetz.canvas(length: 1mm, { import cetz: draw import cetz.draw: * line((0, 0), (40, 0), stroke: (thickness: 2pt, ), mark: (end: "stealth"),) line((0, 0), (0, 40), stroke: (thickness: 2pt, ), mark: (end: "stealth"),) line((0, 0), (-20, -30), stroke: (thickness: 2pt, ), mark: (end: "stealth"),) content((-15, -30), text(20pt, $bold(x)$)) content((37, -5), text(20pt, $bold(y)$)) content((5, 35), text(20pt, $bold(z)$)) content((0, 0), image("i.svg", width: 70mm)) line((10, -5), (10, -12), stroke: (thickness: 0.5pt, )) line((10, 10), (10, -5), stroke: (thickness: 2pt, ), mark: (end: "stealth"),) line((10, -27), (10, -12), stroke: (thickness: 2pt, ), mark: (end: "stealth"),) content((15,-17), text(30pt, [*#sym.epsilon*])) content((-10, 17), text(30pt, $bold(R^N)$)) content((-5, -20), text(30pt, $bold(R^S)$)) }) //#rect(fig(0)) #fig(0)
https://github.com/zurgl/typst-resume
https://raw.githubusercontent.com/zurgl/typst-resume/main/metadata.typ
typst
/* Personal Information */ #let firstName = "Yacine" #let lastName = "<NAME>" #let personalInfo = ( phone: "+33 6 95 11 01 26", email: "<EMAIL>", linkedin: "yacine-elayar", homepage: "elayar.fr", ) /* Resume specific */ #let headerQuoteInternational = ( "en": [Experienced Data Analyst looking for a full time job starting from now], "fr": [Ingénieur logiciel, Analyste de donnée, freelance, actuellement en recherche active], ) #let cvFooterInternational = ( "en": "Resume", "fr": "Curriculum vitae", ) /* Letter specific */ #let letterFooterInternational = ( "en": "Cover Letter", "fr": "Lettre de motivation", ) /* Layout Setting */ // Optional: skyblue, red, nephritis, concrete, darknight #let awesomeColor = "skyblue" // Leave blank if profil photo is not needed #let profilePhoto = "../../assets/avatar.png" // INFO: value must matches folder suffix; i.e "zh" -> "./modules_zh" #let varLanguage = "fr" // Decide if you want to put your company in bold or your position in bold #let varEntrySocietyFirst = true // Decide if you want to display organisation logo or not #let varDisplayLogo = true
https://github.com/dankelley/typst_templates
https://raw.githubusercontent.com/dankelley/typst_templates/main/memo/0.0.1/memo.typ
typst
MIT License
#let conf( category: none, title: none, date: none, preface: none, font: "times", fontsize: 12pt, doc, ) = { set text(font: font, size: fontsize) set page("us-letter", header: [ *#category #h(1fr) #title #h(1fr) #date* ]) text(size: 0.9em)[#preface] // reduce size to 90% show heading.where(level: 1): set text(font: font, size: fontsize) doc }
https://github.com/LDemetrios/Conspects-4sem
https://raw.githubusercontent.com/LDemetrios/Conspects-4sem/master/typst/sources/test.typ
typst
#import "header.typ": * #show : general-style aaa _bbb_ <x> #metadata((a: 1, b: 2))<y> bbb #setup-exec("test.typext", it => read(it)) = aaaaaa == b _b_ <x> bb // author: gaiajack #let labeled-box(lbl, body) = block(above: 2em, stroke: 0.5pt + foreground, width: 100%, inset: 14pt)[ #set text(font: "Noto Sans") #place( top + left, dy: -.8em - 14pt, // Account for inset of block dx: 6pt - 14pt, block(fill: background, inset: 2pt)[*#lbl*], ) #body ] #let marked(fill: luma(240), body) = { rect(fill: fill, stroke: (left: 0.25em), width: 100%, body) } aa "bb" cc $a /b$ a <l> /* #ext:begin:fel */ #full-externation-log( ("test.sh": "ls -Ali\n", "test2.sh": "ps | \n head -n 10\n"), (("ls", "-Ali"), ("bash", "test2.sh")), ) /* #ext:end:fel */ #extract("test.typ", "fel") #close-exec()
https://github.com/ClazyChen/Table-Tennis-Rankings
https://raw.githubusercontent.com/ClazyChen/Table-Tennis-Rankings/main/history_CN/2019/WS-03.typ
typst
#set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (1 - 32)", table( columns: 4, [排名], [运动员], [国家/地区], [积分], [1], [陈梦], [CHN], [3478], [2], [丁宁], [CHN], [3388], [3], [朱雨玲], [MAC], [3353], [4], [王曼昱], [CHN], [3278], [5], [刘诗雯], [CHN], [3245], [6], [伊藤美诚], [JPN], [3170], [7], [木子], [CHN], [3136], [8], [陈幸同], [CHN], [3104], [9], [孙颖莎], [CHN], [3101], [10], [何卓佳], [CHN], [3091], [11], [石川佳纯], [JPN], [3078], [12], [#text(gray, "文佳")], [CHN], [3073], [13], [芝田沙季], [JPN], [3039], [14], [武杨], [CHN], [3031], [15], [冯亚兰], [CHN], [3017], [16], [郑怡静], [TPE], [3002], [17], [顾玉婷], [CHN], [2995], [18], [#text(gray, "刘高阳")], [CHN], [2992], [19], [王艺迪], [CHN], [2974], [20], [佩特丽莎 索尔佳], [GER], [2972], [21], [#text(gray, "胡丽梅")], [CHN], [2972], [22], [杜凯琹], [HKG], [2953], [23], [李倩], [POL], [2944], [24], [桥本帆乃香], [JPN], [2938], [25], [田志希], [KOR], [2935], [26], [陈可], [CHN], [2928], [27], [早田希娜], [JPN], [2926], [28], [伯纳黛特 斯佐科斯], [ROU], [2924], [29], [韩莹], [GER], [2908], [30], [金宋依], [PRK], [2906], [31], [平野美宇], [JPN], [2905], [32], [加藤美优], [JPN], [2901], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (33 - 64)", table( columns: 4, [排名], [运动员], [国家/地区], [积分], [33], [索菲亚 波尔卡诺娃], [AUT], [2888], [34], [张瑞], [CHN], [2871], [35], [于梦雨], [SGP], [2867], [36], [傅玉], [POR], [2866], [37], [安藤南], [JPN], [2858], [38], [孙铭阳], [CHN], [2854], [39], [张蔷], [CHN], [2852], [40], [徐孝元], [KOR], [2849], [41], [GU Ruochen], [CHN], [2849], [42], [佐藤瞳], [JPN], [2835], [43], [石洵瑶], [CHN], [2831], [44], [杨晓欣], [MON], [2829], [45], [车晓曦], [CHN], [2822], [46], [KIM Nam Hae], [PRK], [2821], [47], [LIU Xi], [CHN], [2815], [48], [单晓娜], [GER], [2808], [49], [侯美玲], [TUR], [2804], [50], [阿德里安娜 迪亚兹], [PUR], [2801], [51], [冯天薇], [SGP], [2801], [52], [PESOTSKA Margaryta], [UKR], [2780], [53], [梁夏银], [KOR], [2778], [54], [长崎美柚], [JPN], [2777], [55], [李洁], [NED], [2777], [56], [伊丽莎白 萨玛拉], [ROU], [2773], [57], [CHA Hyo Sim], [PRK], [2761], [58], [张墨], [CAN], [2759], [59], [李佳燚], [CHN], [2753], [60], [森樱], [JPN], [2746], [61], [LANG Kristin], [GER], [2739], [62], [李佼], [NED], [2738], [63], [崔孝珠], [KOR], [2737], [64], [SHIOMI Maki], [JPN], [2735], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (65 - 96)", table( columns: 4, [排名], [运动员], [国家/地区], [积分], [65], [浜本由惟], [JPN], [2732], [66], [李皓晴], [HKG], [2718], [67], [#text(gray, "姜华珺")], [HKG], [2714], [68], [木原美悠], [JPN], [2708], [69], [范思琦], [CHN], [2706], [70], [BALAZOVA Barbora], [SVK], [2703], [71], [玛妮卡 巴特拉], [IND], [2700], [72], [李芬], [SWE], [2695], [73], [陈思羽], [TPE], [2695], [74], [刘佳], [AUT], [2693], [75], [MIKHAILOVA Polina], [RUS], [2692], [76], [SOO Wai Yam Minnie], [HKG], [2689], [77], [李时温], [KOR], [2688], [78], [曾尖], [SGP], [2685], [79], [妮娜 米特兰姆], [GER], [2683], [80], [#text(gray, "MATSUZAWA Marina")], [JPN], [2680], [81], [EKHOLM Matilda], [SWE], [2679], [82], [HAPON<NAME>], [UKR], [2678], [83], [李恩惠], [KOR], [2678], [84], [#text(gray, "LI Jiayuan")], [CHN], [2677], [85], [<NAME>], [JPN], [2676], [86], [刘斐], [CHN], [2676], [87], [森田美咲], [JPN], [2673], [88], [GRZYBOWSKA-FRANC Katarzyna], [POL], [2673], [89], [POTA Georgina], [HUN], [2672], [90], [WINTER Sabine], [GER], [2665], [91], [布里特 伊尔兰德], [NED], [2661], [92], [MAEDA Miyu], [JPN], [2661], [93], [玛利亚 肖], [ESP], [2659], [94], [HUANG Yingqi], [CHN], [2656], [95], [倪夏莲], [LUX], [2655], [96], [YOO Eunchong], [KOR], [2651], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles 
(97 - 128)", table( columns: 4, [排名], [运动员], [国家/地区], [积分], [97], [SOLJA Amelie], [AUT], [2650], [98], [金河英], [KOR], [2642], [99], [KIM Youjin], [KOR], [2641], [100], [NG Wing Nam], [HKG], [2634], [101], [SOMA Yumeno], [JPN], [2634], [102], [钱天一], [CHN], [2632], [103], [大藤沙月], [JPN], [2631], [104], [<NAME>], [ROU], [2629], [105], [YOON Hyobin], [KOR], [2627], [106], [SAWETTABUT Suthasini], [THA], [2625], [107], [张安], [USA], [2616], [108], [申裕斌], [KOR], [2611], [109], [MATELOVA Hana], [CZE], [2607], [110], [NOSKOVA Yana], [RUS], [2604], [111], [HUANG Yi-Hua], [TPE], [2602], [112], [邵杰妮], [POR], [2599], [113], [#text(gray, "KATO Kyoka")], [JPN], [2594], [114], [CHENG Hsien-Tzu], [TPE], [2583], [115], [PARTYKA Natalia], [POL], [2579], [116], [TAILAKOVA Mariia], [RUS], [2572], [117], [WU Yue], [USA], [2569], [118], [郭雨涵], [CHN], [2568], [119], [#text(gray, "SO Eka")], [JPN], [2565], [120], [#text(gray, "CHOE Hyon Hwa")], [PRK], [2564], [121], [笹尾明日香], [JPN], [2561], [122], [<NAME>], [CRO], [2561], [123], [#text(gray, "<NAME>")], [SGP], [2557], [124], [<NAME>], [FRA], [2554], [125], [<NAME>], [HUN], [2553], [126], [<NAME>], [SGP], [2553], [127], [STRBIKOVA Renata], [CZE], [2550], [128], [VOROBEVA Olga], [RUS], [2548], ) )
https://github.com/0xPARC/0xparc-intro-book
https://raw.githubusercontent.com/0xPARC/0xparc-intro-book/main/old-stuff/h-classical-pcp.typ
typst
#import "preamble.typ":* Historically, the construction of the first PCP, or *Probabilistically Checkable Proof*, was sort of an ancestor to the zkSNARK. There are a few nice ideas in here, but they're actually more complicated than the zkSNARK and hence included here mostly for historical reference. Pedagogically, we think it makes sense to just jump straight into PLONK and Groth16 even though the PCP construction came first. The more modern zkSNARK protocols are both better (according to metrics like message length or verifier complexity) and simpler (fewer moving parts). This part is divided into two sections. - @sumcheck describes the sum-check protocol, which is actually useful a bit more generally and shows up in some other SNARK constructions besides the PLONK and Groth16 that we covered. - @pcp gives an overview of the first PCP constructions, but it's quite involved and much less enlightening. It's mostly here for historical reasons and not otherwise worth reading. Elliptic curves will not be used in this part at all; in fact, these two chapters are self-contained and don't depend on earlier parts of these lecture notes.
https://github.com/Nianyi-GSND-Projects/GSND-5110-GP2
https://raw.githubusercontent.com/Nianyi-GSND-Projects/GSND-5110-GP2/master/Rulebook/Rulebook.typ
typst
#set page( paper: "us-letter", flipped: true, margin: 0.5in, columns: 2, ) #set par(linebreaks: "optimized") #set heading(numbering: "1.1.1") #let placeholder(body, width: 100%, height: 1in) = { box(width: width, height: height, stroke: (black + 0.5pt))[#{ set align(horizon + center); body; }] } #let title = [The Rule Book of _Tabletop Overcooked!_] #let version = "0.1.0" #{ show par: set block(below: 0.8em); set align(center); { set text(size: 18pt, weight: "bold"); title; } parbreak(); set text(style: "italic"); [version #version] }; A group of chefs are working in a kitchen. Try to finish orders as fast as possible! = Overview #columns(2)[ - A turn-based board game. - For 2-4 players. #colbreak() - Cooperative and fun. - Involves physical interaction. ] = Setup #let cph() = { placeholder(width: 2em, height: 2em)[]; } #let rarrow() = { box(width: 2em, height: 2em)[ #set align(center + horizon) → ]; } + Pick a map board and place it on a flat surface. + Shuffle the order pile and place it facing down in the order area. #cph()#rarrow()#cph() #h(1em) #cph()#rarrow()#cph() #h(1em) #cph()#rarrow()#cph() + Place all ingredients in the corresponding ingredient boxes on the map. #cph()#cph()#cph() + Place the chef markers into the starting spots. + Place all the ingredient tokens aside. After preparation, the board should look like this: #figure(placeholder(height: 12em)[placeholder]) == Areas There are 3 areas that can be seen from the setup: - Map board: Supports the gameplay and displays everything. - Order area: Where the active orders and finished orders are placed. - Hand: The players could hold ingredients in hand. = Play The game is played in turns, cycling between the players. The order goes as the chef's positions on the map. == New order Before the first player's turn starts, draw 1 card from the order pile and place it on the table facing up, which would be the first incoming order. There are some important information shown on the card: - The ingredients required for this order. - The waiting turn number of this order. - The scores the players could get for finishing this order. Every 6 rounds, a new order will come in. Upon a turn, a player could choose one option below: - Move their chef marker (see @Movement). - Perform a workspot action (see @Workspot). - Do nothing and skip the turn. Each turn is limited to 10 seconds, So the player must finish their action within the time range, or the turn will automatically be skipped. At the end of each turn, discard all expired orders. == Movement <Movement> A player could move their own chef marker by flicking them with fingers. In each turn, a player may only flick once. Before flicking, if the chef is holding anything, they may drop them at place. After flicking, if anything is hit during the chef marker's movement, the player could pick it up by putting _only one of them_ on the chef marker (only if the chef is not holding anything). == Workspot <Workspot> Every workspots come with their areas. When a player's chef marker is overlapping a workspot area, they could spend a turn to process an ingredient they're holding. When an ingredient is processed, it should be flipped to the processed side. == Serving When an active order's all requirements are met, players could spend the ingredients they're holding to complete it. The order then should be move to the completed order pile. == Finishing When all orders are either finished or expired, the game ends. 
The result of the game is determined by whether the final score has reached the level's required passing score. Sum all scores of the finished orders to get the final score. = Appendix #show table.cell.where(y: 0): set text(weight: "bold") == Term table #table( columns: 2, align: left, stroke: none, table.hline(), table.header([#set align(center); Term], [#set align(center); Meaning]), table.hline(stroke: 0.5pt), [Map board], [ A "level" of this game. Features a planar structure with obstacles to block the chefs' movement. ], [Ingredient box], [ A containing area on the map where the players could take ingredient from. ], [Hand], [ Where a player holds the game objects they own. ], [Chef marker], [ A small round disk with some weight to represent a player's avatar in the game. ], [Workspot], [ An area on the map where the players can process the ingredients to the next stage. ], table.hline(), )
https://github.com/ljgago/typst-chords
https://raw.githubusercontent.com/ljgago/typst-chords/main/src/chart.typ
typst
MIT License
#import "./utils.typ": size-to-scale, parse-input-string, top-border-sharp, top-border-round, total-bounds, set-default-arguments // Draws a horizontal border that indicates the starting of the fretboard #let draw-nut(self) = { let size = ( width: self.grid.width, height: 1.2pt * self.scale ) let elements = { if self.fret in (none, 1) { if self.design == "sharp" { top-border-sharp(size, self.stroke, self.scale) } else { top-border-round(size, self.stroke, self.scale) } } } return ( bounds: ( dx: 0pt, dy: 0pt, width: size.width, height: size.height ), elements: elements ) } // Draws a grid with a width = (length of tabs) and height = (number of frets) #let draw-grid(self) = { let radius = (bottom: 1pt * self.scale, top: 1pt * self.scale) let gap = 3pt * self.scale let elements = { place( rect( width: self.grid.width, height: self.grid.height, radius: if self.design == "sharp" {0pt} else {radius}, stroke: self.stroke ) ) // draws the vertical lines for i in range(self.grid.cols - 1) { let x = (i + 1) * self.step place( line( start: (x, 0pt), end: (x, self.grid.height), stroke: self.stroke ) ) } // draws the horizontal lines for i in range(self.grid.rows - 1) { let y = (i + 1) * self.step place( line( start: (0pt, y), end: (self.grid.width, y), stroke: self.stroke ) ) } } return ( bounds: ( dx: -gap, dy: 0pt, width: self.grid.width + gap * 2, height: self.grid.height ), elements: elements ) } // Draws the tabs over the grid #let draw-tabs(self) = { let radius = 1.7pt * self.scale let elements = { for (tab, col) in self.tabs.zip(range(self.tabs.len())) { if type(tab) == "string" and lower(tab) == "x" { let offset = col * self.step place( line( start: (offset - 1.5pt * self.scale, -2.5pt * self.scale), end: (offset + 1.5pt * self.scale, -5.5pt * self.scale), stroke: self.stroke ) ) place( line( start: (offset - 1.5pt * self.scale, -5.5pt * self.scale), end: (offset + 1.5pt * self.scale, -2.5pt * self.scale), stroke: self.stroke ) ) continue } if (type(tab) == str and lower(tab) == "o") { place( dx: self.step * col - radius, dy: -4pt * self.scale - radius, circle(radius: radius, stroke: self.stroke) ) continue } if type(tab) == int and tab > 0 and tab <= self.frets-amount { place( dx: self.step * col - radius, dy: self.step * tab - radius - 2.5pt * self.scale, circle(radius: radius, stroke: none, fill: black) ) continue } } } return ( bounds: ( dx: 0pt, dy: -(4pt * self.scale + radius), width: self.grid.width, height: 2 * radius ), elements: elements ) } // Draws a capo list // // capo = (fret, start, end) // fret: fret position // start: lowest starting string // end: highest ending string #let draw-capos(self) = { let size = self.tabs.len() let elements = { for (fret, start, end, ..) 
in self.capos { if start > size { start = size } if end > size { end = size } place( dy: fret * self.step - 2.5pt * self.scale, line( start: ((size - start) * self.step, 0pt), end: ((size - end) * self.step, 0pt), stroke: (paint: black, thickness: 3.4pt * self.scale, cap: "round") ) ) } } return ( bounds: ( dx: 0pt, dy: 0pt, width: 0pt, height: 0pt ), elements: elements ) } // Draws the finger numbers below the grid #let draw-fingers(self) = { let size = self.tabs.len() let elements = { for (finger, col) in self.fingers.zip(range(size)) { if type(finger) == int and finger > 0 and finger < 6 { place( left + top, dx: col * self.step - 1.5pt * self.scale, dy: self.grid.height + 1.5pt * self.scale, text(6pt * self.scale)[#finger]) } } } let (dx, dy, width, height) = (0pt, 0pt, 0pt, 0pt) if self.fingers.len() != 0 { let size = measure(text(6pt * self.scale)[~]) dy = self.grid.height + 1.5pt * self.scale width = size.width height = size.height } return ( bounds: ( dx: dx, dy: dy, width: width, height: height ), elements: elements ) } // Draws the fret start number that indicates the starting position of the fretboard #let draw-fret(self) = { let dx = -3pt * self.scale let dy = self.step / 2 - 0.2pt * self.scale let size = measure(text(8pt * self.scale)[#self.fret]) if size.width == 0pt { dx = 0pt } let elements = { place(left + top, dx: dx, dy: dy, place(right + horizon, text(8pt * self.scale)[#self.fret]) ) } return ( bounds: ( dx: dx - size.width, dy: dy, width: size.width, height: size.height ), elements: elements ) } // Draws the chord name below the grid and finger numbers #let draw-name(self) = { let vertical-offset = { if self.position == "top" and self.fingers.len() == 0 { 5pt * self.scale } else { 10pt * self.scale } } let anchor = top let dx = self.grid.width / 2 let dy = self.grid.height + vertical-offset if self.position == "bottom" { dy = -vertical-offset anchor = bottom } let elements = { place( center + anchor, dx: dx, dy: dy, box( fill: self.background, outset: 2pt * self.scale, radius: 2pt * self.scale, text(size: 12pt * self.scale, ..self.text-params)[#self.name] ) ) } let size = (:) size.name = measure(text(12pt * self.scale)[#self.name]) size.fret = measure(text(8pt * self.scale)[#self.fret]) size.graph = ( width: self.tabs.len() * self.step, height: self.frets-amount * self.step ) size.name.width += 1pt * self.scale dx = (self.grid.width - size.name.width) / 2 if self.position == "bottom" { dy -= size.name.height } return ( bounds: ( dx: dx, dy: dy, width: size.name.width, height: size.name.height ), elements: elements ) } // Render the chart #let render(self) = context { let objects = ( draw-nut(self), draw-grid(self), draw-tabs(self), draw-capos(self), draw-fingers(self), draw-fret(self), draw-name(self) ) let init = ( bounds: (dx: 0pt, dy: 0pt, width: 0pt, height: 0pt), elements: [] ) let (bounds, elements) = objects.fold( init, (acc, (bounds, elements)) => { return ( bounds: total-bounds(acc.bounds, bounds), elements: acc.elements + elements ) } ) box( width: bounds.width, height: bounds.height, place( left + top, dx: -bounds.dx, dy: -bounds.dy, { elements } ) ) } /// Generates a chart chord for stringed instruments. /// /// - ..text-params (auto): Embeds the native *text* parameters from the standard library of *typst*. *Optional*. /// /// - tabs (str): Shows the tabs on the chart. *Optional*. /// - *x*: mute note. /// - *o*: air note. /// - *n*: without note. /// - *number*: note position on the fret. 
/// /// The string length of tabs defines the number of strings on the instrument. /// #parbreak() Example: /// - ```js "x32o1o"``` - (6 strings - C Guitar chord). /// - ```js "ooo3"``` - (4 strings - C Ukulele chord). /// /// - fingers (str): Shows the finger numbers. *Optional*. /// - *n*, *x*, *o*: without finger, /// - *number*: one finger /// #parbreak() Example: ```js "n32n1n"``` - (Fingers for guitar chord: C) /// /// - capos (str): Adds one or many capos on the chart. *Optional*. /// - 1#super[st] digit -- *fret*: fret position. /// - 2#super[nd] digit -- *start*: lowest starting string. /// - 3#super[rd] digit -- *end*: highest ending string. /// #parbreak() Example: ```js "115"``` $\u{2261}$ ```js "1,1,5"``` $=>$ ```js "fret,start,end"``` /// #parbreak() With ```js "|"``` you can add capos: /// #parbreak() Example: ```js "115|312"``` $\u{2261}$ ```js "1,1,5|3,1,2"``` $=>$ ```js "fret,start,end|fret,start,end"``` /// /// - fret (int): Shows the fret number that indicates the starting position of the fretboard. *Optional*. /// /// - frets-amount (int): Sets the frets amount (the grid rows). *Optional*. /// /// - design (str): Sets the chart design. *Optional*. /// - ```js "sharp"```: chart with sharp corners. /// - ```js "round"```: chart with round corners. /// /// - position (str): Sets the chord chart position. *Optional*. /// - ```js "top"```: chord chart in top position. /// - ```js "bottom"```: chord chart in bottom position. /// /// - background (color): Sets the background color of the chord name. *Optional*. /// /// - name (str, content): Shows the chord name. *Required*. /// /// -> content #let chart-chord( ..text-params, tabs: "", fingers: "", capos: "", fret: none, frets-amount: 5, design: "sharp", position: "top", background: rgb(0, 0, 0, 0), name ) = { assert.eq(type(tabs), str) assert.eq(type(fingers), str) assert.eq(type(capos), str) assert.eq(type(frets-amount), int) assert.eq(type(background), color) assert(type(fret) == int or fret == none, message: "type of 'fret' must to be 'int' or 'none'") assert(type(name) in (str, content), message: "type of 'name' must to be 'str' or 'content'") assert(design in ("sharp", "round"), message: "'design' must to be '\"sharp\"' or '\"round\"'") assert(position in ("bottom", "top"), message: "'position' must to be '\"bottom\"' or '\"top\"'") let tabs = parse-input-string(tabs) let fingers = parse-input-string(fingers) let capos = parse-input-string(capos) if capos.len() != 0 and type(capos.first()) != "array" { capos = (capos,) } let (size, font, ..text-params) = set-default-arguments(text-params.named()) set text(font: font) let scale = size-to-scale(size, 12pt) let step = 5pt * scale let stroke = black + 0.5pt * scale let self = ( grid: ( width: (tabs.len() - 1) * step, height: frets-amount * step, rows: frets-amount, cols: tabs.len() - 1, ), scale: scale, step: step, stroke: black + 0.5pt * scale, text-params: text-params, tabs: tabs, fingers: fingers, capos: capos, fret: fret, frets-amount: frets-amount, design: design, position: position, background: background, name: name, ) render(self) }
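The doc comments above spell out the string formats for `tabs`, `fingers`, and `capos`; a short usage sketch ties them together. This is illustrative only and not part of the original file: the local import path and the trailing content block for the required `name` argument are assumptions based on the function signature, and the parameter values are taken from the doc comments.

```typ
// Hypothetical usage of chart-chord (values taken from the doc comments above).
#import "chart.typ": chart-chord

// C major on guitar: muted low E ("x"), open strings ("o"), finger numbers under the grid.
#chart-chord(tabs: "x32o1o", fingers: "n32n1n")[C]

// Same shape with round corners, the name below the grid,
// and a capo drawn on fret 1 spanning strings 1 to 5 ("115").
#chart-chord(tabs: "x32o1o", capos: "115", design: "round", position: "bottom")[C]
```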
https://github.com/remigerme/typst-polytechnique
https://raw.githubusercontent.com/remigerme/typst-polytechnique/main/guide.typ
typst
MIT License
#import "@preview/typographix-polytechnique-reports:0.1.4" as template #show: template.apply // Specific rules for the guide #show link: set text(blue) #let source = s => { block(width: 100%)[#{ set align(left) text(font: "New Computer Modern Sans", weight: "bold")[Source code :] v(-0.5em) pad(left: 5%)[#block(fill: rgb("f5f5f5"), width: 100%, inset: 10pt)[#raw(s, lang: "typ")]] set align(center) v(0.2em) line(length: 50%, stroke: 0.8pt + rgb("d0d0d0")) v(0.5em) }] } #let typst-rendering(d) = { block[#text(font: "New Computer Modern Sans", weight: "bold")[Typst rendering :]] pad(left: 5%)[#block(fill: rgb("f5f5f5"), inset: 10pt, width: 100%)[#eval(d, mode: "markup")]] source(d) } // Defining variables for the cover page and PDF metadata #let title = [Guide for Typst #linebreak() Polytechnique package] #let subtitle = "A modern alternative to LaTeX" #let logo = image("assets/logo-x.svg") #let short_title = "package guide" #let authors = ("<NAME>") #let date_start = datetime(year: 2024, month: 07, day: 05) #let date_end = datetime(year: 2024, month: 08, day: 05) #set text(lang: "en") // Beginning of the content #template.cover.cover(title, authors, date_start, date_end, subtitle: subtitle, logo: logo) #pagebreak() #outline(title: [Guide content], indent: 1em, depth: 2) #pagebreak() = Discovering Typst and the template #typst-rendering( "Typst is a user-friendlier alternative to LaTeX. Check out #link(\"https://github.com/remigerme/typst-polytechnique/blob/main/guide.typ\")[this pdf source] to see how it was generated." ) == Headings #typst-rendering("=== Level 3 heading") Use only one (resp. two) `=` for level 1 (resp. 2) heading (and so on). #typst-rendering( "#heading(level: 3, numbering: none)[Level 3 heading without numbering] ==== Level 4 heading" ) == Cover page ```typc // Defining variables for the cover page and PDF metadata // Main title on cover page #let title = [Guide for Typst #linebreak() Polytechnique package] // Subtitle on cover page #let subtitle = "A modern alternative to LaTeX" // Logo on cover page #let logo = none // instead of none set to image("path/to/my-logo.png") #let logo-horizontal = true // set to true if the logo is squared or horizontal, set to false if not // Short title on headers #let short-title = "package guide" #let author = "<NAME>" #let date-start = datetime(year: 2024, month: 06, day: 05) #let date-end = datetime(year: 2024, month: 09, day: 05) // Set to true for bigger margins and so on (good luck with your report) #let despair-mode = false #set text(lang: "en") // Set document metadata #set document(title: title, author: author, date: datetime.today()) #show: template.apply.with(despair-mode: despair-mode) // Cover page #template.cover.cover(title, author, date-start, date-end, subtitle: subtitle, logo: logo, logo-horizontal: logo-horizontal) #pagebreak() ``` Set text lang to `fr` if you want the months in French. \ You can also specify `short-month: true` in the call to cover to get month abbreviations. == Doing some math #typst-rendering( "Inline : $P V = n R T$ and $f : x -> 1/18 x^4$, $forall x in RR, f(x) >= 0$." ) #typst-rendering( "Block (note space after opening \$ and before closing \$) : $ f(b) = sum_(k=0)^n (b-a)^k / k! f^((k))(a) + integral_a^b (b-t)^n / n! f^((n+1))(t) dif t $" ) == Table of contents You can generate a table of contents using `#outline()`. 
Here are useful parameters you can specify: - `indent` - `depth` - `title` (put the title inside brackets: [title]) For example, the previous table of contents was generated using: ```typc #outline(title: [Guide content], indent: 1em, depth: 2) ``` == Cite an article #typst-rendering( "You can cite an article, a book or something like @example-turing. Just see the `#bibliography` command below - you need a `.bib` file containing the bibliography." ) == Numbering pages Useful commands to number pages (learn about #link("https://typst.app/docs/reference/model/numbering/")[numbering patterns]): ```typc #set page(numbering: none) // to disable page numbering #set page(numbering: "1 / 1") // or another numbering pattern #counter(page).update(1) // to reset the page counter to 1 ``` *Warning*: put these instructions at the very beginning of a page, otherwise they will cause a pagebreak. #typst-rendering("#lorem(25)") == Dummy text with lorem You can generate dummy text with the `#lorem(n)` command. For example: #lower(lorem(10)) #pagebreak() = Modify the template == Contribute Contributions are welcome! Check out the #link("https://github.com/remigerme/typst-polytechnique")[source repository]. You can also learn more about the #link("https://github.com/typst/packages")[Typst packages] release pipeline. #pagebreak() #bibliography("assets/example.bib")
https://github.com/Treeniks/bachelor-thesis-isabelle-vscode
https://raw.githubusercontent.com/Treeniks/bachelor-thesis-isabelle-vscode/master/chapters/05-main-enhancements/non-html-content.typ
typst
#import "/utils/todo.typ": TODO #import "/utils/isabelle.typ": * == Non-HTML Content for Panels <enhance:non-html> The output and state panels in #vscode were previously always sent as HTML content by the language server. The server sends #box[`PIDE/dynamic_output`] and #box[`PIDE/state_output`] notifications with output and state content, respectively. In this section, we will focus on content for the output panel. However, everything is almost equivalently done for state panel content. The #box[`PIDE/dynamic_output`] notification only contained a single `content` value, a string containing the panel's content. As mentioned, this content used to be HTML content that #vscode displayed in a WebView. However, not every code editor can natively display HTML content, and there used to be no way for a language client to get pure text content instead. We added a new Isabelle system option called #box[`vscode_html_output`]. If disabled, the language server skips the conversion to HTML and sends text content instead. However, this poses a new problem: The conversion to HTML added highlighting to the panel content. It takes the source XML body, extracts the relevant decoration markup, and uses it to generate equivalent HTML markup. Skipping this conversion and sending pure text instead also meant the language client got no highlighting within these panels. The Neovim language client prototype mentioned in @intro:motivation had this problem, as seen in @fig:neovim-no-decs. #{ show figure.caption: it => box(width: 71%, it) columns(2)[ #figure( box(stroke: 1pt, image("/resources/neovim-no-decs-light.png", width: 100%)), kind: image, caption: [Neovim Isabelle client without decorations in output panel.], // placement: bottom, ) <fig:neovim-no-decs> #colbreak() #figure( box(stroke: 1pt, image("/resources/neovim-with-decs-light.png", width: 100%)), kind: image, caption: [Neovim Isabelle client with decorations in output panel.], // placement: bottom, ) <fig:neovim-with-decs> ] } Decorations within state and output panels are quite important, as they provide more than just superficial visuals. There are many cases when writing Isabelle proofs where a single name is used for two or more individual variables. Isabelle also often generates its own names within proofs, and that generation may introduce further overlaps of identifiers. This may create goals like #isabelle[#text(blue)[`x`]` = `#text(green)[`x`]] that are not provable because the left #isabelle[#text(blue)[`x`]] is a different variable than the right #isabelle[#text(green)[`x`]]. The only way to differentiate these variables in these cases is by their color. If the colors are missing, the goal will look like #isabelle(`x = x`). To fix this, we added an optional `decorations` value to #box[`PIDE/dynamic_output`] and #box[`PIDE/state_output`] notifications, which is only given when HTML output is disabled. The form of this value is the same as the `entries` value of the #box[`PIDE/decoration`] notifications described in @enhance:decorations. That way, even when the server sends non-HTML panel content, the client can apply the given decorations to the respective panel. The result of adding this functionality to Neovim's language client prototype can be seen in @fig:neovim-with-decs. // To extract the decoration markup from the output and state XML bodies, we used Isabelle's internal `Markup_Tree` module. 
// #TODO[ // - currently server sends output always in HTML format // - VSCode can display HTML, but not all clients can // - now can disable HTML output and send pure text instead with option // - added decorations to the message if HTML is disabled (biggest usability win in neovim) // ]
https://github.com/Myriad-Dreamin/typst.ts
https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/fuzzers/corpora/layout/enum-align_02.typ
typst
Apache License 2.0
#import "/contrib/templates/std-tests/preset.typ": * #show: test-page #set enum(number-align: center + horizon) 1. #box(fill: teal, inset: 10pt )[a] 8. #box(fill: teal, inset: 10pt )[b] 16. #box(fill: teal,inset: 10pt )[c]
https://github.com/giacomocavalieri/gleamy_slides
https://raw.githubusercontent.com/giacomocavalieri/gleamy_slides/main/README.md
markdown
Apache License 2.0
# Gleamy slides This is a slide template I use to make my Typst slides to present Gleam code. It also contains a Gleam sublime syntax and code highlighting theme to get nice-looking code snippets.
https://github.com/0xPARC/0xparc-intro-book
https://raw.githubusercontent.com/0xPARC/0xparc-intro-book/main/src/2pc-takeaways.typ
typst
#import "preamble.typ":* #takeaway[2PC takeaways][ 1. A _garbled circuit_ allows Alice and Bob to jointly compute some function over their respective secret inputs. We can think of this as your prototypical _2PC_ (two-party computation). 2. The main ingredient of a garbled circuit is _garbled gates_, which are gates whose functionality is hidden. This can be done by Alice precomputing different outputs of the garbled circuit based on all possible inputs of Bob, and then letting Bob pick one. 3. Bob "picks an input" with the technique of _oblivious transfer (OT)_. This can be built in various ways, including with commutative encryption or public-key cryptography. 4. More generally, it is also possible for a group of people to compute whatever secret function they want, which is the field of _multiparty computation (MPC)_. ]
https://github.com/JakMobius/courses
https://raw.githubusercontent.com/JakMobius/courses/main/mipt-os-basic-2024/sem03/utils.typ
typst
#let palette = array.range(8).map((i) => { color.hsv(i * 45deg + 240deg, 100%, 100%) }) #let cell-color(base-color) = { if base-color == none { base-color = blue } let background-color = color.mix((base-color, 20%), (white, 80%)) let stroke-color = color.mix((base-color, 50%), (black, 50%)) ( base-color: base-color, background-color: background-color, stroke-color: stroke-color, ) } #let conpro(color, content) = { set text(fill: white, weight: "black", size: 20pt) box( baseline: 0.5em, width: 1.5em, height: 1.5em, radius: 5pt, fill: color, )[ #align(center + horizon)[#content] ] h(0.5em) } #let pro() = conpro(green)[+] #let con() = conpro(red)[-]
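Since `pro`, `con`, and `cell-color` are small presentation helpers, a brief usage sketch may clarify how they are meant to be combined on a slide. This is not part of the original file; the import path is an assumption, and the calls simply follow the definitions above.

```typ
// Hypothetical usage of the helpers defined above.
#import "utils.typ": palette, cell-color, pro, con

#pro() Constant-time lookups
#con() Extra memory for the hash table

// cell-color derives a light background fill and a darker stroke from a base color:
#let c = cell-color(palette.at(0))
#box(fill: c.background-color, stroke: c.stroke-color, inset: 6pt)[cache line]
```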
https://github.com/HiiGHoVuTi/requin
https://raw.githubusercontent.com/HiiGHoVuTi/requin/main/lang/auto_arbre.typ
typst
#import "../lib.typ": * #show heading: heading_fct On note $Q^* = union.big_(n in NN) Q^n$. Un _alphabet_ $Sigma$ est un ensemble fini de symboles. On pose $\# : Sigma --> NN$ une fonction qui à un symbole associe son arité (son nombre d'arguments). On notera $f^(\#n)$ pour dénoter que le symbole $f$ est d'arité $\#(f) = n$. On pose $cal(T)(Sigma,\#)$ l'ensemble des arbres étiqueté par $Sigma$ tel que pour tout nœud étiqueté par $alpha$, il possède $\#(alpha)$ enfants. Un _automate d'arbre_ est un quintuplet $A = angle.l Sigma, \#, Q, Q_f, delta angle.r$ avec $Q_f$ l'ensemble des états finaux tel que $Q_f subset.eq Q$ fini l'ensemble des états et $delta : Sigma times Q^* -> Q $ la fonction de transition, tel que pour tout $ x in Sigma$, $delta(x,dot) : Q^(\#(x)) -> Q$ soit totale. On note un nœud $alpha$ d'enfants $x_1,..., x_(\#(alpha))$ par $alpha(x_1,...,x_(\#(alpha)))$. On défini inductivement l'évaluation $delta^* : cal(T)(Sigma,\#) -> Q $ d'un nœud $alpha(x_1,...,x_n)$ : $ delta^*(alpha(x_1,...,x_n)) = delta (alpha, (delta^*(x_1),...,delta^*(x_n))) $ On a donc que $delta^*(a) = delta(a,())$ si $\#(a) = 0$. Un arbre $T in cal(T)(Sigma,\#)$ est _accepté_ par $A$ si $delta^*(T) in Q_f$. Un ensemble d'arbres $T subset.eq cal(T)(Sigma,\#)$ est _reconnus_ par un automate d'arbre $A$ si $A$ n'accepte que les arbres de $T$. Dans ce cas $T$ est _reconnaissable_. On considère $Sigma_1 = {f^(\#2),a^(\#0)}$. #question(0)[Soit $A = angle.l Sigma_1, \#, Q:={0,1,2}, {0}, delta angle.r$ avec $delta$ telle que: - $delta (a,()) = delta(f,(1,1)) = 0$, - $delta(f,(0,1)) = delta(f,(1,0)) = delta(f,(2,x)) = delta(f,(x,2)) = 2$ pour tout $x in {0,1,2}$, - $delta(f,(0,0)) = 1$ Dire si les arbres $a$, $f(a,f(a,a))$ et $f(f(a,a),f(a,a))$ sont accepté par $A$.] #correct([ - $delta^*(a) = 0 in Q_f$ donc $a$ est accepté. - $delta^*(f(a,f(a,a))) = 2 in.not Q_f$ donc $a$ n'est pas accepté.. - $delta^*(f(f(a,a),f(a,a))) = 0 in Q_f$ donc $f(f(a,a),f(a,a))$ est accepté. ]) #question(1)[Proposer un automate d'arbre sur $Sigma_1$ qui n’accepte que les arbres ayant un nombre paire de feuilles.] #correct([ Très similaire à ce que l'on a fait. $A = angle.l Sigma_1, \#, Q:={0,1}, {0}, delta angle.r$ avec $delta$ telle que: - $delta (a,()) = delta(f,(1,0)) = delta(f,(0,1)) = 1$, - $delta(f,(0,0)) = delta(f,(1,1)) = 0$ ]) #question(1)[Quel ensemble d'arbres l'automate de la question 0 reconnais-t'il ?] #correct[ Il faut voir que $2$ est un état puis. On arrrive à 2 si on a un des fils différents, et les fils changent mod 2 à chaque hauteur. Donc on regarde si toutes les feuilles sont de profondeur impair. ] Soit $w in Sigma^*$ un mot sur $Sigma$, on pose $T(w)$ _l'arbre mot de $w$_ défini par $w_1(w_2(w_3(...(w_(|w|)(epsilon)))))$ sur $Sigma^' = { alpha^(\#1) : alpha in Sigma } union {epsilon^(\#0)}$ #question(2)[Montrer que soit $L$ un langage sur $Sigma$, $L$ est régulier ssi il existe un automate d'arbre sur $Sigma^'$ acceptant seulement les arbres mots des mots de $L$] #question(2)[Montrer que l'ensemble des arbres reconnus par un automate d'arbres est stable par union et complémentaire.] #correct([ 3. On construit facilement l'automate d'arbre en considérant $delta'(alpha,(q)) = delta(alpha, q)$ et $delta'(epsilon) = q_"init"$. réciproquement, on peut construire l'automate avec ces mêmes égalités. 4. Pour le complémentaire, on prend $Q_f' = Q \\ Q_f$. 
Pour l'union, on considère ]) Soit $T,A$ deux arbres de $cal(T)(Sigma,\#)$, si $u$ est un nœud (pas un symbole) de $T$, on note $T[u <- A]$ l'arbre ou l'on a remplacé le nœud $u$ par $A$. Si $u$ est aussi un nœud de $A$, on défini $T[u <- A]^n$ par $T[u <- A]^0 = T$ et $T[u <- A]^(n+1) = T[u <- A]^n [u <- A]$ #question(3)[Montrer le lemme de pompage pour les automates d'arbres: pour tout automate d'arbre $A$, il existe un $N >0$ tel que pour tout arbre $T$ de hauteur $>N$ accepté par $A$, il existe un décomposition de $T$ en $T = T'[u <- A]$ telle que : - $u in A$ - $A != {u}$ - $forall n in NN, T'[u <- A]^n$ est accepté par $A$ ] #question(2)[Montrer avec le lemme précédent que le langage des arbres parfait n'est pas reconnaissable par automate d'arbre.] #question(3)[Donner un langage d'arbre sur $Sigma = {f^(\#2),a^(\#0)}$ reconnaissable tel sa clôture par les relations d'équivalences $f(t_1,t_2) space ~_"comm" space f(t_2,t_1)$ et $f(t_1,f(t_2,t_3)) space ~_"assoc" space f(f(t_1,t_2),t_3)$ ne soit pas reconnaissable.]
https://github.com/topdeoo/NENU-Thesis-Typst
https://raw.githubusercontent.com/topdeoo/NENU-Thesis-Typst/master/pages/bachelor-declare.typ
typst
#import "../fonts/fonts.typ": font-family, font-size #import "../utils/format.typ": indent #import "../utils/datetime.typ": datetime-display-cn-declare #let bachelor-declare( // TODO 加入更多参数支持 two-side: false, fonts: (:), ) = { fonts = font-family + fonts //! 渲染 pagebreak( weak: true, to: if two-side { "odd" }, ) v(80pt) //! 标题 align( center, text( font: fonts.宋体, size: font-size.三号, weight: "bold", "独 创 性 声 明\n", ), ) //! 扉页内容 block[ #set text(font: fonts.宋体, size: font-size.四号) #set par(justify: true, first-line-indent: 2em, leading: 1em) #v(.5em) #indent 本人郑重声明:所提交的毕业论文是本人在导师指导下独立进行研究工作所取得的成果。据我所知,除了特别加以标注和致谢的地方外,论文中不包含其他人已经发表或撰写过的研究成果。对本人的研究作出重要贡献的个人和集体,均已在文中做了明确的说明。本声明的法律结果由本人承担。 ] //! 签名与日期 //TODO 加入自动识别是否存在签名的 `pdf` 或 `png`,在签名处自动填充 set text(font: fonts.宋体, size: font-size.四号) [ #v(2em) #indent 论文作者签名:#box(width: 7em, stroke: (bottom: 0.5pt), outset: 2pt)#h(1em)日期:#underline(offset: 2pt)[#datetime-display-cn-declare(datetime.today())] ] }
https://github.com/janlauber/bachelor-thesis
https://raw.githubusercontent.com/janlauber/bachelor-thesis/main/common/metadata.typ
typst
Creative Commons Zero v1.0 Universal
// Enter your thesis data here: #let titleEnglish = "One-Click Deployment" #let subTitleEnglish = "Simplifying Open-Source Software Deployment" #let degree = "Bachelor" #let program = "BSc Computer Science" #let advisor = "<NAME>" #let expert = "<NAME>" #let author = "<NAME>" #let submissionDate = "13.06.2024" #let projectPartner = "Natron Tech AG"
https://github.com/zenor0/FZU-report-typst-template
https://raw.githubusercontent.com/zenor0/FZU-report-typst-template/main/fzu-report/utils/to-string.typ
typst
MIT License
#let to-string(content) = { if content == none { none } else if type(content) == str { content } else if content.has("text") { content.text } else if content.has("children") { content.children.map(to-string).join("") } else if content.has("child") { to-string(content.child) } else if content.has("body") { to-string(content.body) } else if content == [ ] { " " } }
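A small usage sketch (illustrative, with an assumed import path) shows what the recursive flattening does:

```typ
#import "to-string.typ": to-string

// Recursively flattens styled content down to its plain text;
// this call should yield the string "Hello world".
#to-string([Hello *world*])
```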
https://github.com/wznmickey/typst_workshop
https://raw.githubusercontent.com/wznmickey/typst_workshop/main/2024Summer/a.typ
typst
#import "@preview/polylux:0.3.1": * #import "@preview/codly:1.0.0": * #set page(paper: "presentation-16-9") #set text(size: 20pt) #set text(font: ("IBM Plex Serif", "Noto Sans CJK SC")) #let LaTeX = { set text(font: "New Computer Modern") box( width: 2.55em, { [L] place(top, dx: 0.3em, text(size: 0.7em)[A]) place(top, dx: 0.7em)[T] place(top, dx: 1.26em, dy: 0.22em)[E] place(top, dx: 1.8em)[X] }, ) } #polylux-slide[ #align(horizon + center)[ = Typst workshop Part 2 #side-by-side[ 王梓宁 <NAME> #link("mailto:<EMAIL>") 2024 年 07 月 31 日 ][ #image("Feishu_Group.jpg", width: 10em) ] ] ] #polylux-slide[ == 一些资源 === 文档 - https://typst-doc-cn.github.io/docs/packages/ - https://typst.app/docs/ === 包和模板 - https://typst.app/universe/ ] #polylux-slide[ == 三种模式 - 标记模式 Markup Mode (类似 Markdown) - 默认模式 - 数学模式 Math Mode (类似 #LaTeX) - 在 `$` 与 `$`之间输入,同时支持行间公式和行内公式 - 代码模式 Code Mode (类似 一般编程代码) - 使用`#` 开头。 - 所有的标记模式里面的格式实际上都是代码模式里的部分函数的语法糖,可以通过函数方式调用以与其他代码协同 ] #polylux-slide[ == 部分常用的 #LaTeX 包的typst替代 1. 代码:原生自带,使用`raw`函数或\`导入 2. bibtex:原生自带,使用`bibliography` 函数导入 3. 图片:原生自带,使用`image` 函数导入 4. 表格:原生自带,使用`table` 函数导入 5. beamer:使用第三方包(比如`polylux`) ] #polylux-slide[ == 代码模式 语法 (与rust不同) - 以`#` 开头(如果需要多行命令,使用大括号包起来)。 #side-by-side[ ``` #text("123") ```][ #text("123") ] - 如果临时要使用标记模式的文本材料,使用`[]`包起来。 #side-by-side[ ``` #[*Hello*] ```][ #[*Hello*] ] - 表达式(函数等的返回值)会以默认文本格式显示 #side-by-side[ ``` "hello".len() ```][ #"hello".len() ] ] #polylux-slide[ - 使用`let` 赋值与(解)绑定 #side-by-side[ ``` #{ let x = 1 x } ```][ #{ let x = 1 x } ] #side-by-side[ ``` #{ let (a,..,b) = (1,2,3,4,5) b } ```][ #{ let (a, .., b) = (1, 2, 3, 4, 5) b } ] #side-by-side[ ``` #{ let myFun(x,y) = {x+y} myFun(1,2) } ```][ #{ let myFun(x, y) = { x + y } myFun(1, 2) } ] ] #polylux-slide[ - 有 `if`, `while`. `for` 是 in-range 设计 #side-by-side[ ``` #{ let array = (1, 2, 3) for i in array { if i >= 2 [#i] } } ```][ #{ let array = (1, 2, 3) for i in array { if i >= 2 [#i] } } ] - 使用`.`访问 字段和方法 #side-by-side[ ``` #let it = [== Typst] #it.depth #let dict = (name: "JI") #dict.at("name") #dict.values() ```][ #let it = [== Typst] #it.depth #let dict = (name: "JI") #dict.at("name") #dict.values() ] ] #polylux-slide[ - 类型 - 字符串 String`""` - 数组(变长) Array`()` - 字典 Dict `(key:value)` - `set` and `show` 修改文档的样式属性 ]
https://github.com/butaneprotocol/specs
https://raw.githubusercontent.com/butaneprotocol/specs/main/gema.typ
typst
#import "@preview/algorithmic:0.1.0": * #let special_version = read("./version.txt") #align(center, [ #v(2em) #[#set par(justify: true) #image("img/logo-black.png", width: 120pt)] #set text(size:18pt) Oracle Specifications #align(center, text(12pt, weight: 400)[ v. #special_version ]) #datetime.today().display("[day] [month repr:long], [year]") ]) #v(2em) = GEMA Oracle Specification == Overview This specification outlines the Greater Exponential Moving Average (GEMA) oracle algorithm, designed to provide a responsive and adaptive measure of asset price trends. It is particularly suited for financial applications requiring quick reactions to market downturns while maintaining stability during upward trends. == Algorithm Description The GEMA algorithm uses a single piece of previous state to add a drag for price increases while immediately reflecting decreases. This approach combines quick responsiveness to potential losses with the stability of trend following during growth periods. == Key Features 1. Immediate reflection of price decreases 2. Smoothed response to price increases using GEMA calculation 3. Single state variable for efficient implementation == Implementation Details === Inputs - `prices`: A list of asset prices over time - `period`: The number of days for the GEMA calculation (default: 20) === Output - A list of GEMA values corresponding to the input prices == Visual Representation #figure( image("img/example.png", width: 80%), caption: [ Comparison of Asset Price and GEMA over time. The red line represents the GEMA, which quickly adapts to price decreases while smoothing price increases. ] ) This graph demonstrates how the GEMA (red line) closely follows the asset price (blue line) during downward movements, while providing a smoother trend during upward movements. === Algorithm $x_t$ is the current price \ $gamma = "smoothing" / ("period" + 1)$ #align(center, [ $f(0) = x_0$ \ $f(t) = min { gamma dot f(t-1) + (1-gamma) x_t, x_t }$ ]) ==== Pseudocode #import "@preview/algorithmic:0.1.0" #import algorithmic: algorithm #algorithm({ import algorithmic: * Function("CalculateGEMA", args: ("prices", "period", "smoothing"), { Assign("gema", [prices[0]]) Assign("multiplier", $"smoothing" / ("period" + 1)$) For(cond: "price in prices[1:]", { If(cond: $"price" >= "gema"[-1]$, { Assign("new_gema", $("price" - "gema"[-1]) dot "multiplier" + "gema"[-1]$) Call("gema.append", "new_gema") }) Else({ Call("gema.append", "price") }) }) Return("gema") }) }) == Notes 1. The algorithm initializes the GEMA with the first price in the series. 2. For each subsequent price: - If the new price is lower than the previous GEMA, it immediately becomes the new GEMA value. - If the new price is higher, a standard GEMA calculation is applied. 3. This approach ensures quick adaptation to price drops while smoothing out price increases. == Reference Implementation For a reference implementation, refer to the `calculate_gema` function in the `oracles.py` file.
https://github.com/kdog3682/2024-typst
https://raw.githubusercontent.com/kdog3682/2024-typst/main/src/collections.typ
typst
#let base-math-collect(regex: "aaaa", create: none, collect: none) = { locate((loc) => { let mathEquations = query(selector(math.equation).before(loc), loc) let collection = mathEquations.filter(collect) let tableItems = collection.enumerate().map(((i, x)) => create(x, i)) let table-attrs = ( align: auto, column-gutter: auto, row-gutter: auto, columns: 3, fill: auto, // stroke: ( // top: none, // left: none, // right: none, // bottom: none, // ), rows: auto, ) return table(..table-attrs, ..tableItems.flatten()) }) } #let collect(eq) = { let kids = eq.body.children let first = kids.first() let fields = first.fields() let hasExponent = fields.at("t", default: none) != none let length = kids.len() return length > 6 and hasExponent } #let create(eq, i) = { let first = eq.body.children.first().fields() let statement = [The base for each part is $#first.base$.] let index = [*Example #{i + 1}*] return (index, eq, statement) } #let collect-exponents = base-math-collect.with( create: create, collect: collect, )
https://github.com/WinstonMDP/knowledge
https://raw.githubusercontent.com/WinstonMDP/knowledge/master/z.typ
typst
#import "cfg.typ": cfg #show: cfg = $ZZ$ $s$ - делитель $n := n$ - кратное $s := s divides n := n = t s, t in ZZ$. $n, m$ - ассоциированные $:= n = plus.minus m$. $n divides m -> m divides n -> n = plus.minus m$. $p$ простое $:= exists.not p' != plus.minus 1, plus.minus p space p' divides p$. Основная теорема арифметики: $forall n in QQ space n > 0 -> exists! $ разложение $n = p_1^(epsilon_1) p_2^(epsilon_2) ...$ Теорема Евклида: ${x in ZZ | x "простое"}$ бесконечно. $n = plus.minus p_1^alpha_1... -> m = plus.minus p_1^beta_1... -> "НОД"(n, m) := p_1^min(alpha_1, beta_1)...$ $"НОД"(n, m) divides n$. $d divides n -> d divides m -> d divides "НОД"(n, m)$. $n divides "НОК"(n, m)$. $n divides u -> m divides u -> "НОК"(n, m) divides u$. $"НОД"(n, m) "НОК"(n, m) = n m$. $n, m$ - взаимно простые $:= "НОД"(n, m) = 1$. $forall b != 0 space exists q, r in ZZ space cases(0 <= r < |b|, a = b q + r)$. $"НОД"(n, m) = min{n u + m v | exists u, v in ZZ space n u + m v > 0}$. Функция Эйлера $:= op(phi) n = abs({x in NN med mid(|) med cases(x < n, "НОД"(x, n) = 1)})$.
https://github.com/sabitov-kirill/comp-arch-conspect
https://raw.githubusercontent.com/sabitov-kirill/comp-arch-conspect/master/questions/1_phisical_base.typ
typst
#heading[Физические основы.] #emph[Физические основы. Реализация логических вентилей с помощью транзисторов.] == Электрические сигналы === Аналоговые сигналы #emph[Аналоговый сигнал] --- непрерывный сигнал, изменяющийся во времени. Принимающий всевозможные значения из заданного промежутка. Все реальные сигналы являются аналоговыми, то есть непрерывны и постоянно изменяются. === Дискретные (цифровые) сигналы Значений алоговых сигналов бесконечно много, в следствии чего их тяжело интерпретировать. Решение проблемы -- введение абстракции в виде дискретных сигналов. #emph[Дискретный сигнал] --- прерывистый сигнал, который изменяясь по времени принимает значения только из списка возможных. В реальности удобно вводить ряд констант для дискретизации аналогового сигнала: + Напряжение земли - $V_"gnd"$ (обычно $0V$) + Напряжение источника питания, то есть максимально возможное значение аналогового сигнала - $V_"dd"$ + Верхняя граница входного аналогового напряжения для получения 0 на выходе; то есть $0$ на выходе, если $V in [0; V_l]$ - $V_l$ + Нижняя граница напряжения на входе для получения $1$ на выходе, то есть $1$ на выходе, если $V in [V_h; V_"dd"]$ - $V_h$ #box(height: 45pt)[ #set align(center) #columns(2)[ #image("/images/analogue_signal.png") #colbreak() #image("/images/discr_signal.png") ] ] == Полупроводники Проводники -- категории материалов обладающие рядом особых свойств, делающих их строительными блоками для почти любой цифровой системы. В нашем случае будем считать, что полупроводники состоят из кремния (Si) валентности 4, т.е. имеющего 4 электрона в оболочке, и примеси (атомов других химических элементов с отличной валентностью). В зависимости от валентрости примеси выделяют два типа полупроводников: #columns(2)[ === Полупроводники _n_-типа При добавлении примеси с валентостью больше, чем у кремния, образуется полупроводник n-типа (negative). Носителями заряда являются свободные эллектроны от атомов примеси, свободно летающие по кристалической решетке. Тип связи -- донорная. Пример примеси с валентностью больше, чем у кремния - Фосфор с валентностью 5. #colbreak() === Полупроводники _p_-типа При добавлении примеси с валентносью меньше, чем у кремния, образуется полупроводник p-типа (positive). В месте связи атомов примеси с атомами кремния останется блуждающая по кристалической решетке “дырка” (т.к. не хватает эллектрона для валетной связи). Эта “дырка” и является носителем заряда. Тип связи -- акцепторная. Пример примеси с валентностю меньше чем у кремния - Бор с валентностью 3. ] #import "/commons.typ": imagebox #imagebox("p_n_type_semicond.png", height: 150pt) == Диоды Если соединить полупроводники _n_ и _p_ типов, то в месте их соприкосновения образуется _p-n_ переход, при этом часть с полупроводником _n_-типа называют _катод_, с _p_-типа -- _анод_. Такое явление лежит в работе прибора _диод_, имеющего нелинейную ВАХ (вольт-амперную характеристику). Суть являения заключаеться в том, что на аноде образуется недостача электронов, он заряжен положительно; на катоде много свободных электронов, он заряжен отрицательно. В следствии этого образуеться достаточно большая разность потенциалов. Тогда, если добавить внешнюю разность потенциалов, сонаправленно с внутренней (которая идет от анода к катоду), то ток пойдет, если противоноправленно и внешняя разность потенциалов будет компенсированна внутренней. В случае противноправленной внешней связи, если она будет достаточно большой, то произойдет пробой. 
#imagebox("diode.png", label: [_p-n_ переход и обозначение диода.], height: 70pt) == Конденсаторы Иногда полезно умение контролировать время за которое проходит заряд и накапливать его. Для этого существуют _конденсаторы_. Конденсатор состоит из двух проводников и диэлектрика между ними. Если подать на один из проводников заряд $V$, то между проводниками образуется магнитное поле, через какое-то время на первом проводнике будет заряд $Q$, на другом $-Q$. Вводят понятие емкости конденсатора $C = Q/V$, она прямопропорциальна площади проводников и обратно пропорциональна расстоянию между ними. Более высокая емкость означает, что электрическая схема будет работать медленнее и потребует для своего функционирования больше энергии. #imagebox("capacitor.png", height: 30pt, label: [Обозначение конденсатора с емкостью $C$.]) == Транзисторы _Транзистор_ (в нашем случае именно _полевой_, _МОП-транзистор_, _MOSFET_, _metal-oxide-semiconductor field-effect transistor_) --- несколько слоев проводников, полупроводников и диэлектриков. Принцип работы -- пока не повзаимодействовать с _gate (затвор)_, ток от _source (исток)_ не дойдет до _drain (сток)_. Все части транзистора закреплены на _substrate (база)_. Примерный размер современных МОП-транзисторов -- $1 mu"m" = 10^(-6) "m"$. Выделяют два типа МОП-транзисторов: #imagebox("transistors.png", height: 140pt, label: [nMOS (слева) и pMOS (справа) транзисторы.]) #pagebreak() #columns(2)[ === n-p-n (nMOS) Subatrate соединен с землей, source с источником питания. Если не подавать ток на gate, то диоды source to substrate и substrate to drain разнонаправлены, в следствии чего ток не идет. Если подать ток на gate, после зарядки конденсатора (который образован gate и subtrate) образуется канал из отрицательных электронов, по которому заряд может идти от source на drain. #colbreak() === p-n-p (pMOS) Subatrate соединен с источником питания, source с источником питания. Если на gate подан ток, то заряд от source *не* идет на drain (состояние sustrate не изменно из-за того, что она и так подключена к источнику питания, диоды разнонаправленны); если тока на drain нет, то идет ток от source до drain (образуеться канал _p_-типа между source и drain которые также _p_-типа). ] #imagebox("nmos.png", label: [Разные состояния nMOS проводника (напряжения на gate) --\ $0V$ на gate слева, $V_"dd"$ на gate справа.], height: 100pt) Так же из-за ряда физических явлений nMOS транзисторы плохо передают 1, а pMOS плохо передают 0, что стоит учитывать при создвании электросхем. *Производство кристаллов для процессора*. Размещается subtrate (база) -- круглая подложка из кремния, полученна нарезанием кремниевого циллиндра, в нее имплантируются примеси соответсвующего типа (валентонсть которых или ниже или выше валенстности кремния). Таким образом на подложке образуется множество мельчайших транзисторов, затем подложка нарезается на готовые чипы. == Логические схемы на транзисторах Тривиальные схемы для базовых логических (бинарных) операций: #imagebox("not.png", height: 100pt, label: [Схема для логического НЕ.]) #columns(2, gutter: 5em)[ #imagebox("nand.png", height: 100pt, label: [Схема для штриха Шеффера $arrow:t$, NAND.], alignMode: right) #colbreak() #imagebox("nor.png", height: 100pt, label: [Схема для стрелки Пирса $arrow:b$, NOR.], alignMode: left) ] Схемы для AND и OR можно было бы получить, поменяв тип транзисторов в схемах для NAND и NOR, но из-за эффекта плохой проводимости напряжений 0 и 1 соответсвующими транзистрорами на практике так делать нельзя. 
Решение -- просто добавить NOT к NAND, NOR.
https://github.com/paugarcia32/CV
https://raw.githubusercontent.com/paugarcia32/CV/main/modules/projects.typ
typst
Apache License 2.0
#import "../brilliant-CV/template.typ": * #cvSection("Projects") #cvProject( title: [It Odyssey], society: [Web Application], date: [2023 - Present], description: list( [Web Blog where I post articles related with my IT journey], ), url: "https://www.itodyssey.dev", tags: ("Web", "FrontEnd", "NextJS", "TS", "S.O.L.I.D"), logo: "../src/logos/itodyssey.png", ) #cvProject( title: [EETAC GO], society: [Full Stack Application], date: [2022 - 2023], description: list( [Mobile Application for new students of the campus], [Developed with a team of 5 members], ), url: "https://github.com/mariaubiergo2/EA-FRONTEND", tags: ("Mobile", "BackEnd", "Flutter", "TypeScript", "MongoDB", "Agile & SCRUM", "NodeJS"), logo: "../src/logos/eetacgo.png", )
https://github.com/pku-typst/awesome-PKU-Typst
https://raw.githubusercontent.com/pku-typst/awesome-PKU-Typst/main/README-en.md
markdown
MIT License
# Awesome PKU Typst [![Awesome](https://awesome.re/badge.svg)](https://awesome.re) > This is a list of awesome PKU Typst resources. Feel free to contribute. English | [简体中文](/README.md) ## Contents TBD
https://github.com/SWATEngineering/Docs
https://raw.githubusercontent.com/SWATEngineering/Docs/main/src/2_RTB/VerbaliInterni/VerbaleInterno_231107/content.typ
typst
MIT License
#import "meta.typ": inizio_incontro, fine_incontro, luogo_incontro #import "functions.typ": glossary, team #let participants = csv("participants.csv") = Partecipanti / Inizio incontro: #inizio_incontro / Fine incontro: #fine_incontro / Luogo incontro: #luogo_incontro #table( columns: (3fr, 1fr), [*Nome*], [*Durata presenza*], ..participants.flatten() ) = Sintesi Elaborazione Incontro == Assegnazione dei ruoli Su proposta dei componenti è stata assegnata questa prima distribuzione dei ruoli: - Responsabile: <NAME>; - Amministratore: <NAME>; - Analista: <NAME>; - Analista: <NAME>; - Verificatore: <NAME>; - Verificatore: <NAME>. Per il momento i ruoli di Progettista e Programmatore non sono stati assegnati, in quanto per il momento hanno operatività limitata. == Verbali degli Stand-Up Data l'importanza di registrare le decisioni prese durante gli incontri, si è sollevato il dibattito sulla necessità di verbalizzare gli Stand-Up. La decisione è stata quella di verbalizzare esclusivamente gli incontri Stand-Up in cui si affrontano decisioni di particolare rilevanza. Si è concordato che, per prendere una decisione, è necessaria la presenza di almeno 4 membri del team. Pertanto, saranno documentati soltanto gli Stand-Up che coinvolgono almeno 4 componenti, assicurando così la registrazione delle decisioni più significative. Questa prassi dovrà essere aggiunta alle NdP (_Norme di Progetto_) e il compito è stato assegnato a <NAME>ango. #pagebreak() == Separazione delle Repository Per quanto riguarda questo aspetto, abbiamo optato per la suddivisione in due repository distinte: una dedicata al codice, denominata _InnovaCity_, e un'altra per la documentazione, chiamata _Docs_. Questa scelta mira a semplificare la gestione complessiva del progetto e a evitare dispersione all'interno di una singola repository. == Presentazione del prossimo diario di bordo <NAME>, nel ruolo di Responsabile, si occupa della redazione e della presentazione del prossimo DDB (<NAME>) in data 13/11/2023. == Corrispondenza con Sync Lab A seguito dell'assegnazione del capitolato, si è deciso di contattare l'azienda Sync Lab per chiedere un incontro organizzativo e di chiarimento. L'azienda ha risposto positivamente alla richiesta e ha proposto un incontro in data 10/11/2023 alle ore 10:30. L'incontro si terrà in modalità remota tramite la piattaforma _Google Meet_. == Modifica del versionamento Si è deciso di modificare il versionamento del progetto, in risposta ai suggerimenti del committente. Il nuovo versionamento avrà la forma X.Y, dove: - X viene cambiato una volta che il documento è pronto per la revisione di avanzamento; - Y viene cambiato ad ogni modifica successiva apportata dai redattori. La responsabilità della modifica della sezione Versionamento nelle NdP è stata assegnata a Matteo Rango. == Modifica dei nomi dei documenti In seguito ai suggerimenti del Committente, si è deciso di modificare i nomi dei documenti, in modo che contengano la versione attuale. Inoltre, sempre sotto suggerimento del Committente, verranno rimosse, dalle tabelle changelog, le entry relative alle revisioni. #pagebreak() == Sostituzione note taking system Attualmente Notion viene utilizzato per prendere note relative ai meeting, all'organizzazione dell'ordine del giorno per questi ultimi, per l'allocazione delle risorse temporali dei componenti e in generale come strumento di condivisione di idee e informazione. 
Tuttavia, la sua limitatezza a 1000 blocchi, con il piano gratuito attualmente utilizzato, rischia in un futuro di non essere più sufficiente. La responsabilità di trovare un sistema alternativo è stata assegnata a <NAME> e <NAME>. == Transizione a Typst Per quanto riguarda la stesura dei documenti, è stata presa la decisione di cambiare il linguaggio utilizzato, passando da LaTeX a Typst. Di seguito sono riportati i motivi di questa scelta: - La sintassi risulta molto più semplice e rapida da utilizzare, risultando più efficiente rispetto all'attuale _LatTex_; - La compilazione è molto più rapida rispetto a _LaTeX_ e questo incentiva l'utilizzo delle _GitHub Actions_ per la compilazione automatica dei documenti; - Aggiunge funzionalità utili all'automazione di alcuni processi di stesura dei documenti, come ad esempio l'ultima versione riportata sul frontespizio; - Ci permette di mantenere lo stesso risultato estetico, senza perdita di alcuna funzionalità. La responsabilità della ricreazione dei template in Typst e del conseguente aggiornamento delle NdP viene affidata a Matteo Rango. #pagebreak() == Struttura repository _Docs_ La Repository _Docs_ è stata strutturata in modo da mantenere separato il codice Typst per la produzione dei documenti (src) e i documenti pdf prodotti, raccolti nelle cartelle che corrispondono alle milestone. Di seguito viene riportato un esempio della struttura. - src: (contiene i sorgenti) - 1_Candidatura: - Verbali: - Interni: - VerbaleXXX: - main.typ - ... - Esterni: - VerbaleXXX: - main.typ - ... - 1_Candidatura (prodotto) - 2_RTB (prodotto) - … (prodotto) La compilazione dei documenti avviene tramite GitHub Actions e i documenti prodotti vengono caricati nella cartelle corrispondenti. == Timing Control Nel corso della discussione, è emersa l'esigenza di implementare un sistema di tracciamento del tempo lavorativo. Questo consentirà di monitorare attentamente le ore di lavoro dedicate dai membri del gruppo. Al fine di ridurre la dispersione tra le piattaforme utilizzate, si sta valutando l'adozione di un sistema unificato, possibilmente sotto forma di un nuovo software che sostituisca l'attuale utilizzo di Notion. == Bozza di _Analisi dei Requisiti_ Nella riunione è sorta la necessità di avere una bozza dell'_Analisi dei Requisiti_. Questa bozza servirà come punto di partenza per la redazione del documento definitivo e ci preparerà all'incontro con l'azienda in data 10/11. L'obiettivo è iniziare a chiarire alcuni requisiti fondamentali e avere degli spunti di discussione con la Proponente. La bozza verrà redatta da <NAME> e <NAME>. La stesura verrà effettuata, per il momento, su Notion e si baserà sulla proposta del capitolato e sulle informazioni raccolte durante l'incontro con l'azienda.
https://github.com/Enter-tainer/typstyle
https://raw.githubusercontent.com/Enter-tainer/typstyle/master/tests/assets/unit/markup/func-in-par.typ
typst
Apache License 2.0
如果你不能很好地阅读和理解#link("https://semver.org/")[SemVer]规范,仅记住合法的版本号#link("https://semver.org/")由三个递增的数字#link("https://semver.org/")组成,并用「点号」(`.`)分隔;版本号之间#link("https://semver.org/")可以相互比较,且比较版本#link("https://semver.org/")时按顺序比较数字。例如`0.0.0`、`0.10.0`、`1.5.11`、`1.24.1`是合法且递增的版本号。 如果你不能很好地阅读和理解 #link("https://semver.org/")[SemVer] 规范,仅记住合法的版本号#link("https://semver.org/")由三个递增的数字组成,并用「点号」(`.`)分隔;版本号之间可以相互比较,且比较版本时按顺序比较数字。例如`0.0.0`、`0.10.0`、`1.5.11`、`1.24.1`是合法且递增的版本号。 Lorem ipsum dolor sit amet, `consectetur` adipiscing elit. Nulla nec purus feugiat, molestie ipsum et, consequat nibh.
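A minimal Typst sketch of the comparison rule described above, assuming version numbers are plain dotted strings of decimal integers (the helper name `ver-lt` is chosen only for illustration):

```typ
// Compare dotted version strings numerically, component by component,
// as described in the paragraphs above. Illustrative sketch only.
#let ver-lt(a, b) = {
  let pa = a.split(".").map(int)
  let pb = b.split(".").map(int)
  for i in range(calc.min(pa.len(), pb.len())) {
    if pa.at(i) != pb.at(i) { return pa.at(i) < pb.at(i) }
  }
  return pa.len() < pb.len()
}

#ver-lt("0.10.0", "1.5.11") \ // true
#ver-lt("1.24.1", "1.5.11")   // false: 24 > 5 when compared numerically
```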
https://github.com/HiiGHoVuTi/requin
https://raw.githubusercontent.com/HiiGHoVuTi/requin/main/algo/subsetsum.typ
typst
#import "../lib.typ": * #show heading: heading_fct On considère le problème suivant : #problem([SUBSETSUM],[$n in NN$ et $a_1, a_2, ..., a_n, S in NN$],[ Est-ce qu'il existe $I subset.eq [n]$ tel que\ $ sum_(i in I) a_i = S $ ]) #question(0)[ Pour les suites suivante de $(a_n)$ et les valeurs de $N$ suivantes, indiquer si le problème est satisfiable ou non : - $(a_n)_n = (1,2,4,8,16), N = 7$ - $(a_n)_n = (31,24,2,43,12,12,18), N = 29$ - $(a_n)_n = (1001,1010,101,100,11), N = 1111$ ] #question(1)[ Montrer que le problème est dans la classe NP ] #question(2)[ Donner un algorithme qui en $O(n S)$ résoud le problème ] #correct([ 0. On a $7 = 4+2+1$ et $1111 = 1010+101$. Le deuxième n'est pas satifiable car tout les nombres $<N$ sont pair et que $N$ est impair. 1. On donne la liste des indices, évidemment polynomiale car de longueur $<n$ et chaque élément est bornée par $n$. Vérifier si la somme égale à $S$ se fait en $O(n log S)$ (la somme de deux éléments plus petit que $S$ est en $log S$). 2. On le fait par programmation dynamique sur un tableau de booléens $T[i][j]$ de dimension $n times S$. On pose $T[i][k] = exists? I subset.eq {1,..,i}, sum a_i = k$ On pose $T[0][a_1] = T[0][0] = top$ et $T[0][k] = bot$ partout ailleurs et on a $ T[i+1][k] = cases(T[i][k] &"si " a_(i+1)> k,T[i][k] or T[i][k-a_(i+1)] &"sinon") $ On peut voir le lien avec Floyd-Warshall: on fait une programmation dynamique sur les entiers (sommets) que l'on rajoute petit à petit. ]) === NP-Complétude On cherche à prouver que le problème précédent est NP-Complet. Pour cela on considère une généralisation du problème sur des $k$-uplets : #problem([SUBSETSUM-VECT],[$k,n in NN$ et $a_1, a_2, ..., a_n, S in NN^k$],[ Est-ce qu'il existe $I subset.eq [n]$ tel que\ $ sum_(i in I) a_i = S $ ]) #question(1)[ Montrer que le problème *SUBSETSUM-VECT* est NP-Complet ] #question(2)[ Montrer que on peut réduire le problème *SUBSETSUM-VECT* au problème *SUBSETSUM* ] #question(2)[ Montrer que *SUBSETSUM-VECT* est NP-Complet. On pourra poser $k$ le nombre de clauses d'une formule $phi$ sous forme normale conjonctive ] #question(1)[ Vu que on a en question 2 un algorithme en $O(n S)$, est-ce que l'on peut en conclure que $"P" = "NP"$ ? ] === Un problème analogue On introduit le problème suivant : #problem([SUBSUM-SPACE],$a_1, a_2, ..., a_n, k in NN$,[ Est-ce qu'il existe $b_1,...,b_k in NN$ tel que\ $ forall i <= n, a_i in { sum_(j=1)^k beta_j b_j | beta_1, ..., beta_k in {0,1} } $ ]) #question(1)[Montrer que le problème est NP] #question(4)[Montrer que le problème est NP-Complet]
https://github.com/hakureiR-eimu/my-typst-documents
https://raw.githubusercontent.com/hakureiR-eimu/my-typst-documents/template/myTemplate.typ
typst
MIT License
#let heiti = ("Times New Roman", "Heiti SC", "Heiti TC", "SimHei") #let songti = ("Times New Roman", "Songti SC", "Songti TC", "SimSun") #let zhongsong = ("Times New Roman","STZhongsong", "SimSun") #let bib_cite(..names) = { for name in names.pos() { cite(name) } } #let indent() = { box(width: 2em) } #let indent_par(body) = { box(width: 1.8em) body } #let equation_num(_) = { locate(loc => { let chapt = counter(heading).at(loc).at(0) let c = counter("equation-chapter" + str(chapt)) let n = c.at(loc).at(0) "(" + str(chapt) + "-" + str(n + 1) + ")" }) } #let table_num(_) = { locate(loc => { let chapt = counter(heading).at(loc).at(0) let c = counter("table-chapter" + str(chapt)) let n = c.at(loc).at(0) str(chapt) + "-" + str(n + 1) }) } #let image_num(_) = { locate(loc => { let chapt = counter(heading).at(loc).at(0) let c = counter("image-chapter" + str(chapt)) let n = c.at(loc).at(0) str(chapt) + "-" + str(n + 1) }) } #let equation(equation, caption: "") = { figure( equation, caption: caption, supplement: [公式], numbering: equation_num, kind: "equation", ) } #let tbl(tbl, caption: "") = { figure( tbl, caption: caption, supplement: [表], numbering: table_num, kind: "table", ) } #let img(img, caption: "") = { figure( img, caption: caption, supplement: [图], numbering: image_num, kind: "image", ) } #let empty_par() = { v(-1em) box() } // inspired from https://github.com/lucifer1004/pkuthss-typst.git #let chinese_outline() = { align(center)[ #text(font: heiti, size: 18pt, "目  录") ] set text(font: songti, size: 12pt) // 临时取消目录的首行缩进 set par(leading: 1.24em, first-line-indent: 0pt) locate(loc => { let elements = query(heading.where(outlined: true), loc) for el in elements { // 计算机学院要求不出现三级以上标题 if el.level > 2 { continue } // 是否有 el 位于前面,前面的目录中用拉丁数字,后面的用阿拉伯数字 let before_toc = query(heading.where(outlined: true).before(loc), loc).find((one) => {one.body == el.body}) != none let page_num = if before_toc { numbering("I", counter(page).at(el.location()).first()) } else { counter(page).at(el.location()).first() } link(el.location())[#{ // acknoledgement has no numbering let chapt_num = if el.numbering != none { numbering(el.numbering, ..counter(heading).at(el.location())) } else {none} if el.level == 1 { set text(weight: "black") if chapt_num == none {} else { chapt_num "  " } el.body } else { chapt_num " " el.body } }] // 填充 ...... 
box(width: 1fr, h(0.5em) + box(width: 1fr, repeat[.]) + h(0.5em)) [#page_num] linebreak() } }) } // 原创性声明和授权书 #let declaration(anonymous: false) = { set text(font: songti, 12pt) v(5em) align(center)[ #text(font: heiti, size: 18pt)[ 学位论文原创性声明 ] ] text(font: songti, size: 12pt)[ #set par(justify: false, leading: 1.24em, first-line-indent: 2em) 本人郑重声明:所呈交的论文是本人在导师的指导下独立进行研究所取得的 研究成果。除了文中特别加以标注引用的内容外,本论文不包括任何其他个人或集体已经发表或撰写的成果作品。本人完全意识到本声明的法律后果由本人承担。 ] v(2em) align(right)[ #if not anonymous { text("作者签名:       年  月  日") } else { text("作者签名:██████████年███月███日") } ] v(6em) align(center)[ #text(font: heiti, size: 18pt)[ 学位论文版权使用授权书 ] ] text(font: songti, size: 12pt)[ #set par(justify: false, leading: 1.24em, first-line-indent: 2em) #if not anonymous [ 本学位论文作者完全了解学校有关保障、使用学位论文的规定,同意学校保留并向有关学位论文管理部门或机构送交论文的复印件和电子版,允许论文被查阅和借阅。本人授权省级优秀学士论文评选机构将本学位论文的全部或部分内容编入有关数据进行检索,可以采用影印、缩印或扫描等复制手段保存和汇编本学位论文。 ] else [ 本学位论文作者完全了解学校有关保障、使用学位论文的规定,同意学校保留并向有关学位论文管理部门或机构送交论文的复印件和电子版,允许论文被查阅和借阅。本人授权█████████████将本学位论文的全部或部分内容编入有关数据进行检索,可以采用影印、缩印或扫描等复制手段保存和汇编本学位论文。 ] 学位论文属于 1、保密 □,在#h(3em)年解密后适用本授权书。 #h(6.3em) 2、不保密 □ #h(6.3em)请在以上相应方框内打 “√” ] v(3em) align(right)[ #if not anonymous { text("作者签名:       年  月  日") } else { text("作者签名:██████████年███月███日") } ] align(right)[ #if not anonymous { text("导师签名:       年  月  日") } else { text("导师签名:██████████年███月███日") } ] } // 参考文献 #let references(path) = { // 这个取消目录里的 numbering set heading(level: 1, numbering: none) set par(justify: false, leading: 1.24em, first-line-indent: 2em) bibliography(path, title:"参考文献", style: "./hust-cse-ug.csl") } // 致谢,请手动调用 #let acknowledgement(body) = { // 这个取消目录里的 numbering set heading(level: 1, numbering: none) show <_thx>: { // 这个取消展示时的 numbering set heading(level: 1, numbering: none) set align(center) set text(weight: "bold", font: heiti, size: 18pt) "致  谢" } + empty_par() [= 致谢 <_thx>] body } // 中文摘要 #let zh_abstract_page(abstract, keywords: ()) = { set heading(level: 1, numbering: none) show <_zh_abstract_>: { align(center)[ #text(font: heiti, size: 18pt, "摘  要") ] } [= 摘要 <_zh_abstract_>] set text(font: songti, size: 12pt) abstract par(first-line-indent: 0em)[ #text(weight: "bold", font: heiti, size: 12pt)[ 关键词: #keywords.join(";") ] ] } // 英文摘要 #let en_abstract_page(abstract, keywords: ()) = { set heading(level: 1, numbering: none) show <_en_abstract_>: { align(center)[ #text(font: heiti, size: 18pt, "Abstract") ] } [= Abstract <_en_abstract_>] set text(font: songti, size: 12pt) abstract par(first-line-indent: 0em)[ #text(weight: "bold", font: heiti, size: 12pt)[ Key Words: #keywords.join("; ") ] ] } #let project( anonymous: false, // 是否匿名化处理 title: "", school: "", author: "", id: "", mentor: "", class: "", date: (1926, 8, 17), body, ) = { // 引用的时候,图表公式等的 numbering 会有错误,所以用引用 element 手动查 show ref: it => { if it.element != none and it.element.func() == figure { let el = it.element let loc = el.location() let chapt = counter(heading).at(loc).at(0) // 自动跳转 link(loc)[#if el.kind == "image" or el.kind == "table" { // 每章有独立的计数器 let num = counter(el.kind + "-chapter" + str(chapt)).at(loc).at(0) + 1 it.element.supplement " " str(chapt) "-" str(num) } else if el.kind == "equation" { // 公式有 '(' ')' let num = counter(el.kind + "-chapter" + str(chapt)).at(loc).at(0) + 1 it.element.supplement " (" str(chapt) "-" str(num) ")" } else { it } ] } else { it } } // 图表公式的排版 show figure: it => { set align(center) if it.kind == "image" { set text(font: heiti, size: 12pt) it.body it.supplement " " + it.counter.display(it.numbering) " " + it.caption.body 
locate(loc => { let chapt = counter(heading).at(loc).at(0) let c = counter("image-chapter" + str(chapt)) c.step() }) } else if it.kind == "table" { set text(font: heiti, size: 12pt) it.supplement " " + it.counter.display(it.numbering) " " + it.caption.body set text(font: songti, size: 10.5pt) it.body locate(loc => { let chapt = counter(heading).at(loc).at(0) let c = counter("table-chapter" + str(chapt)) c.step() }) } else if it.kind == "equation" { // 通过大比例来达到中间和靠右的排布 grid( columns: (20fr, 1fr), it.body, align(center + horizon, it.counter.display(it.numbering) ) ) locate(loc => { let chapt = counter(heading).at(loc).at(0) let c = counter("equation-chapter" + str(chapt)) c.step() }) } else { it } } set page(paper: "a4", margin: ( top: 2.5cm, bottom: 2.5cm, left: 3cm, right: 3cm )) // 封面 align(center)[ // hust logo #v(20pt) // 匿名化处理需要去掉个人、机构信息 #let logo_path = if not anonymous { "./assets/hust.png" } else { "./assets/black.png" } #image(logo_path, width: 55%, height: 7%) #v(40pt) #text( size: 36pt, font: zhongsong, weight: "bold" )[本科生毕业设计(论文)] #v(40pt) #text( font: heiti, size: 22pt, )[ #title ] #v(80pt) #let info_value(body) = { rect( width: 100%, inset: 2pt, stroke: ( bottom: 1pt + black ), text( font: zhongsong, size: 16pt, bottom-edge: "descender" )[ #body ] ) } #let info_key(body) = { rect(width: 100%, inset: 2pt, stroke: none, text( font: zhongsong, size: 16pt, body )) } #grid( columns: (70pt, 180pt), rows: (40pt, 40pt), gutter: 3pt, info_key("院  系"), info_value(if not anonymous { school } else { "██████████" }), info_key("专业班级"), info_value(if not anonymous { class } else { "██████████" }), info_key("姓  名"), info_value(if not anonymous { author } else { "██████████" }), info_key("学  号"), info_value(if not anonymous { id } else { "██████████" }), info_key("指导教师"), info_value(if not anonymous { mentor } else { "██████████" }), ) #v(30pt) #text( font: zhongsong, size: 16pt, )[ #date.at(0) 年 #date.at(1) 月 #date.at(2) 日 ] ] counter(page).update(0) // 页眉 set page( header: { set text(font: songti, 10pt, baseline: 8pt, spacing: 3pt) set align(center) if not anonymous { [114514 毕 业 设 计 (论 文)] } else { [█████████████████████████] } line(length: 100%, stroke: 0.7pt) } ) // 页脚 // 封面不算页数 set page( footer: { set align(center) grid( columns: (5fr, 1fr, 5fr), line(length: 100%, stroke: 0.7pt), text(font: songti, 10pt, baseline: -3pt, counter(page).display("I") ), line(length: 100%, stroke: 0.7pt) ) } ) set text(font: songti, 12pt) set par(justify: true, leading: 1.24em, first-line-indent: 2em) show par: set block(spacing: 1.24em) set heading(numbering: (..nums) => { nums.pos().map(str).join(".") + " " }) show heading.where(level: 1): it => { set align(center) set text(weight: "bold", font: heiti, size: 18pt) set block(spacing: 1.5em) it } show heading.where(level: 2): it => { set text(weight: "bold", font: heiti, size: 14pt) set block(above: 1.5em, below: 1.5em) it } // 首段不缩进,手动加上 box show heading: it => { set text(weight: "bold", font: heiti, size: 12pt) set block(above: 1.5em, below: 1.5em) it } + empty_par() pagebreak() counter(page).update(1) // 目录 chinese_outline() // 正文的页脚 set page( footer: { set align(center) grid( columns: (5fr, 1fr, 5fr), line(length: 100%, stroke: 0.7pt), text(font: songti, 10pt, baseline: -3pt, counter(page).display("1") ), line(length: 100%, stroke: 0.7pt) ) } ) counter(page).update(1) // 代码块(TODO: 加入行数) show raw: it => { set text(font: songti, 12pt) set block(inset: 5pt, fill: rgb(217, 217, 217, 1), width: 100%) it } body } // 三线表 #let tlt_header(content) = { set 
align(center) rect( width: 100%, stroke: (bottom: 1pt), [#content], ) } #let tlt_cell(content) = { set align(center) rect( width: 100%, stroke: none, [#content] ) } #let tlt_row(r) = { (..r.map(tlt_cell).flatten()) } #let three_line_table(values) = { rect( stroke: (bottom: 1pt, top: 1pt), inset: 0pt, outset: 0pt, grid( columns: (auto), rows: (auto), // table title grid( columns: values.at(0).len(), ..values.at(0).map(tlt_header).flatten() ), grid( columns: values.at(0).len(), ..values.slice(1).map(tlt_row).flatten() ), ) ) }
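A minimal usage sketch for the `three_line_table` helper defined above, assuming it is called from a document that imports this template; the header and row values are placeholders rather than real data:

```typ
// Illustrative call of the three_line_table helper; the first row is the
// header and each following tuple is a body row with the same arity.
#three_line_table((
  ("Method", "Accuracy (%)", "Time (s)"),
  ("Baseline", "87.2", "1.4"),
  ("Improved", "91.5", "1.1"),
))
```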
https://github.com/teshu0/CLIT-report-typst
https://raw.githubusercontent.com/teshu0/CLIT-report-typst/main/activity.typ
typst
Creative Commons Zero v1.0 Universal
#import "template/utils.typ": title, author, date #import "template/reset.typ": reset #show: reset #title("活動内容報告 #99") #author("2400123, 電通太郎") #date() // 中を空にすると自動的に今日の日付になる = 報告課題(時間中にやったこと) ( ここに授業時間中にやったことを簡単にまとめてください。 ) = アンケート == Q1: ( ここにQ1の回答を記入 ) == Q2: ( ここにQ2の回答を記入 ) == Q3: ( ここにQ3の回答を記入 )