repo (stringlengths 26-115) | file (stringlengths 54-212) | language (stringclasses, 2 values) | license (stringclasses, 16 values) | content (stringlengths 19-1.07M) |
---|---|---|---|---|
https://github.com/simon-isler/zhaw-summaries | https://raw.githubusercontent.com/simon-isler/zhaw-summaries/main/S6%20-%20FS2024/MOBA2/main.typ | typst | #import "../../Template/template.typ": *
#show: project.with(
title: "Mobile Apps 2",
authors: (
"<NAME>",
),
)
#include "./sections/1-react.typ" |
|
https://github.com/TomVer99/FHICT-typst-template | https://raw.githubusercontent.com/TomVer99/FHICT-typst-template/main/examples/showcase/showcase.typ | typst | MIT License | #import "./../../template/fhict-template.typ": *
#import "./terms.typ": term-list
#show: fhict-doc.with(
title: "Showcase",
subtitle: "FHICT Template Showcase",
authors: (
(
name: "TomVer99",
),
),
// language: "nl",
// chapter-on-new-page: true,
available-languages: ("en", "nl", "de", "fr", "es"),
version-history: (
(
version: "1.0",
date: "2023-11-04",
author: [TomVer99],
changes: "Initial version",
),
),
table-of-figures: false,
// table-of-listings: true,
// table-of-tables: true,
// print-extra-white-page: true,
bibliography-file: bibliography("my-sources.bib"),
glossary-terms: term-list,
secondary-organisation-logo: read("Company.svg"),
secondary-organisation-color: rgb("184471"),
secondary-organisation-logo-height: 3.5cm,
enable-index: true,
)
= Examples
== Headings Example
#index-main[Headings]
=== level 3
==== level 4
===== level 5
====== level 6
Level 6 and higher are not supported in the custom syntax. They will still work, but will have the default styling.
== Text example
#index[Text]
#lorem(100)
== Figures example
#index[Figures]
=== Image
#index[Image embed]
This is a figure with an image. This will be shown in the list of figures if enabled.
#figure(
image(
"./../../template/assets/fontys-for-society.png",
width: 50%,
),
caption: "Figure example",
)
#pagebreak()
=== Code
#index[Code embed]
This is a figure with code. This will be shown in the list of listings if enabled.
#figure(
```cpp
int main() {
return 0;
}
```,
caption: "Code example C++",
)
#figure(
```rust
fn main() {
println!("Hello, world!");
}
```,
caption: "Code example Rust",
)
== Citations
#index[Citations]
This is a citation #cite(<FHICT-typst-template>).
== Tables
#index[Tables]
#figure(
ftable(
columns: (auto, auto, auto, 1fr),
[Header 1],
[Header 2],
[Header 3],
[Header 4],
[Cell 1],
[Cell 2],
[Cell 3],
[Cell 4],
[Cell 1],
[Cell 2],
[],
[Cell 4],
),
caption: "Table example",
)
The table above is made with the following code:
#figure(
```typst
#ftable(
columns: (auto, auto, auto, 1fr),
[Header 1],[Header 2],[Header 3],[Header 4],
[Cell 1],[Cell 2],[Cell 3],[Cell 4],
[Cell 1],[Cell 2],[],[Cell 4],
)
```,
caption: "Table example",
)
== Using Terms
#index[Terms]
The first time a term is used, its 'long' variant is shown if one is set; otherwise the 'short' variant is shown. Any later use of the term shows only the 'short' variant.
`@wip` will be shown as @wip. But the second time you use it, it will be shown as @wip.
#pagebreak()
== Links
#index[Links]
```typst
#hlink("www.google.com", content: "Google")
#hlink("www.google.com")
```
#hlink("www.google.com", content: "Google")
#hlink("www.google.com")
= Cover Page
#index[Cover Page]
The cover page in this document has a 'secondary organisation' added to it.
#pagebreak()
= Lorem
#index[Lorem]
== Lorem 1
`Lorem` is a placeholder text commonly used to demonstrate the visual form of a document or a font without relying on meaningful content.
#lorem(20)
#ftable(
columns: (auto, auto, auto, 1fr),
[Header 1],
[Header 2],
[Header 3],
[Header 4],
[Cell 1],
[Cell 2],
[Cell 3],
[Cell 4],
[Cell 1],
[Cell 2],
[],
[Cell 4],
)
```typst
#ftable(
columns: (auto, auto, auto, 1fr),
[Header 1],[Header 2],[Header 3],[Header 4],
[Cell 1],[Cell 2],[Cell 3],[Cell 4],
[Cell 1],[Cell 2],[],[Cell 4],
)
```
== Lorem 2
=== Lorem 3
$ a = (A_c + A_m dot sin(2 pi omega_m t)) dot sin(2 pi omega_c t) $
$ x(t) = 4 / pi sum_(k=1)^infinity sin((2 k - 1) 2 pi f t) / (2 k - 1) $
=== Lorem 4
#figure(
image(
"./../../template/assets/fontys-for-society.png",
width: 50%,
),
caption: "Figure example",
)
```typst
#figure(
image(
"./../../template/assets/fontys-for-society.png",
width: 50%,
),
caption: "Figure example",
)
```
|
https://github.com/jneug/schule-typst | https://raw.githubusercontent.com/jneug/schule-typst/main/src/util/marks.typ | typst | MIT License | // ================================
// = Hidden labels and states =
// ================================
//
// Place hidden metadata and marks in the document.
#import "args.typ": if-auto
/// Opens an environment to be used in contextual functions.
/// - name (str): Name of the environment.
/// -> content
#let env-open(name) = state("schule.env." + name).update(true)
/// Closes a previously opened environment.
/// - name (str): Name of the environment.
/// -> content
#let env-close(name) = state("schule.env." + name).update(false)
/// Checks if this command is called within a previously opened environment.
/// ```typst
/// context {
/// #marks.in-env("appendix") // false
/// #marks.env-open("appendix")
/// #marks.in-env("appendix") // true
/// #marks.env-close("appendix")
/// }
/// ```
/// #hint[Needs to be called in a `context`.]
/// #property(context: true)
/// - name (str): Name of the environment.
/// -> content
#let in-env(name) = state("schule.env." + name).get() not in (none, false)
/// Places hidden meta-data in the document that can be queried via @cmd[query].
/// - label (label): Label to attach to the metadata.
/// - data (any): Arbitrary data to optionally place with the metadata.
/// -> content
#let place-meta(label, data: auto) = [#metadata(if-auto(data, () => str(label)))#label]
/// Places a hidden figure in the document, that can be referenced via the
/// usual `@label-name` syntax.
/// - label (label): Label to reference.
/// - kind (str): Kind for the reference to properly step counters.
/// - supplement (str): Supplement to show when referencing.
/// - numbering (str): Numbering schema to use.
/// -> content
#let place-reference(
label,
kind,
supplement,
numbering: "1",
) = place()[#figure(
kind: kind,
supplement: supplement,
numbering: numbering,
[],
)#label]
/// Iterates over all items with a certain label found in the document (usually @cmd[metadata]).
/// #hint[Needs to be called in a `context`.]
/// #property(context: true)
/// - label (label): Label to query for.
/// - do (function): Function to execute on each item.
/// - before (bool): Set to true to only look for items before this call.
/// -> content
#let foreach(label, do: (it, loc) => none, before: false) = {
let items
if before {
items = query(selector(label).before(here()))
} else {
items = query(label)
}
for it in items {
if type(it) == metadata {
do(it.value, it.location())
} else {
do(it, none)
}
}
}
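/// Returns the page number of the first item carrying the given label,
/// or `none` if no such item exists in the document.
/// ```typst
/// // Usage sketch; <my-mark> is a placeholder label.
/// #marks.place-meta(<my-mark>)
/// #context marks.get-page(<my-mark>)
/// ```
/// #hint[Needs to be called in a `context`.]
/// #property(context: true)
/// - target (label): Label to query for.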
#let get-page(target) = {
let loc = query(target)
if loc != () {
loc.first().location().page()
} else {
none
}
}
|
https://github.com/Myriad-Dreamin/typst.ts | https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/fuzzers/corpora/meta/numbering_01.typ | typst | Apache License 2.0 |
#import "/contrib/templates/std-tests/preset.typ": *
#show: test-page
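// Exercises alphabetic numbering around its boundaries: 1-26 render as A-Z,
// 27 starts the two-letter range (AA), and 703 starts the three-letter range
// (AAA) after ZZ at 702.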
#for i in range(0, 4) {
numbering("A", i)
[ for #i \ ]
}
... \
#for i in range(26, 30) {
numbering("A", i)
[ for #i \ ]
}
... \
#for i in range(702, 706) {
numbering("A", i)
[ for #i \ ]
}
|
https://github.com/the-JS-hater/typst_cv | https://raw.githubusercontent.com/the-JS-hater/typst_cv/main/cv_2024VT.typ | typst | #grid(
columns: (7fr, 3fr),
gutter: 6pt,
[
#grid(
columns: (1fr, 3fr),
gutter: 3pt,
image("CV_pfp.jpg", width: 65%),
align(left + bottom)[
= <NAME>
Student
]
)
= Education
== MSc in Engineering, Software Engineering, Linköpings Universitet
#text(gray)[August 2021 - Present]
A computer science engineering programme with a focus on software development. Beyond programming itself, the programme develops competencies in general problem solving and development methodologies.
= Work Experience
During my studies I have worked part-time as a teaching assistant in several courses, which has given me the opportunity to further develop my problem-solving skills as well as my ability to communicate about programming. My duties have included supervising lab sessions and grading assignments and projects.
== Object-Oriented Programming and Java (TDDE30)
#text(gray)[January 2024 - May 2024]
An introductory course in OOP with Java, given for the Computer Engineering programme, consisting of a lab series and a larger project.
== Functional and Imperative Programming, Parts 1 & 2 (TDDE23/24)
#text(gray)[August 2023 - January 2024]
An introductory course in Python whose goal is to introduce students to fundamental programming concepts from the perspectives of the imperative and functional paradigms. In addition to supervising lab sessions and grading assignments, my duties included holding seminars where I presented and walked through various programming concepts and exercises connected to the course.
== Project: Mobile and Social Applications (TDDD80)
#text(gray)[January 2023 - May 2023]
A project course in which students carry out a project building a mobile application for Android with an accompanying _backend_. The course consists of a lab series run in parallel with the project. The course material covers SQLAlchemy, PostgreSQL, Python Flask, and Android Java.
],
[
= Contact Details
- Email: <EMAIL>
- Phone: +3584573445201
- Address: Björnkärrsgatan 9 B.22
= Skills
- Python
- Java
- C/C++
- SQL
- Rust
- Swedish
- English
= Other Experience
== Warehouse work in a grocery store
#text(gray)[June 2020 - August 2022]
Summer job in the fresh-goods department at Sparhallen, Mariehamn.
== Seasonal farm work
#text(gray)[June 2016 - August 2019]
Summer job with the vegetable growers Anki & Leif Kalm, Jomala.
== Volunteer work at HG
#text(gray)[February 2024 - Present]
Volunteering at the student union house _Ryds Herrgård_, doing waiting, kitchen, and bar work.
= Links
- #link("https://www.linkedin.com/in/morgan-nordberg-31457522b/")[
LinkedIn: \
]
#link("https://www.linkedin.com/in/morgan-nordberg-31457522b/")
- #link("https://github.com/the-JS-hater")[
Github: \
]
#link("https://github.com/the-JS-hater")
],
)
|
|
https://github.com/7sDream/fonts-and-layout-zhCN | https://raw.githubusercontent.com/7sDream/fonts-and-layout-zhCN/master/chapters/05-features/shaping/shaping.typ | typst | Other | #import "/template/template.typ": web-page-template
#import "/template/components.typ": note
#import "/lib/glossary.typ": tr
#show: web-page-template
// ## How OpenType shaping works
== How text #tr[shaping] works
// While we could now carry on describing the syntax of the feature file language and giving examples of OpenType rules, this would not necessarily help us to transfer our knowledge to new situations - especially when we are dealing with scripts which have more complicated requirements and expectations. To get that transferable knowledge, we need to have a deeper understanding of what we're doing and how it is being processed by the computer.
We already know the syntax for describing OpenType rules in feature files, but that alone is not enough to let us apply our existing knowledge efficiently, especially when dealing with #tr[script] systems that have complex layout requirements. To turn this knowledge about text into transferable font-design skills, we need a deeper understanding of what the computer actually does throughout the whole process.
// So we will now pause our experiments with substitution rules, and before we get into other kinds of rule, we need to step back to look at how the process of OpenType shaping works. As we know, *shaping* is the application of the rules and features within a font to a piece of text. Let's break it down into stages.
Let us pause our experiments with #tr[substitution] rules, and hold off on other kinds of rules for now. Stepping back for a moment, we will look at how OpenType text #tr[shaping] works as a whole. As we already know, #tr[shaping] is the process of applying the rules and features in a font to a piece of text. This process can be broken down into the following stages.
|
https://github.com/Enter-tainer/typstyle | https://raw.githubusercontent.com/Enter-tainer/typstyle/master/docs/architecture.md | markdown | Apache License 2.0 | ## High Level Overview
Typstyle is a code formatter: its input is a string of Typst source code and its output is a string of formatted Typst source code.
Formatting follows these main steps:
1. **Parsing**: The input code is parsed into an Abstract Syntax Tree (AST) using the `typst-syntax` package. If the input code is erroneous, the code is not formatted and the following steps are skipped.
2. **Attach Attributes**: The AST is traversed and certain attributes are attached to the nodes, for example marking nodes that should be skipped during formatting or formatted in a special way.
3. **Formatting**: The AST is traversed and transformed into a Wadler-style pretty-print tree, which is then converted into a string of formatted code.
4. **Post Processing**: The formatted code is post-processed to remove any trailing whitespace, etc.
5. **Output**: The formatted code is returned as the output.
The main work happens in steps 2 and 3. We will discuss these steps in detail in the following sections.
<!-- ## Attach Attributes
## Formatting -->
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/supercharged-dhbw/1.3.0/template/appendix.typ | typst | Apache License 2.0 | // level 1 headings are not allowed in the appendix
#let appendix = [
#lorem(100)
] |
https://github.com/AU-Master-Thesis/thesis | https://raw.githubusercontent.com/AU-Master-Thesis/thesis/main/sections/2-background/rrt.typ | typst | MIT License | #import "../../lib/mod.typ": *
== Rapidly Exploring Random Trees <s.b.rrt>
#acr("RRT") is a sampling-based path planning algorithm, introduced by <NAME> in 1998@original-rrt. The algorithm incrementally builds a tree of nodes, each node a specific step length, $s$, from the last. The tree is built by randomly sampling a point in the configuration space, and then extending the tree towards that point with $s$. See the entire algorithm in @alg-rrt@original-rrt@sampling-based-survey. @ex.rrt goes through a contextual example of the algorithm.
// What about `inline` code? Also shown in @lst.python-rrt.
// #listing(caption: [Pseudo-code for the regular RRT algorithm.])[
// ```python
// def rrt(start, goal, s, N, g_tolerance):
// V = {start}
// E = set()
// for i in range(N):
// x_random = SampleRandomPoint()
// x_nearest = NearestNeighbor(G=(V, E), x_random)
// x_new = Steer(s, x_nearest, x_random)
// if CollisionFree(x_nearest, x_new):
// V = V.union({x_new})
// E = E.union({(x_nearest, x_new)})
// else:
// continue
// if WithinGoalTolerance(g_tolerance, x_new, goal) and CollisionFree(x_new, goal):
// V = V.union({goal})
// E = E.union({(x_new, goal)})
// break
// return G = (V, E)
// ```
// ]<lst.python-rrt>
#let func(content) = text(theme.mauve, content)
#algorithm(
[
#show regex("(SampleRandomPoint|NearestNeighbor|Steer|CollisionFree|WithinGoalTolerance)"): set text(theme.mauve, font: "JetBrainsMono NF", size: 0.85em)
#let ind() = h(2em)
*Input:* $x_"start", x_"goal", s, N, g_"tolerance"$ \ \
$V #la {x_"start"}$ \
$E #la emptyset$ \ \
*for* $i = 1, dots, N$ *do* \
#ind()$x_"random" #la "SampleRandomPoint"()$ \
#ind()$x_"nearest" #la "NearestNeighbor"(G = (V, E), x_"random")$ \
#ind()$x_"new" #la "Steer"(s, x_"nearest", x_"random")$ \ \
#ind()*if* $"CollisionFree"(x_"nearest", x_"new")$ *then* \
#ind()#ind()$V #la V union x_"new"$ \
#ind()#ind()$E #la E union {(x_"nearest", x_"new")}$ \
#ind()*else* \
#ind()#ind()*continue* \
#ind()*end* \
#ind()*if* $"WithinGoalTolerance"(g_"tolerance", x_"new", x_"goal")$ \
#ind()#h(1em)$and "CollisionFree"(x_"new", x_"goal")$ *then* \
#ind()#ind()$V #la V union x_"goal"$ \
#ind()#ind()$E #la E union {(x_"new", x_"goal")}$ \
#ind()#ind()*break* \
#ind()*end* \
*end* \ \
*Output:* $G = (V, E)$
],
caption: [The RRT Algorithm]
)<alg-rrt>
// In @alg-rrt the following functions are used:
#[
// #show regex("(SampleRandomPoint|NearestNeighbor|Steer|CollisionFree|WithinGoalTolerance)"): set text(theme.mauve, font: "JetBrainsMono NF", size: 0.85em)
#set list(marker: text(theme.mauve, sym.diamond.filled))
#set par(first-line-indent: 0em)
=== RRT Functions
This section provides a mathematical description of the functions used in the RRT algorithm; functions `SampleRandomPoint`, `NearestNeighbor`, `Steer`, `CollisionFree`, and `WithinGoalTolerance`. As in @alg-rrt, the #acr("RRT") tree consists of vertices, $V$, and edges, $E$; together composing a graph, $G = (V, E)$. These denotations are used in the following descriptions.@erc-rrt-star
// \ #text(size: 1.25em, weight: "bold", [`SampleRandomPoint()`]) \
// ==== `SampleRandomPoint() -> x`
==== #fsig[SampleRandomPoint() -> x]
This function takes no arguments and returns a random point, $x$, in the configuration space. Most commonly this is done by drawing from a uniform distribution. Say that $omega$ is an element of the set of all possible states in the configuration space $Omega$; then $forall omega in Omega$, equation @eq.func-sample-random-point holds.@erc-rrt-star
#algeq[
$
"SampleRandomPoint" : omega arrow.r.bar {"SampleRandomPoint"_i (omega)}_(i in NN_0) subset cal(X)
$<eq.func-sample-random-point>
]
That is; the set of all randomly sampled points, $cal(X)_"rand"$, which is the result of the above mapping, is a subset of the configuration space, $cal(X)$.
// #jens[finish this one, it does not make too much sense]
// \ #text(size: 1.25em, weight: "bold", [`NearestNeighbor(G, x)`]) \
// ==== `NearestNeighbor(G, x) -> v`
==== #fsig[NearestNeighbor(G, x) -> v]
Finds the nearest node $v in V subset cal(X)$ in the tree to a given point. It takes in the graph, $G = (V, E)$, and a point, $x in cal(X)$, see @eq.func-nearest-neighbor. This notion can be further specified with a distance metric, such as the Euclidean distance, as seen in @eq.func-nearest-neighbor-euclidean, which returns the node $v in V$ that minimizes the distance, $norm(x - v)$, between the new point $x$ and an existing node $v$.@erc-rrt-star
#algeq[
$
"NearestNeighbor" : (G, x) arrow.r.bar v in V
$<eq.func-nearest-neighbor>
$
"NearestNeighbor"(G = (V, E), x) = "argmin"_(v in V) norm(x - v)
$<eq.func-nearest-neighbor-euclidean>
]
// \ #text(size: 1.25em, weight: "bold", [`Steer(x, y)`]) \
// ==== `Steer(x, y, s) -> v`
==== #fsig[Steer(x, y, s) -> v]
Creates a new node at a specific distance from the nearest node towards a given point. Takes in two points $x, y in cal(X)$ and a step length $s in RR^+$. The new node $v$ is created by moving a distance $s$ from $x$ towards $y$. This way, equation @eq.func-steer returns a point $v in cal(X)$ such that $v$ is closer to $y$ than $x$ is; it will either be $s$ closer, or, if the randomly sampled point $y$ is within distance $s$ from $x$ to begin with, $v$ will be at $y$. As such, the inequality $norm(v - x) <= s$ holds.@erc-rrt-star
#algeq[
$
"Steer" : (x, y, s) arrow.r.bar v in cal(X)
$<eq.func-steer>
]
// \ #text(size: 1.25em, weight: "bold", [`CollisionFree(x, y)`]) \
// ==== `CollisionFree(x, y) -> p`
==== #fsig[CollisionFree(x, y) -> p]
Checks if the path between two nodes is collision-free. Takes in two points $x, y in cal(X)$ and returns a boolean, $p in {top, bot}$. The returned value, $p$, indicates whether the addition of node $y in cal(X)$ to the #acr("RRT") tree is valid, given a proposed edge to the node $x in V$. Typically the notion of validity depends on whether the path from $x$ to $y$ is collision-free, hence the function's name, but it could include any other arbitrary constraints.@erc-rrt-star
// \ #text(size: 1.25em, weight: "bold", [`WithinGoalTolerance(tolerance, x, goal)`]) \
// ==== `WithinGoalTolerance(t, x, g) -> p`
==== #fsig[WithinGoalTolerance(t, x, g) -> p]
Checks if a node is within the goal tolerance distance from the goal. As such, the function takes in the distance tolerance $t$, a node $x in V$, and the goal state $g in cal(X)$. The function returns a boolean, $p in {top, bot}$, that tells us whether $x$ is within a Euclidean distance $t$ of the goal state $g$. See @eq.func-goal-tolerance for the mathematical representation.@erc-rrt-star
#algeq[
$
"WithinGoalTolerance" : (t, v, g) arrow.r.bar p in {top, bot}
$<eq.func-goal-tolerance>
]
]
// #jens[
// Consider making this a table?
// ]
#example(caption: [Contextual RRT Application])[
#set par(first-line-indent: 0em)
*Scenario:* Let us look at an example where the possible state space is _two-dimensional Euclidean space_. A robot wants to go from 2D position $x_A$ to $x_B$. \ \
*Input:* In @alg-rrt, the input is outlined to be a starting position, $x_"start"$, a goal position $x_"goal"$, a step length $s$, a maximum number of iterations, $N$, and lastly, a goal tolerance, $g_"tolerance"$. \ \
*Output:* At the end of algorithm execution, the resulting graph is outputted as the combination of; $V$, the set of vertices, and $E$, the set of edges. \ \
*Execution:*
+ $V$ is initialized to contain the initial position of the robot $x_"start" = x_A$, thus the set ${x_A}$. $E$ is initialized to be empty.
+ Enter a for loop, that will maximally run $N$ times, but will break early if the goal is reached.
*Each iteration:*
+ A random point, $x_"random"$, is sampled from the configuration space, by calling the sampling function $"SampleRandomPoint"()$.
+ The nearest existing node in the tree, $x_"nearest"$, is found by $"NearestNeighbor"(G = (V, E), x_"random")$.
+ Thereafter, a new node, $x_"new"$, is created by making a new node $s$ distance from $x_"nearest"$ towards $x_"random"$ in the call to $"Steer"(s, x_"nearest", x_"random")$.
*Checks:*
+ Only if the path from $x_"nearest"$ to $x_"new"$ is collision-free, the new node is added to the tree. Otherwise, continue to the next iteration.
+ If the node is added to the tree, and it is within $g_"tolerance"$ distance from $x_"goal"$, and the path from $x_"new"$ to $x_"goal"$ is collision-free, the goal is added to the tree, and the loop is broken.
]<ex.rrt>
== Optimal Rapidly Exploring Random Trees <s.b.rrt-star>
// FROM @erc-rrt-star
// In the year 2011, <NAME> and <NAME> in their paper Sampling-based Algorithms for Optimal Motion Planning, introduced three new path planning algorithms that improved upon the existing algorithms. These were, namely, optimal rapidly exploring random trees (RRT*), optimal probabilistic road mapping (PRM*), and rapidly exploring random graphs (RRG).
// The most popular algorithm among these is the RRT* algorithm, that is heavily based on the RRT algorithm, and has some improvisions, and provides a more optimal solution.
// Let’s now look at the RRT* algorithm that was originally proposed in the paper along with the pseudo code. All the mathematical notations and functions in the paper are clearly explained here.
// The following image shows the RRT* algorithm applied on a 2D graph.
// Intuition HEADING
// The node sampling and selection process is exactly the same as RRT, wherein a point is randomly generated and a node is created at that point or at a specified maximum distance from the existing node, whichever is closer.
// However, the difference is where the connection is made. We assign every node a cost function that denotes the length of the shortest path from the start node. We then search for nodes inside a circle of given radius r centered at the newly sampled point.
// We then rearrange the connections such that they minimize the cost function and optimize the path. This can rearrange the graph in such a way that we get the shortest path.
// Courtesy: Joon’s lectures
// In the image above, after rearranging the connections, the path to the green points, i.e.,
// is shorter through the red connections than through the earlier connections.
#acr("RRT*") is an extension of the #acr("RRT") algorithm, which was introduced in 2011 by <NAME> and <NAME> in their paper _Sampling-based Algorithms for Optimal Motion Planning_@sampling-based-survey. With only a couple of modifications to #acr("RRT"), the algorithm is able to reach asymptotic optimality, where the original algorithm makes no such promises. The modifications are explained in listing #boxed(color: theme.mauve, [*M-X*]).
#algorithm(
caption: [The RRT\* Algorithm],
)[
// #set text(size: 0.85em)
#show regex("(MinCostConnection|Rewire|Sample|Nearest|Steer|ObstacleFree|Neighbourhood|Cost|Line|CollisionFree|Parent|WithinGoalTolerance)"): set text(theme.mauve, font: "JetBrainsMono NF", size: 0.85em)
#let ind() = h(2em)
*Input:* $x_"start", x_"goal", s, N, g_"tolerance"$ \ \
$V = {x_"start"}$ \
$E = emptyset$ \ \
*for* $i = 1, dots, n$ *do* \
#ind()$x_"rand" #la "Sample"()$ \
#ind()$x_"nearest" #la "Nearest"(V, E, x_"rand")$ \
#ind()$x_"new" #la "Steer"(x_"nearest", x_"rand")$ \ \
#ind()*if* $"ObstacleFree"(x_"nearest", x_"new")$ *then* \
#ind()#ind()$V_"near" #la "Neighbourhood"(V, E, x_"new", r)$ \
#ind()#ind()$V #la V union {x_"new"}$ \
#ind()#ind()$c_"nearest" #la "cost"(x_"nearest") + c("Line"(x_"nearest", x_"new"))$ \ \
#ind()#ind()$x_"min" #la "MinCostConnection"(V_"near", x_"new", x_"nearest", c_"nearest")$ \
#ind()#ind()$E #la E union {[x_"min", x_"new"]}$ \ \
#ind()#ind()$"Rewire"(V_"near", x_"new")$ \
#ind()*end* \
// Check for connection to goal like RRT
#ind()*if* $"WithinGoalTolerance"(g_"tolerance", x_"new", x_"goal")$ \
#ind()#h(1em)$and "CollisionFree"(x_"new", x_"goal")$ *then* \
#ind()#ind()$V #la V union {x_"goal"}$ \
#ind()#ind()$E #la E union {[x_"new", x_"goal"]}$ \
#ind()#ind()*break* \
#ind()*end* \
*end* \ \
*Output:* $G = (V, E)$
]<alg.rrt-star>
#set enum(numbering: box-enum.with(prefix: "M-", color: theme.mauve))
+ _*Cost Function:*_ The first modification is the introduction of a _cost function_, $c(v)$, for each node, $v in V$. The cost function outputs the length of the shortest path from the start node to the node $v$. This modification encodes an optimizable metric for each branch, which enables the next modification, #boxed(color: theme.mauve, [*M-2*]), to take place.
+ _*Rewiring:*_ The second modification is the introduction of a _neighbourhood radius_, $r in RR^+$, around each newly created node, which is used to search for nodes that can be reached with a lower cost.
As such, every time a new node is created, there is a possibility that other nodes within that radius would have a lower cost if they were connected to the new node. Thus, comparing each node's old cost with the cost it would have if connected to the newly created node determines whether to rewire or not.
With these modifications made, the #acr("RRT*") algorithm is shown in @alg.rrt-star@erc-rrt-star. Two important blocks of the algorithm have been sectioned out into sub-algorithms #numref(<alg.rrt-star.min-cost-connection>) and #numref(<alg.rrt-star.rewire>), which are described alongside the other new functions of #acr("RRT*") under @s.b.rrt-star.functions. The main parts of the algorithm are visualized in @f.rrt-rewire as three steps:
#set enum(numbering: box-enum.with(prefix: "Step ", color: theme.mauve))
+ A new point has been sampled, deemed collision-free, and thus node $v_"new"$ can be added to the tree. But first, we need to find which existing node to connect to. Here, $v_"nearest"$ is chosen by the `MinCostConnection` algorithm, as it is the node that minimizes the total cost from the root to $v_"new"$, within the step-length radius $s$.
+ In preparation, rewiring candidates are found by looking at all nodes in the tree that are within a certain rewiring radius, $r$, of $v_"new"$. This is done by the `Neighbourhood` function, which returns the set $V_"near" = {n_1, n_2, dots, n_n}$.
#[
#show regex("(MinCostConnection|Rewire|Sample|Nearest|Steer|ObstacleFree|Neighbourhood|Cost|Line|CollisionFree|Parent|WithinGoalTolerance)"): set text(theme.mauve, font: "JetBrainsMono NF", size: 0.85em)
+ This step is where the rewiring takes place. By looking at the nodes in $V_"near"$, we can compute each node's cost, $c_"new"$ with equation @eq.rrt-star-cost
$
"Cost"(v_"new") + c("Line"(n_i, v_"new"))
$<eq.rrt-star-cost>
as if $v_"new"$ were its parent. Denote the costs $C_"near" = {c_1, c_2, dots, c_n}$. Now for each node $n_i in V_"near"$, check if $c_i < c_"new"$, and if so, rewire the connection to make $v_"new"$ the parent of $n_i$.
]
#figure(
{
v(1em)
image("../../figures/out/rrt-rewire.svg", width: 100%)
},
caption: [The #acr("RRT*") algorithm drawn out in 3 steps. Firstly, a new node is sampled and added to the tree, where the cost is lowest, looking in a radius of $s$#swatch(theme.mauve.lighten(40%)). Then nodes within a neighbourhood $r$#swatch(theme.lavender.lighten(25%)), are then rewired if their cost would be lower by doing so.],
)<f.rrt-rewire>
=== RRT\* Functions <s.b.rrt-star.functions>
This section describes the functions used in the #acr("RRT*") algorithm in @alg.rrt-star. Functions from base-#acr("RRT") are not repeated here, as no change is made to them. The new functions are `MinCostConnection`, `Rewire`, `Cost`, `Neighbourhood`, `Parent`, and `Line`.
// ==== `MinCostConnection(V_near, x_new, x_init, c_init) -> v` <s.b.rrt-star.min-cost-connection>
==== #fsig[MinCostConnection(V_near, v_new, v_init, c_init) -> v] <s.b.rrt-star.min-cost-connection>
#let body = [This function is a main part of the #acr("RRT*") modification, as it attaches the new node $v_"new"$ not to the node nearest to the randomly sampled point in $cal(X)$, but to the node that minimizes the cost from the root to $v_"new"$. This happens by looking at all nodes in a neighbourhood $V_"near"$ of radius $r$ around $v_"new"$, and then finding the node that minimizes the cost. To begin with, the initial node $v_"init"$ and its cost $c_"init"$ are passed to the function as the initial comparison point. The initial comparison point is typically the nearest node in the tree, the one that would have been the parent of $v_"new"$ in #acr("RRT"). The function's operation is described in @alg.rrt-star.min-cost-connection.]
#let alg = [
#algorithm(
caption: [Min Cost Connection],
[
#set par(justify: false)
#set text(size: 0.85em)
#show regex("(MinCostConnection|Rewire|Sample|Nearest|Steer|ObstacleFree|Neighbourhood|Cost|Line|CollisionFree|Parent|WithinGoalTolerance)"): set text(theme.mauve, font: "JetBrainsMono NF", size: 0.85em)
#let ind() = h(2em)
*Input:* $V_"near", x_"new", x_"nearest", c_"nearest"$ \ \
$x_"min" #la x_"nearest"$ \
$c_"min" #la c_"nearest"$ \ \
*for* $x_"near" in X_"near"$ *do* \
#ind()$c_"near" #la "Cost"(x_"near") + c("Line"(x_"near", x_"new"))$ \
#ind()*if* $"CollisionFree"(x_"near", x_"new")$ \
#ind()#ind()#ind()$and c_"near" < c_"min"$ *then* \
#ind()#ind()$x_"min" #la x_"near"$ \
#ind()#ind()$c_"min" #la "Cost"(x_"near") + c("Line"(x_"near", x_"new"))$ \
#ind()*end* \
*end* \ \
*Output:* $x_"min"$
]
)<alg.rrt-star.min-cost-connection>
]
#grid(
columns: (90mm, 1fr),
gutter: 1em,
alg,
body,
)
// ==== `Rewire(V_near, x_new)` <s.b.rrt-star.rewire>
==== #fsig[Rewire(V_near, v_new)] <s.b.rrt-star.rewire>
#let body = [The rewiring function is the second part of the #acr("RRT*") optimization steps, which changes previously established connections in the tree. The function uses the neighbourhood $V_"near"$ of nodes in radius $r$ around $v_"new"$. For each $n_i in V_"near"$, if the cost of $n_i$ with $v_"new"$ as parent is lower than the previously established cost for $n_i$, the tree is rewired. The function is described in @alg.rrt-star.rewire.]
// #pagebreak(weak: true)
#let alg = [
#algorithm(
caption: [Rewiring],
[
#show regex("(MinCostConnection|Rewire|Sample|Nearest|Steer|ObstacleFree|Neighbourhood|Cost|Line|CollisionFree|Parent|WithinGoalTolerance)"): set text(theme.mauve, font: "JetBrainsMono NF", size: 0.85em)
#set text(size: 0.85em)
#let ind() = h(2em)
*Input:* $V_"near", x_"new"$ \ \
*for* $x_"near" in V_"near"$ *do* \
#ind()$c_"near" #la "Cost"(x_"new") + c("Line"(x_"new", x_"near"))$ \
#ind()*if* $"CollisionFree"(x_"new", x_"near") and c_"near" < "Cost"(x_"near")$ *then* \
#ind()#ind()$x_"parent" #la "Parent"(x_"near")$ \
#ind()#ind()$E #la E \\ {[x_"parent", x_"near"]}$ \
#ind()#ind()$E #la E union {[x_"new", x_"near"]}$ \
#ind()*end* \
*end* \ \
*Output:* None
]
)<alg.rrt-star.rewire>
]
#grid(
columns: (1fr, 105mm),
gutter: 1em,
body,
alg,
)
// ==== `Cost(v) -> c` <s.b.rrt-star.cost>
==== #fsig[Cost(v) -> c] <s.b.rrt-star.cost>
// By whatever means, returns the cost of a node, $v in V$. That is; the length of the line segments if one were to walk from $v$ and all the way back to the root node.
This function is used in @alg.rrt-star, #numref(<alg.rrt-star.min-cost-connection>), and #numref(<alg.rrt-star.rewire>) to access the cost $c$ of a node $v in V$. Typically the cost is a distance: the sum of the Euclidean lengths of the edges one would traverse walking from $v$ all the way back to the root node. It is thus a mapping from a node $v in V$ to a cost $c in RR^+$, as shown in @eq.func-cost.
#algeq[
$
"Cost" : v arrow.r.bar c in RR^+
$<eq.func-cost>
]
// ==== `Neighbourhood(V, E, x, r) -> V_near` <s.b.rrt-star.neighbourhood>
==== #fsig[Neighbourhood(V, E, x, r) -> V_near] <s.b.rrt-star.neighbourhood>
A more complex function, which returns the set of all nodes in $V$ that are within a radius $r$ of a potential new node $x in cal(X)$. If the configuration space is $cal(X) = RR^2$, then the neighbourhood $V_"near"$ is a subset of $V$ such that $forall v in V_"near"$, $norm(v - x) <= r$. This mapping is described in @eq.func-neighbourhood.
#algeq[
$
"Neighbourhood" : (V, E, x, r) arrow.r.bar V_"near" subset V
$<eq.func-neighbourhood>
]
// ==== `Parent(v) -> p` <s.b.rrt-star.parent>
==== #fsig[Parent(v) -> p] <s.b.rrt-star.parent>
Semantically denotes access to the parent node $p in V$ of a node $v in V$, see @eq.func-parent.
#algeq[
$
"Parent" : v arrow.r.bar p in V
$<eq.func-parent>
]
// ==== `Line(x, y) -> l` <s.b.rrt-star.line>
==== #fsig[Line(x, y) -> l] <s.b.rrt-star.line>
#{
show regex("(MinCostConnection|Rewire|Sample|Nearest|Steer|ObstacleFree|Neighbourhood|Cost|Line|CollisionFree|Parent|WithinGoalTolerance)"): set text(theme.mauve, font: "JetBrainsMono NF", size: 0.85em)
[
Denotes the line segment between two nodes $x, y in cal(X)$. This line segment expresses the relationship between $x$ and $y$ in the configuration space $cal(X)$. It can be used, as seen in algorithms #numref(<alg.rrt-star>), #numref(<alg.rrt-star.min-cost-connection>) and #numref(<alg.rrt-star.rewire>), to calculate the cost that this line segment contributes. This is done by the function $c("Line"(x, y))$ which, in the case of a Euclidean configuration space and cost, expresses a mapping from two points $x, y in cal(X)$ to a distance $l in RR^+$, see @eq.func-line.
$
"Line" : (x, y) arrow.r.bar l in RR^+ \
c("Line"(x, y)) = norm(x - y)
$<eq.func-line>
]
}
|
https://github.com/soul667/typst | https://raw.githubusercontent.com/soul667/typst/main/PPT/MATLAB/touying/docs/blog/2024-01-17-touying-0-2-0.md | markdown | ---
slug: touying-0-2-0
title: Touying 0.2.0
authors: orangex4
tags: [development]
---
Touying 0.2.0 is done.
## Implemented Features
- [x] **Object-oriented programming:** Singleton `s`, binding methods `utils.methods(s)` and `(self: obj, ..) => {..}` methods.
- [x] **Page arguments management:** Instead of using `#set page(..)`, you should use `self.page-args` to retrieve or set page parameters, thereby avoiding unnecessary creation of new pages.
- [x] **`#pause` for sequence content:** You can use `#pause` at the outermost level of a slide, including in inline content and lists.
- [x] **`#pause` for layout functions:** You can use the `composer` parameter to add your own layout function like `utils.side-by-side`, and simply use multiple positional parameters like `#slide[..][..]`.
- [x] **`#meanwhile` for synchronous display:** Provides `#meanwhile` for resetting the subslide counter.
- [x] **`#pause` and `#meanwhile` for math equations:** Provides `#touying-equation("x + y pause + z")` for math equation animations.
- [x] **Slides:** Create simple slides using standard headings.
- [x] **Callback-style `uncover`, `only` and `alternatives`:** Based on the concise syntax provided by Polylux, these allow precise control of the timing for displaying content.
- You should manually control the number of subslides using the `repeat` parameter.
- [x] **Transparent cover:** Enable a transparent cover using OOP syntax like `#let s = (s.methods.enable-transparent-cover)(self: s)` (see the sketch after this list).
- [x] **Handout mode:** Enable handout mode with `#let s = (s.methods.enable-handout-mode)(self: s)`.
- [x] **Fit-to-width and fit-to-height:** Fit-to-width for the title in the header and fit-to-height for images.
- `utils.fit-to-width(grow: true, shrink: true, width, body)`
- `utils.fit-to-height(width: none, prescale-width: none, grow: true, shrink: true, height, body)`
- [x] **Slides counter:** `states.slide-counter.display() + " / " + states.last-slide-number` and `states.touying-progress(ratio => ..)`.
- [x] **Appendix:** Freeze the `last-slide-number` to prevent the slide number from increasing further.
- [x] **Sections:** Touying's built-in section support can be used to display the current section title and show progress.
- [x] `section` and `subsection` parameter in `#slide` to register a new section or subsection.
- [x] `states.current-section-title` to get the current section.
- [x] `states.touying-outline` or `s.methods.touying-outline` to display a outline of sections.
- [x] `states.touying-final-sections(sections => ..)` for custom outline display.
- [x] `states.touying-progress-with-sections((current-sections: .., final-sections: .., current-slide-number: .., last-slide-number: ..) => ..)` for powerful progress display.
- [x] **Navigation bar**: Navigation bar like [here](https://github.com/zbowang/BeamerTheme) by `states.touying-progress-with-sections(..)`, in `dewdrop` theme.
- [x] **Pdfpc:** Pdfpc support; export a `.pdfpc` file without any external tool, simply by using the `typst query` command.
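
As a quick sketch of how the OOP-style calls above compose (assuming `s` is the theme singleton provided by your Touying theme), transparent cover and handout mode can be enabled together:

```typst
// Sketch only: `s` is assumed to be the Touying theme singleton.
#let s = (s.methods.enable-transparent-cover)(self: s)
#let s = (s.methods.enable-handout-mode)(self: s)
```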
|
|
https://github.com/Gekkio/gb-ctr | https://raw.githubusercontent.com/Gekkio/gb-ctr/main/chapter/console/clocks.typ | typst | Creative Commons Attribution Share Alike 4.0 International | #import "../../common.typ": *
#import "../../timing.typ"
== Clocks
=== System clock
The system oscillator is the primary clock source in a Game Boy system, and it generates the *system clock*. Almost all other clocks are derived from the system clock using prescalers / clock dividers, but there are some exceptions:
- If a Game Boy is set up to do a serial transfer in secondary mode, the serial data register is directly clocked using the serial clock signal coming from the link port. Two Game Boys connected with a link cable never have precisely the same clock phase and frequency relative to each other, so the serial clock of the primary side has no direct relation to the system clock of the secondary side.
- The inserted game cartridge may use other clock(s) internally. A typical example in some official games is the Real Time Clock (RTC), which is based on a 32.768 kHz oscillator and a clock-domain crossing circuit so that RTC data can be read using the cartridge bus while the RTC circuit is ticking independently using its own clock.
The Game Boy SoC uses two pins for the system oscillator: XI and XO. These pins, along with some external components, can be used to form a Pierce oscillator circuit. Alternatively, the XI pin can be driven directly with a clock signal originating from somewhere else, and the XO pin can be left unconnected.
==== System clock frequency
In DMG and MGB consoles the system oscillator circuit uses an external quartz crystal with a nominal frequency of *4.194304 MHz* (= $2^22$ MHz = 4 MiHz) to form a Pierce oscillator circuit. This frequency is considered to be the standard frequency of a Game Boy.
In SGB the system oscillator input is directly driven by the ICD2 chip on the SGB cartridge. The clock is derived via /5 division of the main SNES / SFC clock, which has a different frequency depending on the console region (21.477 MHz NTSC, 21.281 MHz PAL). The SNES / SFC clock does not divide into 4.194304 MHz with integer division, so the clock seen by the SGB SoC is not the same as in DMG and MGB consoles. The frequency is higher, so everything is sped up by a small amount and audio has a slightly higher pitch.
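For reference, assuming the nominal 21.477 MHz NTSC master clock, the /5 division works out to 21.477 MHz / 5 ≈ 4.295 MHz, roughly 2.4% above the standard 4.194304 MHz.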
In SGB2, just like SGB, the system oscillator input is driven by the ICD2 chip, but instead of using the SNES / SFC clock, the ICD2 chip is driven by a Pierce oscillator circuit with a 20.971520 MHz crystal. ICD2 then divides this frequency by /5 to obtain the final frequency seen by the SGB2 SoC, which is 4.194304 MHz that matches the standard DMG / MGB frequency.
=== Clock periods, T-cycles, and M-cycles
In digital logic, a clock switches between low and high states and every transition happens on a _clock edge_, which might be a rising edge (low → high transition) or a falling edge (high → low transition). A single _clock period_ is measured between two edges of the same type, so that the clock goes through two opposing edges and returns to its original state after the clock period. The typical convention is that a clock period consists of a rising edge and a falling edge.
In addition to the system clock and other clocks derived from it, Game Boy systems also use _inverted clocks_ in some peripherals, which means the rising edge of an inverted clock may happen at the same time as a falling edge of the original clock. @example-clock-periods shows two clock periods of the system clock and an inverted clock derived from it, and how they are out of phase due to clock inversion.
#figure({
import timing: diagram, clock as c, data as d, either as e, high as h, low as l, unknown as u, undefined as x, high_impedance as z
set text(20pt)
diagram(
w_scale: 2.0,
y_scale: 2.0,
(label: "CLK 4 MiHz", wave: (
l(1),
..range(5).map((_) => c(1)),
)),
(label: "Inverted 4 MiHz", wave: (
h(1),
..range(5).map((_) => c(1)),
)),
fg: () => {
let rising_color = olive
let falling_color = blue
import cetz.draw
let label(content) = align(center, text(10pt, content))
draw.on-layer(-1, {
draw.line((2, 7), (2, 8), stroke: (dash: "dashed"))
draw.line((6, 7), (6, 8), stroke: (dash: "dashed"))
draw.line((10, 7), (10, 8), stroke: (dash: "dashed"))
draw.content((4, 7.5), label[period])
draw.content((8, 7.5), label[period])
draw.line((4, -1), (4, -3), stroke: (dash: "dashed"))
draw.line((8, -1), (8, -3), stroke: (dash: "dashed"))
draw.line((12, -1), (12, -3), stroke: (dash: "dashed"))
draw.content((6, -2), label[also\ a period])
draw.content((10, -2), label[also\ a period])
})
}
)},
caption: "Example clock periods"
) <example-clock-periods>
#figure({
import timing: diagram, clock as c, data as d, either as e, high as h, low as l, unknown as u, undefined as x, high_impedance as z
set text(20pt)
diagram(
w_scale: 2.0,
y_scale: 2.0,
(label: "CLK 4 MiHz", wave: (
l(1),
c(1, label: "T1"),
c(1, label: "T1"),
c(1, label: "T2"),
c(1, label: "T2"),
c(1, label: "T3"),
c(1, label: "T3"),
c(1, label: "T4"),
c(1, label: "T4"),
c(1),
)),
(label: "PHI 1 MiHz", wave: (
l(1),
c(4),
c(4),
c(1),
)),
fg: () => {
let rising_color = olive
let falling_color = blue
import cetz.draw
draw.on-layer(-1, {
for x in range(1, 9, step: 2) {
draw.set-style(stroke: (paint: rising_color.transparentize(50%), thickness: 0.1em))
draw.line((x * 2, -1), (x * 2, 7))
}
for x in range(2, 9, step: 2) {
draw.set-style(stroke: (paint: falling_color.transparentize(50%), thickness: 0.1em))
draw.line((x * 2, -1), (x * 2, 7))
}
})
let rising(content) = text(10pt, fill: rising_color, weight: "bold", content)
let falling(content) = text(10pt, fill: falling_color, weight: "bold", content)
let y = 8
draw.content((1 * 2, y), rising[T1R])
draw.content((2 * 2, y), falling[T1F])
draw.content((3 * 2, y), rising[T2R])
draw.content((4 * 2, y), falling[T2F])
draw.content((5 * 2, y), rising[T3R])
draw.content((6 * 2, y), falling[T3F])
draw.content((7 * 2, y), rising[T4R])
draw.content((8 * 2, y), falling[T4F])
}
)},
caption: "Clock edges in a machine cycle"
) <reference-m-cycle>
|
https://github.com/polarkac/MTG-Stories | https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/049%20-%20The%20Brothers'%20War/004_Chapter%201%3A%20Stronghold.typ | typst | #import "@local/mtgstory:0.2.0": conf
#show: doc => conf(
"Chapter 1: Stronghold",
set_name: "The Brothers' War",
story_date: datetime(day: 21, month: 10, year: 2022),
author: "<NAME>",
doc
)
#emph[There are times when destiny calls forth a people and demands an action. ] Now#emph[ is the time. ] We#emph[ are the people. ] This#emph[ is our action.]
— Eladamri, Lord of Leaves
Teferi never thought he'd walk any halls erected by Urza again, let alone ones his former mentor strode through as a mortal man. Four thousand years was a long time for any edifice to remain standing, let alone one so integral to a continent-destroying war. But there Teferi was all the same, climbing the spiraling steps of the tower where, millennia before, Urza engineered mechanical constructs to fight a bitter struggle against his brother, Mishra.
The tower itself was in pristine shape, if desolate. Stone and metal were meticulously fitted together with no seams or cracks, as if the tower had been willed into existence rather than put together by hands. Legend had it that Urza built this tower to be his personal workshop away from the horrors of the Brothers' War, and it showed in the care he'd poured to its golden accents, its orniary, its assembly armatures—the kind of care he'd never been able to extend to the people in his life.
The mystery of the tower was how it had evaded ransacking through the centuries. There had been no signs of raiders establishing camps, no evidence of opportunistic wizards setting up their laboratories. Structurally, it would have been perfect for either. Teferi could have been convinced that it was the tower's well-concealed location in a mist-shrouded vale that ensured the tower's survival. But he knew better. It was dumb luck—the same luck that all of Urza's machinations seemed to rely on (and succeed with). Reckless luck. Dangerous luck. The kind that promised only the extremes of success or failure with pain attached to each.
Teferi reached the top of the stairs, where he paused to catch his breath and clutch his midsection, the wounds he'd received in New Argive still sore. Yes, he could have used his magic to levitate up, but Subira had always impressed upon him the serenity that came with the meditation of one step~ after one step~ after one. How he wished Subira were there to cut through his tightly wound knot of concerns.
#emph[Get out of your head] , she would always tell him. #emph[Look with your eyes.]
Entering his quarters, Teferi noted the new, if sparse, furnishings of a table, chair, and cot that had been added in the week or so since he was gone planeswalking across the Multiverse. Then he ventured out onto the mist-shrouded battlement high above the secluded vale. Breathing in the cold Argivian air, he gazed over the edge and imagined phalanxes of metal warriors lined up underneath this very battlement, their thunderous heels pounding the ground.
Teferi turned away from the battlement to see a squat construct resembling an upright brass caterpillar waddle through the door, tenuously balancing a cup filled with a steaming, green-hued liquid and small plate of tea biscuits. Teferi could only watch confused as the poor thing made its way across the room. He assumed that the construct was Saheeli's work, but he wasn't sure how to react to it.
"This can't be for me," he said, hoping that it understood him. "I just arrived."
"That's my doing." Standing in the doorway was Jodah, as handsome and vibrant as ever. No matter how miserable the occasion, he always managed to look every bit the part of Dominaria's most acclaimed archmage. His robes were spotless, and his smile shone as bright as the Null Moon on a clear night. "I cast an intruder alarm on the tower, so I knew you were back. I hope you don't mind me taking liberties with your breakfast."
"No. You're very kind," said Teferi, biting into one of the tea biscuits. Hmm. What was that strange flavor? Not sweet, but salty, with an odd tang and a sand-like grit between his teeth. "What is this?"
"That is the traditional biscuit of the Kjeldoran people," Jodah said. "I gave my recipe to Saheeli, and she whipped up one of her constructs to bake them. She's quite brilliant." He narrowed his eyes. "Why do you ask?"
"Is it supposed to taste like that?"
Jodah picked up a biscuit and scrutinized it. "I've eaten thousands of these, so I don't see what could be wrong." Then he took a bite. "Huh. You're right. It's a bit off. Maybe the roach flour's gone stale."
"Roach flour," Teferi repeated, glaring at Jodah.
"Mmm-hmm," said Jodah, popping the rest of the biscuit into his mouth.
Teferi gingerly dropped his onto the plate. "How are things holding up here?"
"As well as can be expected. You'll have to ask Saheeli how she's faring on her project, but I can tell you that we have our privacy. For now, at least."
That was good to hear. A handful of weeks had passed since the attack on the Mana Rig, leaving Teferi with few allies and fewer resources. Karn had been taken by the Phyrexians, the sylex destroyed, and Ajani revealed to be a sleeper agent. To thwart any more prying eyes, Jodah insisted on finding a new base, a sentiment Teferi agreed with. What he didn't necessarily agree with was relocating to Urza's Tower. But what was done was done.
"I brought Wrenn," said Teferi. "Did you happen to see her?"
"Yes. We met on my way up here. Not exactly the friendliest sort."
"Let her warm up to you. You'll find her wisdom without peer."
"Speaking of peers, two more Planeswalkers arrived soon after you did."
Teferi raised an eyebrow. For the last month, he'd been feverishly chasing down allies throughout the Multiverse, but most of the long-standing Planeswalker contacts he managed to find laughed at his entreaties. Teferi had more luck with the younger Planeswalkers who'd fought alongside him against <NAME>: <NAME> was the most vital recruit due to her skill with artifacts. Kaya was instrumental for her keen strategic mind and her network of informants across distant planes. And Wrenn he'd sought out for her ability to see through his own foolish affectations. He had not expected anyone to come looking for him.
"Kaya seemed to know them," said Jodah, shrugging his shoulders. "That and the fact that this place hasn't exploded yet means that they're probably friendly."
"I guess we should see what they want." Teferi took a sip of tea. The hints of lemon and honey tempered his anxiety, allowing him to address something he'd been dreading since they'd left Shiv. "Before we go, I wanted to ask a favor."
"This sounds ominous."
"Once Saheeli's time machine is ready, I'll be indisposed," Teferi explained. "I'll need—all of us will need—someone to lead and make the right decisions." He placed a hand on Jodah's shoulder. "I'd like you to consider being that person."
"Consider?" Jodah asked with a grin. "You're not just going to order me to do it?"
Teferi shook his head. "I've learned that it's better to ask."
"Only took you sixty years." Jodah placed his hand on Teferi's. "I'll try not to burn the place down."
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
"Where is Ajani?"
The expression on Elspeth Tirel's face when Teferi answered was one he'd seen before. It was at Tolarian Academy, on a visit to Barrin, his old headmaster—not so much a visit as an act of contrition. Teferi remembered the venerable mage standing up from his desk, his face sunken, jaw quivering, the tempest inside stayed only by propriety. The name dividing them was #emph[Rayne] —Barrin's dead wife, killed in a war Teferi spearheaded, a war he bore responsibility for.
The name was different this day—Ajani's instead of Rayne's. But that exact same look that Barrin had worn~ It cut into Teferi like an old, rusty dagger in Elspeth's calloused grip.
"I'm sorry," Teferi repeated, but the words felt hollow. "I wish things were different—"
Before he could say anything more, Elspeth raised her hand to stop him. She crossed her arms and stepped into the far corner of the room, her back toward everyone else. Teferi began to follow, but Jodah seized his arm, keeping him in place.
"Let her be," he said, then to Kaya: "Go ahead."
Everyone who had gathered in Kaya's makeshift war room, save Elspeth, huddled around a glowing sphere of ghostly light that hovered over a pool in the room's center. At the head of the crowd was the other freshly arrived Planeswalker, <NAME>, who Teferi had met on Ravnica during their struggle against <NAME>.
#figure(image("004_Chapter 1: Stronghold/01.jpg", width: 100%), caption: [Art by: <NAME>], supplement: none, numbering: none)
"The New Coalition continues to marshal defenses against further attacks on Dominaria," Kaya explained. "But the Phyrexians are relentless, as we found out on Shiv."
Teferi could barely pay attention. His eye strayed again and again to Elspeth alone in the corner. Karn had told him tales of her courage in saving him from the Phyrexians. And then there was Ajani pulling him aside to excitedly tell him how he'd found Elspeth alive, despite her seeming demise on Theros. #emph[The champion we need] , Ajani had said. #emph[One that we can trust.]
Was Ajani still himself then? Could Elspeth be trusted?
"Kaldheim and Ixalan are mobilized," Kaya said, walking around the sphere to highlight key points and locations in the Gatewatch's plan. "Jace is on Ravnica lobbying for support from the guilds, while Chandra has gone to Zendikar to contact Nissa. When we've finished our tasks here, Saheeli intends to spearhead the defense of Kaladesh." Kaya stopped at one last point on the sphere. "Then we have the Mirran camp on New Phyrexia itself, led by Koth."
Elspeth turned, her eyes lit. "Koth is alive. How did you find this out?"
"Jace," Kaya said. "I don't know his source, but he told me that Koth and the Mirrans are planning an assault on the Phyrexian core. We plan on joining them once we're ready. Then together, we'll eliminate the Phyrexian leadership and mop up what's left. Without a head, the body will fail."
"That's a death sentence," Elspeth said. "Koth knows that better than anyone."
Teferi began to explain. "If you'll just listen to our plan—"
"No. You listen to me. Koth and I once tried to do exactly what you're proposing, and it ended in failure." Her eyes darted from person to person before finally settling on Teferi. That look. "None of you were there." With that, she stormed out of the room.
The quiet afterward was unsettling. Teferi knew that no matter what strategy they used, the odds would not favor any outside force trying to penetrate New Phyrexia's defenses. Everyone else in the room understood that, too. But hearing it said out loud in as direct a fashion as Elspeth had done crystallized how dire the stakes really were.
"Let me talk to her," Jodah said, a hand on Teferi's back. "The responsibilities of leadership, eh?" He walked out the door to track Elspeth down.
Vivien extended her hand to Teferi, greeting him with the grip of a seasoned hunter. "Good to see you again Teferi, though under circumstances just as unfortunate as last time."
"You and Elspeth are welcome to stay and rest if you need it."
Vivien shook her head. "I'm going to Ravnica to make sure Jace is apprised of the situation. Then I'll head to Ikoria. I'm worried about being able to organize a meaningful defense there. All the settlements do is squabble, so I'm going to make sure they stay in line."
"And Elspeth?"
"With this news of Ajani," said Vivien, "I think it would do her good to be among friends."
"#emph[If ] she considers us friends."
"She's been through much. Give her time."
#emph[Time.] A luxury Teferi knew they had precious little of.
"As for the task at hand, that's why we're here," she said. "I have intelligence from a Phyrexian insider—Urabrask, the praetor of the Quiet Furnace. Kaya's information seems to align with what we've been told, but it is incomplete."
Teferi felt the frown lines on his face deepen. The Phyrexians he'd been familiar with were militaristic fanatics who did not tolerate dissent. The notion of these New Phyrexians having factions among them seemed anathema to their very nature.
"What did Urabrask say?"
"That <NAME>, a rival praetor, has nearly unified New Phyrexia under her banner," said Vivien. "Urabrask and his forces stand opposed to Norn's aspirations. He's planning his own revolution and is communicating with the Mirrans."
"Whatever the case, we'll be on the ground there soon," Teferi insisted.
"Not soon enough," said Vivien. "Urabrask didn't give specifics, but he was concerned about some way that <NAME> may have to force her 'one singularity' throughout New Phyrexia and, more importantly, the Multiverse. She plans to expand her dominion all at once."
"Gods and monsters," Kaya whispered.
"What is it?" asked Teferi.
"The creature I told you about—the one I was hired to kill," she said as she began pacing the room. "I'd never encountered anything like it—a beast of stitched flesh over metal. After you and I talked, we both agreed it was a Phyrexian. But I couldn't figure out why, of all places, it was on Kaldheim. With Vivien's information~" Kaya activated the expression pool, shaping it with the force of her mind. The result was a three-dimensional rendering of a many-limbed tree, crowned at the top like the great magnigoths populating the woodlands of Yavimaya, only constantly undulating as if made from the effluvia of the Multiverse itself. "This is the World Tree of Kaldheim. It's like a network that allows instantaneous travel between all the realms of the plane. What if~"
Kaya didn't have to finish. If the Phyrexians had somehow replicated or repurposed Kaldheim's World Tree, it could conceivably join every plane in the Multiverse. With it in place, the Phyrexians could be anywhere, at any time, at the speed of thought. They didn't just have to worry about secret infiltrations like on Dominaria; the Phyrexians could march their armies in directly.
"By now, whatever Urabrask has planned is only days away from happening. If we're going to make a move, it must happen very soon."
"Days?" said Teferi. He didn't think he had such a small window to work with.
"I don't like it," said Kaya. "The timing of this proposed alliance is too perfect."
At a gut level, Teferi agreed. He'd played this game before and learned that the enemy of my enemy could be the #emph[worser] enemy by far. All it took to fall into the trap was a tasty scrap, an irresistible temptation. The truth was the choicest morsel of all to snatch a victim.
"I have my suspicions as well," Vivien said. "But I don't believe we have many alternatives other than taking Urabrask at his word. I'll vouch for him and for the intermediary who arranged for my audience with him."
"And who is that?" asked Teferi.
"Tezzeret."
"No," Kaya declared. "No way. You know what he did on Ravnica! And he still has access to the Planar Bridge!" She turned to Teferi. "If we trust Tezzeret, we could fall straight into the Phyrexians' trap. Again."
Once more, Kaya spoke the truth. And yet, how foolish would it be to cast aside a possible advantage over their opponents? A revolution from within could split the battlefield and nullify the upper hand the Phyrexians had on their home plane.
"See what you can learn through your contacts," Teferi said to Kaya. "Right now, we have to concentrate on what we're doing here."
Kaya stood down. "Fine. I'll shake the money tree on Ravnica. We'll see what gets knocked loose."
"I wish you both luck," said Vivien, turning to leave. "For all our sakes."
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
If any part of Urza's Tower still bore the marks of its creator, it was the eastern wing. At the height of the Brothers' War, witnessing the machine arms assemble one of Urza's constructs would have seemed like a miracle to the Korlisians or Argivians who'd only waged war with pike, sword, and blood.
Now those machine arms lay in heaps on the floor, joining the spare parts and scrap Urza left behind when he abandoned his tower for Argoth. And amid this heap sat Saheeli Rai cross-legged on the floor. Saheeli had been one of Teferi's first recruits. Like Vivien and Kaya, he'd first met her on Ravnica, but she was a far cry from their forbidding dispositions. Saheeli instead embraced the visceral joy in seeking the art in artifice, as exemplified by the beautiful spun-gold peafowl that hopped up high and glided back down to the floor.
"Hello!" greeted Saheeli, enjoying a small cup of tea to go along with more of those biscuits Jodah had served to him earlier. She slid over and patted the floor next to her. "Care to join me?"
"No thank you," he said. "I wanted to quickly check in. How are things going?"
"They are~ going."
Teferi looked around at the circle of workbenches Saheeli had pulled up upon the main assembly platform, her own private little workspace in the middle of the wreck that used to be Urza's grand showroom. Atop one of those tables was an item that caught his eye—an exquisite piece of art, a bowl made of copper pulled, twisted, and shaped like no human hands ever could. But it was so much more than that.
"You did it," Teferi said as he approached the bowl. "It's perfect."
"I wouldn't use that word," said Saheeli.
But Teferi wanted to. Saheeli had crafted a perfect replica of the sylex: a blend of the signature elements of the original relic with subtle nods to Saheeli's characteristic filigree stylings. Her version featured the same heavy handles on both sides, the same shallow bas-relief depictions of farmers armed with scythes facing down a troop of armored knights. Identical runes—a master translation between several ancient languages—spiraled downward from the inner edges of the bowl to the very bottom.
"It's one thing to create something you've seen and held before," she said. "It's another to do so from notes that may not be complete."
"I trust you did your best."
"I hope my best is good enough."
"How is the new time machine?" he said, moving onto the other major goal he'd assigned to her.
"Temporal Anchor," Saheeli reminded him. "Things are progressing, as you can see." She motioned to the other side of the platform, where her machine sat. Teferi still remembered Urza's time machine—an eyesore of glass cylinders and snaking tubes that took up half a classroom in Tolarian Academy. Saheeli's, in contrast, was a sculpture of swooping curves made of fiery orange metal. It, like the sylex, would have fit into any gallery of fine art.
"You're too modest. It's far less painful-looking than your initial version."
"I'll accept that compliment," Saheeli said with a laugh. "Still, there have been a few snags. Aether is a much easier energy source to manipulate; even the most powerful aether engine is like a candle flame to a supernova when it comes to this."
She placed her hand on the pedestal where the #emph[Weatherlight] 's powerstone sat enmeshed in a nest of copper coils. Teferi grimaced. The mere memory of seeing the mighty airship, a symbol of strength for all Dominaria, twisted into a Phyrexian abomination soured his stomach.
"Managing the power load while trying to ensure the safety of the occupant is difficult," Saheeli said. "These components were never meant to work together." The mildest of accusations hung in the air between them. Teferi knew he'd saddled Saheeli with a nigh-impossible task and very little margin for error. "I think I've figured it out, but I need to run more tests."
He measured his words. "It's not my intention to push too hard, but—"
"I know," said Saheeli. "Here." Placing her hand onto the floor, Saheeli allowed the wind-up bird to alight onto her finger. "I made it for you."
"For me?"
With a smile, she flicked her hand, and the peafowl hopped onto Teferi's foot and pecked at his boot. "In Ghirapur, birds like this perch on the bridges that cross Canal Dukhara. Those bridges represent the founding of the city, when warring nobles decided it would be better to cooperate and create a future not dominated by war. For us, this little bird represents our cooperation, our unity in purpose."
Teferi bent down and let the bird hop onto his palm. He stood up, holding the bird close to his face. Its movements captured the herky-jerky nature of a flesh and blood bird, so much so that a casual observer might have mistaken it for its organic counterpart. But on closer inspection, one could discern between its golden feathers a clockwork heart that pulsed with the unmistakable glow of a tiny powerstone—one of dozens scavenged from the fallen Mana Rig. The bird hopped once to face him straight on, then unfurled its feathers in an arc of such delicate craftsmanship that Urza couldn't have matched in a thousand millennia, let alone a handful of days. Teferi chuckled.
"You like it?" Saheeli asked.
"Very much," he said. "My mentor was a great artificer, perhaps the best this plane has ever had. And the notion of prizing style over function would send him into fits. 'Bah! A waste of resources!' he'd say."
"Chance has granted us gifts," said Saheeli. "How we use our gifts will ultimately define us. I choose beauty. That is how I would like to be known." She whistled, and in response, the artifact bird spread its wings and spun in place, spraying a shower of multicolored sparks in every direction. Teferi couldn't help but smile. After a minute, the bird was back to its normal self, pecking at invisible crumbs he didn't have in his hand. "The Anchor will be ready to use tonight. Come back then."
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
Teferi rested against one of the few trees that still grew within the tower's narrow greenbelt ring. Before he lost his spark to the time rifts, he couldn't conceive of his body ever experiencing the aches and pains of age. #emph[That's for others] , he'd thought. #emph[Not me. Not ever.] It turned out that it was exactly for him, exactly what he'd needed, even with his spark restored. He found ironic amusement in it: the plane's foremost temporal mage succumbing to the ravages of time, welcoming them, even.
The sun had crested hours before, not that the high peaks surrounding the vale allowed for much direct sunlight outside a short window of time in the middle of the day. He thought of all the lands past the horizon. Shiv, where Jhoira was rallying dragons, viashino, goblins, and her own Ghitu to protect their land from Phyrexian attack. Orvada, where the merchant-lords had agreed to put aside their bristly relationship with Benalia and furnish food and supplies to Serran troops led by <NAME>. Urborg, where rumors had arisen of a spectral panther warrior back from the dead to bring salvation to the living. Outside this vale, Dominaria was uniting like it never had. But would any of that matter if he and his companions failed in their mission?
From behind him, Teferi heard heavy footsteps approach, accompanied by the brush of leaves against the wind, the creak of bark and xylem bending under stress. He looked up to see Wrenn and Seven approaching. It had taken Teferi some time to track Wrenn down, locating her finally on the plane of Cridhe, where she and Seven were basking in the intense mana showers of the plane's Clan Tree. It had taken him even more time to convince her to leave with him.
"I have fulfilled your request," said Wrenn. "They will grow strong, though your choice of land is questionable. There are no songs here, no harmony. Only isolated chords, warped and fragmented. Or worse—severed like gangrenous limbs."
"I promised you more. I'm sorry."
"Do not be. We are glad you have brought us here. I have been loath to explain to Seven about malevolence, about destruction. Better to show. Better to feel." Seven stooped to let Wrenn reach out and touch his hand. "Your own song is discordant this day, a vexing melody."
Teferi nodded. "I've been thinking."
"That is not reassuring, mage."
This amused Teferi, albeit fleetingly. "I was thinking about you, Kaya, and Saheeli—all of you who answered my call for help." He set Saheeli's clockwork peafowl onto the stone ground in front of him. It pecked about nonchalantly. "I can't help but think that this path has been trod before~ by Urza, my teacher. He, too, assembled heroes—Planeswalkers and mortals—to battle Phyrexia. Even so, history remembers him as the monster this plane needed to defeat the monsters who threatened it."
"Was he a monster?" Wrenn asked.
Teferi pondered that question. Most on Dominaria would have said #emph[yes] —those who'd actually known Urza emphatically so. But for Teferi, the answer wasn't so pat. "On Innistrad, I told you about Zhalfir. Remember?"
"Your homeland. The one you hoped I could help you find."
"I didn't tell you exactly how I lost it," he said. "You see, Urza petitioned me to be one of his titans. Yes, me. I had the great Urza Planeswalker begging me to join his merry band of heroes, and of course, I told him I would if he helped me first."
"A reasonable agreement."
"That's what he thought, too." Teferi clasped his hands together and rested his forehead on his fingers. "So, with his aid, I sealed shut a Phyrexian portal that had opened up in the skies above Zhalfir. When the task was done, and he demanded my help in return, I simply laughed and refused. 'You've only wanted to defeat your foes,' I told him. 'This is how I save my people. This is how you and I differ.' Then I siphoned energy from the closed portal to fuel a spell spiriting Zhalfir away from space and time itself. I didn't ask permission. I didn't care what the people of Zhalfir thought. So, you tell me—who is the monster?"
"Your tangle may prove too snarled. Even for me."
Teferi let out a dry laugh. "I was so damn proud of how easily I'd stolen his petty victory over me~ for my own petty victory. That's the way we all are—all of Urza's children of fury."
"Children?"
"We who are touched by his actions," Teferi explained. "His students, his colleagues~ even his enemies. We despise him, yet we follow in his footsteps like luckless understudies. I've smashed armies, sparing no mercy. I've vanquished those I deemed villains and maneuvered allies to their demise for my own ends. #emph[For the greater good] , I told myself." Teferi picked up a small rock and hurled it into the mist. "A liar who lies about his lies, the true heir to Urza's mantle."
Teferi waited for Wrenn's response. The dryad sat in quiet contemplation of his confession. He had never told anyone the extent of his missteps, at least not this directly. The reasonable reaction would have been for Wrenn to planeswalk away.
Instead, Wrenn turned to him, waves of heat emanating from the fire contained within her chest, and said, "I am not here to provide you absolution, mage. Your crimes are your own, and you will answer for them in time. Ultimately, you are not important. Neither am I. Teferi and Wrenn are singular melodies. I am here to play my part in the symphony."
The sounds of more footsteps, this time the harsh tapping of metal-shod boots, caused Teferi and Wrenn to break off their conversation. Elspeth was walking resolutely toward them clad in a full suit of armor. Teferi stood to receive her.
"If you seek punishment," said Wrenn, "I am sure there are others happy to mete it out. For now, I shall depart." Seven reared up and stomped away, taking Wrenn with them.
Teferi raised his hand to address Elspeth, but like before, she stopped him as he started to speak.
"I'm leaving tomorrow," she said. Her hand rested on the hilt of the sword that hung on her belt. "Thank you for letting me rest under your roof."
"It's not my roof," said Teferi. "But you're welcome all the same."
"Also~ I owe you an apology. Vivien thinks highly of you, and out of respect for her, I should not have spoken to you like I did." Satisfied, she spun on her heel like a trained soldier and began to walk back to the tower.
"Wait," Teferi called out. "I didn't know about Ajani."
Elspeth stopped and turned back around.
"None of us did," he continued. "I was there with him when it happened—when the compleation took hold. It almost seemed like he didn't know, either."
"This is no comfort," said Elspeth.
Teferi took his time with a response. It was a simple matter to say the truth was the truth. Isn't that what a great leader, a battle-hardened general would say? Isn't that what everyone needed him to be? Who was the true Teferi? Was it Teferi, mage of Zhalfir, who pledged to defend his home no matter the cost? Was it Teferi, master of time, the elitist, nigh-omnipotent planeswalker who thought everyone should simply get in line and follow? Or was it Teferi the disruptive student, who used cruel humor to obscure his own fears that no one would ever understand him, that no one would ever consider him a friend?
#emph[Get out of your head] . #emph[Look with your eyes.]
"Are you hungry?" Teferi asked.
"Hungry?" asked Elspeth, puzzled.
"Yes, have you eaten?" Teferi walked past and motioned for her to follow. "I've just realized that I haven't had so much as a crumb since this morning."
"Jodah provided me with some of his biscuits."
"Oh, then we should hurry."
With the cold, wet night air flowing through his robes, Teferi led Elspeth across the greenbelt to the tower proper, where they followed the wall around to a small patch of grass butted up against the tower's base. There, encircled by a globe of green energy, was a sprawl of vines bearing bulbous, pale green fruits. Teferi picked one and held it out to Elspeth.
"Mitab," Teferi said. He took another fruit and bit down into it, letting its juices flow out the sides of his mouth. He was aware he looked silly, not at all how a regal Planeswalker of old was supposed to comport himself. "Wrenn and I made a short stop in Jamuraa, my homeland, before coming back here."
Elspeth took the fruit and brought it to her lips. She tried to maintain decorum as she ate, but juice and fleshy bits of fruit stuck to her face no matter how careful she was. At some point, she gave up and began to eat faster, with more aplomb.
"I was hungrier than I thought," she said.
Teferi stared at the spell that Wrenn had woven to keep his mitab alive despite the vale's inhospitable climate. He reached down and hovered his hand within it, his fingers tingling as they grew warm. "I won't lie to you," he began. "You're right about our plan—it's yelling a prayer into a gale. But it's our best shot. We have a weapon capable of stopping the Phyrexians at the source. Right now, we're working on a way for me to learn how to use it. It's not perfect, but I must have faith that it's enough. For me, the fight against the Phyrexians isn't about being victorious."
Where was he headed with this? He'd always been so prepared. Even his practical jokes required extensive planning to pull off. But now, the words flowed out of him, first at a trickle, and then a torrent that he couldn't control. "I have a daughter," he said. "Her name is Niambi, and she~ Everything I'm doing is about saving #emph[her] . It's about her knowing that I did everything I could to save others while always remaining the person she knew, the father she loves. If I waver—if I have any doubt at all—I doom Niambi this very second."
"Then you do know," said Elspeth. "The terror that comes with hope."
No response was necessary. The remains of the mitab sat in his hands, the flesh eaten away leaving only the core and seeds. His fingers, covered in juice, glistened in the starlight. He placed the remains of the fruit onto the dirt underneath the vines and wiped his hands on his robes. In time, the heat of Wrenn's spell would bake the core dry, and worms would pull it under to nourish new plants.
"I should go," said Teferi. "Saheeli is waiting. I can escort you back to your room."
Elspeth refused. "I think I'll walk the grounds for a while. I like this weather."
"Then live on, Elspeth. Be well and happy. Safe journeys."
"If the Phyrexians are still on this plane," said Elspeth, "it's only a matter of time before they find this place. You will need someone to defend you if that happens—if they track us down."
"Us?"
"If you'll have me."
"We will. Gladly," said Teferi. "I hope we can get to know each other better."
Teferi turned and walked back toward the front of the tower. He managed only a few steps before a bright flash caused him to look over his shoulder. There, Elspeth stood with her sword unsheathed. Out of the globe-shaped pommel, tendrils of milky light spiraled outward, its radiance soft and warm like his earliest days spent under the Zhalfirin sun.
The pains of his wounds dulled, and his mind cleared, bringing forth a long-neglected memory: a flock of wattle-eyes that would pay regular visits to his home in Jamuraa. According to his father, they were the descendants of an injured bird he'd saved in his youth, a bird that lived under his family's roof as a full member until its wanderlust obliged it to leave. In the years, then decades, that followed, the bird had its own children that would regularly visit out of loyalty. Out of love. That's why the trees on his family's land always sang.
Eventually, Teferi became a wizard of repute, then a Planeswalker whose legend spread to other nations, other continents, other worlds. Still, in between waging bitter battles and accomplishing incredible magical feats, he'd recall the story of the birds and find comfort in hearing the low, soothing bass of his father's voice in his mind.
#emph[They do what they do out of love.]
The tale itself he'd dismiss as his father's whimsy and nothing more—an extravagance perfect for children who needed a story to hold onto.
But not this time. This time, Teferi chose to believe.
|
|
https://github.com/Dr00gy/Typst-thesis-template-for-VSB | https://raw.githubusercontent.com/Dr00gy/Typst-thesis-template-for-VSB/main/README.md | markdown | # Typst template for creating a bachelor's thesis
### What is included in this repo:
#### 📘 .typ files:
1. A **main.typ** file - the showcase file which already has some data written in it
2. The **thesis_template** .typ files - template.typ imports all of the needed functions created specifically for the template; the other files are modularised
#### 📄 Other files:
3. A folder for logos (in the thesis_template folder, NOT images)
4. A folder for images / figures for demonstrating images in the demo file (can be repurposed for someone's thesis files)
5. A PDF file of main.typ for showcasing purposes
6. A bibliography file for the demo
‼️ Make sure to edit the following when writing your thesis:
1. The title, author and keywords values
2. The language setting ("cs" or "en"; "sk" is not supported in Typst)
3. Your abstracts, keywords and symbols and abbreviations
4. Indent settings
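
For orientation, here is a purely hypothetical sketch of what such edits could look like; the actual field names and structure in main.typ may differ, so check the demo file for the real ones:

```typst
// Hypothetical values only - consult main.typ for the template's real fields.
#let title = "My Bachelor's Thesis"
#let author = "Jane Student"
#let keywords = ("typst", "thesis", "VSB")
#let lang = "en" // "cs" or "en"; "sk" is not supported in Typst
```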
|
|
https://github.com/drupol/master-thesis | https://raw.githubusercontent.com/drupol/master-thesis/main/README.md | markdown | Other | [](https://github.com/drupol/master-thesis/releases/latest)
[](https://creativecommons.org/licenses/by/4.0/)
[](https://firstdonoharm.dev/version/3/0/full.html)
[](https://doi.org/10.5281/zenodo.12666898)
# Reproducibility in Software Engineering (RiSE)
This repository contains the sources of my master's thesis in computer science,
entitled "Reproducibility in Software Engineering", that was completed in June
2024 at the [University Of Mons] under the supervision of Professor [<NAME>],
director of the [Software Engineering Lab].
Founded in October 2003, the [Software Engineering Lab] carries out research in
the domains of open source software, empirical software engineering, software
ecosystems, software evolution, and software modeling. The lab is directed by
Professor [<NAME>], and is part of the [Department of Computer Science] of the
Faculty of Sciences of the [University of Mons], Belgium.
## Abstract
The concept of reproducibility has long been a cornerstone in scientific
research, ensuring that results are robust, repeatable, and can be independently
verified. This concept has been extended to computer science, focusing on the
ability to recreate identical software artefacts. However, the importance of
reproducibility in software engineering is often overlooked, leading to
challenges in the validation, security, and reliability of software products.
This master's thesis aims to investigate the current state of reproducibility in
software engineering, exploring both the barriers and potential solutions to
making software more reproducible and raising awareness. It identifies key
factors that impede reproducibility such as inconsistent environments, lack of
standardisation, and incomplete documentation. To tackle these issues, I propose
an empirical comparison of tools facilitating software reproducibility.
To provide a comprehensive assessment of reproducibility in software
engineering, this study adopts a methodology that involves a hands-on evaluation
of four different methods and tools. Through a systematic evaluation of these
tools, this research seeks to determine their effectiveness in establishing and
maintaining identical software environments and builds.
This study contributes to academic knowledge and offers practical insights that
could influence future software development protocols and standards.
## Repository Structure
The repository is structured as follows:
- `src/thesis`: Contains the [Typst] source code
- `nix`: Contains the [Nix] expressions necessary for the build but also for the
local development environment
- `resources`: Contains some resources (images, source code, ...)
## Licensing
This work is licensed under a dual license: the Creative Commons Attribution 4.0
International ([CC BY 4.0]) and the Hippocratic Licence 3.0 ([HL3]) licences.
You are free to share and adapt the material under the terms of the CC BY 4.0,
provided you give appropriate credit to the original author. You must also use
the material in accordance with the ethical guidelines specified in HL3,
ensuring it is not used to contribute to human rights abuses or other unethical
practices. In case of any conflict between the licences, HL3 will take
precedence.
## Cite
```
@masterthesis{dellaieraMasterThesis2024,
title = {Reproducibility in Software Engineering},
author = {<NAME>},
year = 2024,
month = {June},
school = {University of Mons},
address = {Mons, Belgium},
type = {Master's thesis},
doi = {10.5281/zenodo.12666898}
}
```
## How To Contribute
I welcome and appreciate contributions from the community! Here are the ways
you can contribute:
1. **Codeberg (Preferred)**
- Visit the main repository on Codeberg: https://codeberg.org/p1ld7a/master-thesis
- Fork the repository and make your changes.
- Submit a pull request for review.
- Ensure your code adheres to our coding guidelines and is well-documented.
2. **GitHub**
- Visit the mirror repository on GitHub: https://github.com/drupol/master-thesis
- Fork the repository and make your changes.
- Submit a pull request for review.
- Ensure your code adheres to our coding guidelines and is well-documented.
3. **Email**
- If you prefer, you can also contribute by sending me patches or suggestions
via email.
- Please include a detailed description of your changes and any relevant
attachments.
- My email address: <EMAIL>
### Contribution Guidelines
To ensure a smooth contribution process, please follow these guidelines:
- **Code Quality:** Make sure your changes are clean and readable.
- **Commit Messages:** Write clear and descriptive commit messages.
### Setting Up Your Development Environment
To set up a local development environment with all the necessary tools to build
the document, you have two options:
1. **Using Nix:** Load the default shell environment by running: `nix develop`
2. **Using DevContainer:** You can also set up a complete development
environment using [DevContainer].
The master thesis is written using [Typst], a modern typesetting system. The
primary language of the document is British English.
[DevContainer]: https://containers.dev/
[Nix]: https://nixos.org/
[Typst]: https://typst.app/
[University Of Mons]: https://www.umons.ac.be/
[Tom Mens]: https://staff.umons.ac.be/tom.mens/
[CC BY 4.0]: https://creativecommons.org/licenses/by/4.0/
[HL3]: https://firstdonoharm.dev/version/3/0/full.html
[Software Engineering Lab]: https://informatique-umons.be/genlog/
[Department of Computer Science]: https://informatique.umons.ac.be/
|
https://github.com/darioglasl/Arbeiten-Vorlage-Typst | https://raw.githubusercontent.com/darioglasl/Arbeiten-Vorlage-Typst/main/01_Einleitung/06_nfr.typ | typst | #import "../Helpers/nfr-data.typ": nfrScenarios
== Non Functional Requirements (NFR) <headingNfrs>
#let n = 0
#while n < nfrScenarios.len() {
let number = n + 1
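  // zero-pad single-digit IDs so they render as NFR-01 ... NFR-09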
  let title = if number < 10 {
[NFR-0#number]
} else {
[NFR-#number]
}
figure(
table(
columns: (16%, 84%),
inset: (x: 5pt, y: 4pt),
align: left,
fill: (_, row) => if calc.odd(row) { luma(225) } else { white },
[*ID*], [*#title* #nfrScenarios.at(n).titel],
[Anforderung], [#nfrScenarios.at(n).anforderung],
[Szenario], [#nfrScenarios.at(n).szenario],
[Stimulus], [#nfrScenarios.at(n).stimulus],
[Reaktion], [#nfrScenarios.at(n).reaktion],
[Massnahme], [
#for measure in nfrScenarios.at(n).massnahme [
#measure
]
],
[Level], [#nfrScenarios.at(n).level],
),
caption: [#title #nfrScenarios.at(n).anforderung],
)
n = n + 1
} |
|
https://github.com/Arsenii324/matap-p2 | https://raw.githubusercontent.com/Arsenii324/matap-p2/main/t-repo/main.typ | typst | #import "template.typ": *
// #include "lecture2.typ"
// Take a look at the file `template.typ` in the file panel
// to customize this template and discover how it works.
#show: project.with(
title: "Математический анализ 2",
authors: (
(name: "Georgy", affiliation: "@georgyshamteev"),
),
)
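// Bold the level-1 and level-2 outline entries; level-3 entries keep the default style.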
#show outline.entry.where(
level: 1
): it1 => {
strong(it1)
}
#show outline.entry.where(
level: 2
): it2 => {
strong(it2)
}
#show outline.entry.where(
level: 3
): it3 => {
it3
}
#outline(
indent: n => 2em * n,
)
#pagebreak()
#include "lecture1.typ"
#pagebreak()
#include "lecture2.typ"
#pagebreak()
#include "lecture3.typ"
#pagebreak()
#include "lecture4.typ"
#pagebreak()
#include "lecture5.typ"
#pagebreak()
#include "lecture6.typ"
#pagebreak()
#include "lecture7.typ"
|
|
https://github.com/Myriad-Dreamin/tinymist | https://raw.githubusercontent.com/Myriad-Dreamin/tinymist/main/docs/tinymist/introduction.typ | typst | Apache License 2.0 | // dist/tinymist/rs
#import "mod.typ": *
#show: book-page.with(title: "Introduction")
Tinymist [ˈtaɪni mɪst] is an integrated language service for #link("https://typst.app/")[Typst] [taɪpst]. You can also call it "微霭" [wēi ǎi] in Chinese.
It contains:
- an analyzing library for Typst, see #link("https://github.com/Myriad-Dreamin/tinymist/tree/main/crates/tinymist-query")[tinymist-query].
- a CLI for Typst, see #link("https://github.com/Myriad-Dreamin/tinymist/tree/main/crates/tinymist/")[tinymist].
- which provides a language server for Typst, see #cross-link("/feature/language.typ")[Language Features].
- which provides a preview server for Typst, see #cross-link("/feature/preview.typ")[Preview Feature].
- a VSCode extension for Typst, see #link("https://github.com/Myriad-Dreamin/tinymist/tree/main/editors/vscode/")[Tinymist VSCode Extension].
== Features
#include "feature/language-content.typ"
== Release Cycle
Tinymist follows the #link("https://semver.org/")[Semantic Versioning] scheme. The version number is in the format of `MAJOR.MINOR.PATCH`. The release cycle is as follows:
- If there is a typst version update, a new major or minor version will be released immediately. This means tinymist will always align the minor version with typst.
- If there is at least a bug or feature added this week, a new patch version will be released.
== Installation
Follow the instructions to enable tinymist in your favorite editor.
- #cross-link("/frontend/vscode.typ")[VS Cod(e,ium)]
- #cross-link("/frontend/neovim.typ")[NeoVim]
- #cross-link("/frontend/emacs.typ")[Emacs]
- #cross-link("/frontend/sublime-text.typ")[Sublime Text]
- #cross-link("/frontend/helix.typ")[Helix]
- #cross-link("/frontend/zed.typ")[Zed]
== Installing Regular/Nightly Prebuilds from GitHub
Note: if you do not know what a regular/nightly release is, please don't follow this section.
Besides published releases specific for each editors, you can also download the latest regular/nightly prebuilts from GitHub and install them manually.
- Regular prebuilts can be found in #link("https://github.com/Myriad-Dreamin/tinymist/releases")[GitHub Releases].
- Nightly prebuilts can be found in #link("https://github.com/Myriad-Dreamin/tinymist/actions")[GitHub Actions]. For example, if you are seeking a nightly release for the featured #link("https://github.com/Myriad-Dreamin/tinymist/pull/468")[PR: build: bump version to 0.11.17-rc1], you could click and go to the #link("https://github.com/Myriad-Dreamin/tinymist/actions/runs/10120639466")[action page] run for the related commits and download the artifacts.
To install an extension file (the file with the `.vsix` extension) manually, press #kbd("Ctrl+Shift+X") in the editor window and drop the downloaded vsix file into the opened extensions view.
== Documentation
See #link("https://myriad-dreamin.github.io/tinymist/")[Online Documentation].
== Packaging
#link(
"https://repology.org/project/tinymist/versions",
md-alter(
"Packaging status",
() => image("https://repology.org/badge/vertical-allrepos/tinymist.svg", alt: "Packaging status"),
),
)
== Roadmap
After development for two months, most of the features are implemented. There are still some features to be implemented, but I would like to leave them in typst v0.12.0. I'll also pick some of them to implement on my weekends. Also please feel free to contribute if you are interested in the following features.
- Documentation and refactoring: It is my current focus.
- Spell checking: There is already a branch but no suitable (default) spell checking library is found.
- Periscope renderer: It is disabled since vscode reject to render SVGs containing foreignObjects.
- Inlay hint: It is disabled _by default_ because of performance issues.
- Find references of labels, dictionary fields, and named function arguments.
- Go to definition of dictionary fields and named function arguments.
- Autocompletion for raw language tags.
- Improve symbol view's appearance.
== Contributing
Please read the #link("CONTRIBUTING.md")[CONTRIBUTING.md] file for contribution guidelines.
== Acknowledgements
- Partially code is inherited from #link("https://github.com/nvarner/typst-lsp")[typst-lsp]
- The #link("https://github.com/Myriad-Dreamin/tinymist/tree/main/editors/vscode#symbol-view")[integrating] *offline* handwritten-stroke recognizer is powered by #link("https://detypify.quarticcat.com/")[Detypify].
- The #link("https://github.com/Myriad-Dreamin/tinymist/tree/main/editors/vscode#preview")[integrating] preview service is powered by #link("https://github.com/Enter-tainer/typst-preview")[typst-preview].
|
https://github.com/elteammate/typst-compiler | https://raw.githubusercontent.com/elteammate/typst-compiler/main/src/lexer.typ | typst | /// Build a lexer with a given structure
///
/// The rules are represented by an array of pairs,
/// First element of each pair is a regular expression is used to
/// match tokens in the given source. Order of rules matters.
/// When extracting the token from source, the first rule is matched,
/// if match failed, then the second, and so on. Once a matching rule found,
/// The value from the second element of the pair is added to token list.
/// If no matching rule found, panics.
///
/// The second element also may be a function, in which case it would
/// be invoked with the match object as an argument. The returned value is
/// added to token list.
///
/// Finally, it can also be `none`, in which case the token is skipped.
///
/// The `post_process` callback is executed for every token in the generated
/// list with a match object which matches the string at the given position.
/// It's useful for adding information after the tokenizer is finished,
/// for example, about the span of the token.
///
/// Sounds complicated, but it's super easy to use.
///
/// lexer: ((str, none | T | (match => T)))
/// post_process: (T, match) => any
#let compile_lexer(lexer, post_process) = {
let full_regex = regex("((" + lexer.map(rule => rule.at(0)).join(")|(") + "))")
let group_count_regex = regex("(^\(|[^\\\\]\()")
let regex_group_mapping = ()
let cur_group = 1
for rule in lexer {
regex_group_mapping.push((
group_no: cur_group,
rule: rule.at(1),
))
cur_group += 1 + rule.at(0).matches(group_count_regex).len()
}
(s) => {
let matches = s.matches(full_regex)
for i, match in matches {
if i + 1 < matches.len() {
let next = matches.at(i + 1)
if match.end != next.start {
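          // Deliberately index a missing key of an empty dictionary so compilation aborts with this message.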
return (:).at("Slice `" + s.slice(match.end, next.start) + "` can not be lexed")
}
}
}
matches.map(match => {
for rule_no, plausable_rule in regex_group_mapping {
if match.captures.at(plausable_rule.group_no) == none { continue }
let kind = if type(plausable_rule.rule) == "function" {
(plausable_rule.rule)(match)
} else if plausable_rule.rule == none {
none
} else {
plausable_rule.rule
}
if kind == none { return none }
match.captures = () => match.captures.slice(
plausable_rule.group_no,
if rule_no + 1 < regex_group_mapping.len() {
regex_group_mapping.at(rule_no + 1).group_no
} else {
regex_group_mapping.len()
}
)
return post_process(kind, match)
}
}).filter(x => x != none)
}
}
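
// A minimal usage sketch (not part of the original file). The rule strings,
// token names, and the post_process shape below are illustrative assumptions:
//
// #let tokenize = compile_lexer(
//   (
//     ("[0-9]+", m => "number"),
//     ("[a-zA-Z_]+", "ident"),
//     ("\\s+", none),
//   ),
//   (kind, m) => (kind: kind, start: m.start, end: m.end),
// )
//
// #tokenize("foo 42") // -> ((kind: "ident", ..), (kind: "number", ..)); whitespace tokens are skipped.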
|
|
https://github.com/chubetho/Bachelor_Thesis | https://raw.githubusercontent.com/chubetho/Bachelor_Thesis/main/chapters/summary.typ | typst | = Summary
In this concluding chapter, the insights from @section_review and the key findings from the evaluation chapters are synthesized to address the two primary research questions of this study. Subsequently, a plan for future research is proposed to further explore and enhance the feasibility of adopting a micro frontend architecture for the @dklb project.
== Conclusion
A table outlining the advantages and disadvantages across the four aspects of flexibility, maintainability, scalability, and performance will be presented first.
#{
show table.cell.where(x: 0): strong
show table.cell.where(y: 0): strong
show table.cell.where(y: 0): set align(center)
let flip = c => table.cell(align: horizon, rotate(-90deg, reflow: true)[#c])
table(
columns: (auto, 1fr, 1fr),
inset: 10pt,
[], [Advantages], [Disadvantages],
flip[Flexibility],
[
*Technology Agnostic*: Teams can choose different technologies or frameworks that best suit their micro frontend, enabling the adoption of new technologies without rewriting the entire application.
*Independent Deployment*: Micro frontends can be deployed independently, allowing for faster feature releases and rollbacks.
],
[
*Inconsistent User Interface:* Without strict guidelines, the look and feel across micro frontends can become inconsistent, affecting the overall user experience.
*Integration Challenges:* Combining different technologies and frameworks requires careful planning to ensure seamless interaction between micro frontends.
],
flip[Maintainability],
[
*Modular Codebase*: Breaking the frontend into smaller, manageable pieces makes the codebase easier to understand and maintain. Each team can focus on a specific module without affecting others.
*Independent Updates*: Teams can update or refactor their micro frontend without coordinating with other teams, reducing the risk of introducing bugs into unrelated parts of the application.
*Isolation of Issues*: Bugs are confined to specific micro frontends, making it easier to locate and fix problems.
],
[
*Complex Dependency Management:* Managing shared libraries and ensuring consistency across micro frontends can become challenging.
*Overhead in Tooling:* Maintaining multiple build processes, repositories, or deployment pipelines can increase the complexity of the development environment.
],
flip[Scalability],
[
*Team Scalability*: Multiple teams can work in parallel on different micro frontends without interfering with each other, enhancing productivity.
*Performance Scaling*: Specific micro frontends can be scaled independently based on load and performance needs, optimizing resource usage.
*Modular Growth*: New features can be added as separate micro frontends without impacting the existing system.
],
[
*Infrastructure Complexity:* Scaling multiple micro frontends may require sophisticated infrastructure and orchestration tools.
*Network Overhead*: Increased number of services can lead to more network requests, which might impact performance if not managed properly.
],
flip[Performance],
[
*Optimized Loading:* Micro frontends allow for lazy loading of components, reducing initial load times by fetching only what's necessary.
],
[
*Increased Bundle Size*: Different micro frontends might include duplicate dependencies, increasing the overall application size.
*Runtime Overhead:* Assembling multiple micro frontends at runtime can introduce latency, especially if not efficiently managed.
],
)
}
The table above indicates that a micro frontend architecture can effectively address the challenges and limitations of the current monolithic system in the @dklb project. This approach introduces greater flexibility in development and deployment, while also improving maintainability and scalability for individual parts of the frontend. These characteristics align well with agile methodologies, promoting iterative development and enabling faster delivery. Moreover, as highlighted in the research by Männistö et al. @mannisto_ExperiencesFrameworklessMicroFrontend_2023, even small teams can leverage the benefits of this architecture.
However, adopting a micro frontend architecture introduces additional complexity in management and monitoring, particularly in ensuring a smooth integration of components and a consistent user experience. Additionally, the decentralized nature of this approach requires further optimization to maintain adequate performance levels.
In conclusion, micro frontend architecture presents a promising solution for web applications, delivering notable advantages while also introducing certain challenges. The decision to implement this approach should be driven by the project's specific requirements, carefully considering whether the additional complexities are justified by the benefits. While this study has examined crucial aspects of the web application development process, further in-depth analysis is necessary to fully assess and optimize its potential for the @dklb project.
== Future Research
One necessary optimization is bundle analysis, which aims to reduce duplicate code by ensuring proper sharing of library code across JavaScript chunks, as these redundancies can negatively impact performance. To address this issue, gaining deeper knowledge of the Vite plugin could enable more precise intervention in its configuration.
Alternatively, Rspack could be considered as a replacement bundler, given its official support for Module Federation. This provides a key advantage over Vite, which currently depends on a third-party plugin that is no longer actively maintained. Furthermore, the case for adopting Rspack is strengthened by its collaboration with the creator of Module Federation on the forthcoming release of Module Federation 2.0. This update promises new features, expanded use cases, and improved performance, making it a more robust solution.
Additionally, improved error-handling mechanisms should be implemented. For example, when a horizontal micro frontend encounters an error, redirecting the user to an error page disrupts the experience, as only a portion of the view may be affected. A more refined approach would involve updating the overview configuration at runtime to control which micro frontends are displayed or hidden, allowing for greater flexibility and enabling more dynamic solutions.
#pagebreak(weak: true) |
|
https://github.com/longlin10086/HITSZ-PhTyp | https://raw.githubusercontent.com/longlin10086/HITSZ-PhTyp/main/utils/image.typ | typst | #import "@preview/wrap-it:0.1.0": wrap-content
#import "../themes/theme.typ" : *
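// Renders the teacher's name in bold, with the signature image wrapped alongside it at the top right.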
#let teacher_signature(
name,
images
) = {
set text(font: 字体.黑体, size: 字号.五号,lang: "zh", region: "cn")
let bold_name = strong(name)
let fig = image(
images,
height: 28pt,
fit: "contain",
)
wrap-content(
fig,
bold_name,
align: top + right,
column-gutter: 3pt,
)
}
|
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/minimalistic-latex-cv/0.1.0/README.md | markdown | Apache License 2.0 | # minimalistic-latex-cv
This is a Typst template for a minimalistic LaTeX-style CV. It provides a simple
structure for a CV with a header, a section for professional experience, a section
for education, and a section for skills and languages.
## Usage
You can use this template in the Typst web app by clicking "Start from template"
on the dashboard and searching for `minimalistic-latex-cv`.
Alternatively, you can use the CLI to kick this project off using the command
```
typst init @preview/minimalistic-latex-cv
```
Typst will create a new directory with all the files needed to get you started.
## Configuration
This template exports the `cv` function with the following named arguments:
- `name`: The name of the person.
- `metadata`: A dictionary of metadata of the person to be displayed in the header.
- `photo`: The path to the photo of the person.
- `lang`: The language of the document.
The function also accepts a single, positional argument for the body of the
paper.
The template will initialize your package with a sample call to the `cv`
function in a show rule. If you want to change an existing project to use this
template, you can add a show rule like this at the top of your file:
```typ
#import "@preview/minimalistic-latex-cv:0.1.0": cv
#show: cv.with(
name: "<NAME>",
metadata: (
address: "1234 City, Example Street 1/A",
email: "<EMAIL>",
telephone: "+123456789",
),
photo: image("photo.jpeg"),
lang: "en",
)
// Your content goes below.
```
|
https://github.com/lkoehl/typst-boxes | https://raw.githubusercontent.com/lkoehl/typst-boxes/main/lib.typ | typst | MIT License | #let box-colors = (
default: (stroke: luma(70), fill: white, title: white),
red: (stroke: rgb(237, 32, 84), fill: rgb(253, 228, 224), title: white),
green: (stroke: rgb(102, 174, 62), fill: rgb(235, 244, 222), title: white),
blue: (stroke: rgb(29, 144, 208), fill: rgb(232, 246, 253), title: white),
)
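// A filled block with a 2pt colored border and an optional title bar in the accent color.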
#let colorbox(
title: none,
box-colors: box-colors,
color: "default",
radius: 2pt,
width: auto,
body,
) = {
return block(
fill: box-colors.at(color).fill,
stroke: 2pt + box-colors.at(color).stroke,
radius: radius,
width: width,
)[
#if title != none [
#block(
fill: box-colors.at(color).stroke,
inset: 8pt,
radius: (top-left: radius, bottom-right: radius),
)[
#text(fill: box-colors.at(color).title, weight: "bold")[#title]
]
]
#block(
width: 100%,
inset: (x: 8pt, bottom: 8pt, top: if title == none { 8pt } else { 0pt }),
)[
#body
]
]
}
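// Like colorbox, but the title sits on a slanted banner drawn as a polygon along the top edge.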
#let slanted-colorbox(
title: "Title",
box-colors: box-colors,
color: "default",
radius: 2pt,
width: auto,
body
) = {
let slanted-background(color: black, body) = {
set text(fill: white, weight: "bold")
style(styles => {
let size = measure(body, styles)
let inset = 8pt
[#block()[
#polygon(
fill: color,
(0pt, 0pt),
(0pt, size.height + (2 * inset)),
(size.width + (2 * inset), size.height + (2 * inset)),
(size.width + (2 * inset) + 6pt, 0cm),
)
#place(center + top, dy: size.height, dx: -3pt)[#body]
]]
})
}
return block(
fill: box-colors.at(color).fill,
stroke: 2pt + box-colors.at(color).stroke,
radius: radius,
width: width,
)[
#slanted-background(color: box-colors.at(color).stroke)[#title]
#block(width: 100%, inset: (top: -2pt, x: 10pt, bottom: 10pt))[
#body
]
]
}
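// A colored box whose bold title floats in a small tab overlapping the top border, centered or left-aligned.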
#let outline-colorbox(
title: "Title",
box-colors: box-colors,
color: "default",
width: 100%,
radius: 2pt,
centering: false,
body,
) = {
return block(
fill: box-colors.at(color).fill,
stroke: 2pt + box-colors.at(color).stroke,
radius: radius,
width: width,
above: 26pt,
)[
#if centering [
#place(top + center, dy: -12pt)[
#block(fill: box-colors.at(color).stroke, inset: 8pt, radius: radius)[
#text(fill: white, weight: "bold")[#title]
]
]
] else [
#place(top + start, dy: -12pt, dx: 20pt)[
#block(fill: box-colors.at(color).stroke, inset: 8pt, radius: radius)[
#text(fill: white, weight: "bold")[#title]
]
]
]
#block(width: 100%, inset: (top: 20pt, x: 10pt, bottom: 10pt))[
#body
]
]
}
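// A sticky-note style yellow box with an SVG drop shadow and a piece of tape at the top, optionally rotated.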
#let stickybox(rotation: 0deg, width: 100%, body) = {
let stickyYellow = rgb(255, 240, 172)
return rotate(
rotation,
)[
#let shadow = 100%
#if width != 100% {
shadow = width
}
#place(
bottom + center,
dy: if type(width) == ratio { 0.2 * shadow } else { 0.05 * shadow },
)[
#image("background.svg", width: shadow - 3mm)
]
#block(
fill: stickyYellow,
width: width,
)[
#place(
top + center,
dy: -2mm,
)[
#image(
"tape.svg",
width: if type(width) == ratio { calc.clamp(width * 0.35cm / 1cm, 1, 4) * 1cm } else { calc.clamp(width * 0.35 / 1cm, 1, 4) * 1cm },
height: 4mm,
)
]
#block(width: 100%, inset: (top: 12pt, x: 8pt, bottom: 8pt))[
#body
]
]
]
}
|
https://github.com/monlie/WeeklyRCBI | https://raw.githubusercontent.com/monlie/WeeklyRCBI/main/README.md | markdown | MIT License | # WeeklyRCBI
WeeklyRCBI is the Typst weekly-report template of the brain-inspired research center at Xi'an Jiaotong University. Any student who is no longer willing to put up with Word's poor behaviour is welcome to give it a try.

## Introduction
Typst separates content from style as powerfully as LaTeX, while offering syntax as simple and clear as Markdown. Just put the template-loading code below at the top of your file, and you can write everything after it as if it were a plain notepad.
```typst
#import "weekly.typ": weekly
#show: doc => weekly(
doc,
author: "朱自清",
plan: "在清华园里继续写作。",
problem: "没什么问题。",
writting-date: datetime(year: 1927, month: 7, day: 1),
day-offset: -2,
)
// Start writing the body right below; no need to worry about formatting...
```
* `author`: the person writing the report; fill in your own name
* `plan`: the work plan for the coming week; if omitted, it defaults to "继续实验" ("continue the experiments")
* `problem`: possible open problems; if omitted, it defaults to "无" ("none")
* `writting-date`: a manually specified date; if omitted, it defaults to today, i.e. `datetime.today()`
* `day-offset`: day offset, effective only when `writting-date` is not given; defaults to `0`, assuming you are a good student who hands the report in on time. If you only hand it in on Sunday, set this to `-2`; if you are so hard-working that you finish by Wednesday, set it to `2` 😅
If, like me, you are tired of typing the report date by hand every time, enjoy the convenience that automation brings. Read the parameter descriptions carefully: the template offers two ways to specify the date, and in general, unless you are back-filling a report for last week or earlier, specifying the offset in days with `day-offset` alone is enough.
## Usage
If you are as lazy as I am, you can simply use the official Typst web editor: [https://typst.app/](https://typst.app/) Just drag all the files of this project onto it and it will run.
If you are more diligent than I am, you can install Typst locally. Don't worry: Typst is a "next-generation typesetting system" and will not eat several gigabytes of disk space the way LaTeX does. In fact, Typst is only a few dozen megabytes.
If you have any questions, your first stop should be the Chinese Typst tutorial: [https://typst-doc-cn.github.io/docs/tutorial/](https://typst-doc-cn.github.io/docs/tutorial/)
|
https://github.com/rice8y/cetzuron | https://raw.githubusercontent.com/rice8y/cetzuron/main/src/fcnn.typ | typst | #import "requirements.typ": *
#let fcnn(inputNodes, middleNodes, outputNodes, middleLayers: 3, label: true) = {
let inputTop = 3/4 * (inputNodes - 1)
let middleTop = 3/4 * (middleNodes - 1)
let outputTop = 3/4 * (outputNodes - 1)
let r = 0.5
let margin = -0.5
cetz.canvas({
import cetz.draw: *
set-style(stroke: 0.5pt)
//// draw nodes
// input
for i in range(1, inputNodes+1) {
circle((0, inputTop - 1.5 * (i + 1)), radius: r)
}
// middle
for i in range(1, middleLayers+1) {
for j in range(1, middleNodes+1) {
circle((3 * i, middleTop - 1.5 * (j + 1)), radius: r)
}
}
// output
for i in range(1, outputNodes+1) {
circle((3 * middleLayers + 3, outputTop - 1.5 * (i + 1)), radius: r)
}
//// draw lines
// input to middle
for i in range(1, inputNodes+1) {
for j in range(1, middleNodes+1) {
line((0.5, inputTop - 1.5 * (i + 1)), (2.5, middleTop - 1.5 * (j + 1)))
}
}
// middle to middle
if middleLayers!=1 {
for i in range(2, middleLayers+1) {
for j in range(1, middleNodes+1) {
for k in range(1, middleNodes+1) {
line((3 * i - 2.5, middleTop - 1.5 * (j + 1)), (3 * i - 0.5, middleTop - 1.5 * (k + 1)))
}
}
}
}
// middle to output
for i in range(1, middleNodes+1) {
for j in range(1, outputNodes+1) {
line((3 * middleLayers + 0.5, middleTop - 1.5 * (i + 1)), (3 * middleLayers + 2.5, outputTop - 1.5* (j + 1)))
}
}
//// labels
if label {
content((0, inputTop + margin), text[Input Layer])
if middleLayers==1 {
content((3 * middleLayers / 2 + 1.5, middleTop + margin), text[Hidden Layer])
} else {
content((3 * middleLayers / 2 + 1.5, middleTop + margin), text[Hidden Layers (#middleLayers dim)])
}
content((3 * middleLayers + 3, outputTop + margin), text[Output Layer])
}
})
}
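// Example: #fcnn(3, 5, 2, middleLayers: 2) draws a 3-5-5-2 fully connected network with layer labels.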
|
|
https://github.com/jgm/typst-hs | https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/text/font-02.typ | typst | Other | // Error: 11-16 unexpected argument
#set text(false)
|
https://github.com/MattiaOldani/Teoria-dei-Linguaggi | https://raw.githubusercontent.com/MattiaOldani/Teoria-dei-Linguaggi/master/esercizi.typ | typst | // Setup
#import "template.typ": project
#show: project.with(
title: "Esercizi di teoria dei linguaggi"
)
#let medium-blue = rgb("#4DA6FF")
#let light-blue = rgb("#9FFFFF")
#let introduction(body) = block(
fill: medium-blue,
width: 100%,
inset: 8pt,
radius: 4pt,
body,
)
#let assignment(body) = block(
fill: light-blue,
width: 100%,
inset: 8pt,
radius: 4pt,
body,
)
#pagebreak()
// Exercises
= Lecture 01
#pagebreak()
= Lecture 02
== Exercise 01
#introduction(
  [Consider the alphabet $Sigma = {a,b}$.],
)
#assignment(
  [- Give a context-free grammar for the language of even-length palindrome strings over $Sigma$, that is, for the set $"PAL"_"pari" = {w w^R bar.v w in Sigma^*}$.],
)
Production rules:
- $S arrow.long epsilon$;
- $S arrow.long a S a$;
- $S arrow.long b S b$.
#assignment(
  [- Modify the previous grammar so that it generates the set PAL of all palindrome strings over $Sigma$.],
)
Production rules:
- $S arrow.long epsilon$;
- $S arrow.long a S a$;
- $S arrow.long b S b$;
- $S arrow.long L$;
- $L arrow.long a$;
- $L arrow.long b$.
#assignment(
  [- For every $k in [0,3]$ answer the question "is the language PAL of type $k$?", justifying your answer.]
)
- Type 0: yes, every language is a language of type 0;
- Type 1: yes, for every production rule $alpha arrow.long beta$ we have $|beta| gt.eq |alpha|$;
- Type 2: yes, every production rule $alpha arrow.long beta$ has $alpha in V$ and $beta in (V union Sigma)^*$;
- Type 3: no, the rule $S arrow.long a S a$ is not of the form $A arrow.long a B$ or $A arrow.long a$.
#assignment(
  [If we replace the alphabet with $Sigma = {a,b,c}$, do the answers to the previous point change? And if we replace it with $Sigma = {a}$?]
)
If $Sigma = {a,b,c}$ the answers do not change, since it suffices to add the rules:
- $S arrow.long c S c$;
- $L arrow.long c$.
If $Sigma = {a}$ the production rules become:
- $S arrow.long epsilon$;
- $S arrow.long a$;
- $S arrow.long a S a$;
but this does not change the answers.
== Exercise 02
Not yet explained
== Exercise 03
#introduction(
  [Let $Sigma = {(,)}$ be an alphabet whose symbols are the open parenthesis and the closed parenthesis.]
)
#assignment(
  [Write a context-free grammar that generates the language consisting of all correctly balanced sequences of parentheses, such as `(()(()))()`.]
)
Production rules:
- $S arrow.long epsilon$;
- $S arrow.long (S)$;
- $S arrow.long S S$.
#assignment(
  [Solve the previous point for an alphabet with two kinds of parentheses, such as $Sigma = {(, ), [, ]}$, in the case where there are no constraints between the kinds of parentheses (round parentheses may appear inside square brackets and vice versa). Example: `[()([])[]]`, but not `[[][(])()]`.]
)
Production rules:
- $S arrow.long epsilon$;
- $S arrow.long (S)$;
- $S arrow.long [S]$;
- $S arrow.long S S$.
#assignment(
  [Solve the previous point with $Sigma = {(, ), [, ]}$, with the constraint that square brackets may never appear inside round parentheses. Example: `[()(())[][]](()())`, but not `[()([])[]]`.]
)
Production rules:
- $S arrow.long epsilon$;
- $S arrow.long [S]$;
- $S arrow.long S S$;
- $S arrow.long I$;
- $I arrow.long epsilon$;
- $I arrow.long (I)$;
- $I arrow.long I I$.
== Exercise 04
#introduction(
  [Let $G = (V, Sigma, P, S)$ be the grammar with $V = {S, B, C}$, $Sigma = {a, b, c}$ and $P$ containing the following productions: #list([$S arrow.long a S B C | a B C$;], [$C B arrow.long B C$;], [$a B arrow.long a b$;], [$b B arrow.long b b$;], [$b C arrow.long b c$;], [$c C arrow.long c c$.])]
)
#assignment(
  [After establishing which type $G$ is, try to derive a few strings. Can you tell which strings make up the language generated by $G$?]
)
The grammar $G$ is of type 1.
Let us derive a few strings:
- $S arrow.long a B C arrow.long a b C arrow.long a b c$;
- $S arrow.long a S B C arrow.long a a B C B C arrow.long a a b C B C arrow.long a a b B C C arrow.long a a b b C C arrow.long a a b b c C arrow.long a a b b c c $.
The language $L(G)$ is the set ${a^n b^n c^n bar.v n gt.eq 1}$.
== Exercise 05
#introduction(
  [Let $G = (V, Sigma, P, S)$ be the grammar with $V = {S, B, C}$, $Sigma = {a, b, c}$ and $P$ containing the following productions: #list([$S arrow.long a B S c | a b c$;], [$B a arrow.long a B$;], [$B b arrow.long b b$.])]
)
#assignment(
  [After establishing which type $G$ is, try to derive a few strings. Can you tell which strings make up the language generated by $G$?]
)
The grammar $G$ is of type 1.
Let us derive a few strings:
- $S arrow.long a b c$;
- $S arrow.long a B S c arrow.long a B a b c c arrow.long a a B b c c arrow.long a a b b c c$.
The language $L(G)$ is the set ${a^n b^n c^n bar.v n gt.eq 1}$.
== Exercise 06
#introduction(
  [Let $G = (V, Sigma, P, S)$ be the grammar with $V = {S, A, B, C, D, E}$, $Sigma = {a,b}$ and $P$ containing the following productions: #list([$S arrow.long A B C$;], [$A B arrow.long a A D | b A E | epsilon$;], [$D C arrow.long B a C$;], [$E C arrow.long B b C$;], [$D a arrow.long a D$;], [$D b arrow.long b D$;], [$E a arrow.long a E$;], [$E b arrow.long b E$;], [$C arrow.long epsilon$;], [$a B arrow.long B a$;], [$b B arrow.long b B$.])]
)
#assignment(
[Dopo avere stabilito di che tipo é $G$, provate a derivare alcune stringhe. Riuscite a dire da quali stringhe é formato il linguaggio generato da $G$?]
)
La grammatica $G$ é di tipo 1.
Deriviamo qualche stringa:
- $S arrow.long A B C arrow.long^* epsilon$;
- $S arrow.long A B C arrow.long a A D C arrow.long a A B a C arrow.long^* a a$;
- $S arrow.long^* a A B a C arrow.long a a A D a C arrow.long a a A a D C arrow.long a a A a B a C arrow.long a a A B a a C arrow.long^* a a a a$;
- $S arrow.long^* a A B a C arrow.long a b A E a C arrow.long a b A a E C arrow.long a b A a B b C arrow.long a b A B a b C arrow.long^* a b a b$;
- $S arrow.long A B C arrow.long b A E C arrow.long b A B b C arrow.long^* b b$;
- $S arrow.long^* b A B b C arrow.long b b A E b C arrow.long b b A b E C arrow.long b b A b B b C arrow.long b b A B b b C arrow.long^* b b b b$;
- $S arrow.long^* b A B b C arrow.long b a A D b C arrow.long b a A b D C arrow.long b a A b B a C arrow.long b a A B b a C arrow.long^* b a b a$.
Il linguaggio $L(G)$ è l'insieme ${w w bar.v w in {a,b}^*}$: le derivazioni precedenti producono infatti $epsilon$, $a a$, $a a a a$, $a b a b$, $b b$, $b b b b$ e $b a b a$, tutte della forma $w w$.
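Ad esempio si può derivare anche la stringa $a a b a a b$ (cioè $w w$ con $w = a a b$):
$ S arrow.long A B C arrow.long a A D C arrow.long a A B a C arrow.long a a A D a C arrow.long a a A a D C arrow.long a a A a B a C arrow.long a a A B a a C arrow.long a a b A E a a C arrow.long^* a a b A a a E C arrow.long a a b A a a B b C arrow.long^* a a b A B a a b C arrow.long a a b a a b C arrow.long a a b a a b $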
== Esercizio 07
#introduction(
[Sia $G = (V, Sigma, P, S)$ la grammatica con $V = {S, A, B, C, X, Y, L, R}$, $Sigma = {a}$ e $P$ contenente le seguenti produzioni: #list([$S arrow.long L X R$;], [$L X arrow.long L Y Y A | a C$;], [$A X arrow.long Y Y A$;], [$A R arrow.long B R$;], [$Y B arrow.long B X$;], [$L B arrow.long L$;], [$C X arrow.long a C$;], [$C R arrow.long epsilon$.])]
)
#assignment(
[Riuscite a stabilire da quali stringhe é formato il linguaggio generato da $G$?]
)
Deriviamo qualche stringa:
- $S arrow.long L X R arrow.long a C R arrow.long a$;
- $S arrow.long L X R arrow.long L Y Y A R arrow.long^* L X X R arrow.long a C X R arrow.long a a C R arrow.long a a$;
- $S arrow.long L X R arrow.long^* L X X R arrow.long L Y Y A X R arrow.long L Y Y Y Y A R arrow.long^* L X X X X R arrow.long^* a a a a$.
- $S arrow.long L X R arrow.long^* L X X X X R arrow.long^* L Y Y Y Y Y Y Y Y A R arrow.long^* L X X X X X X X X R arrow.long^* a a a a a a a a$.
Il linguaggio $L(G)$ è l'insieme ${a^(2^n) bar.v n gt.eq 0}$: a ogni passata la produzione $A X arrow.long Y Y A$ (innescata da $L X arrow.long L Y Y A$) trasforma ogni $X$ in $Y Y$ e la $B$ di ritorno riconverte le $Y$ in $X$, quindi il numero di $X$ raddoppia; la passata finale con $L X arrow.long a C$, $C X arrow.long a C$ e $C R arrow.long epsilon$ trasforma le $X$ in $a$.
== Esercizio 08
#introduction([])
#assignment(
[Modificate la grammatica dell’esercizio 07 in modo da ottenere una grammatica di tipo 1 che generi lo stesso linguaggio.]
)
Modificando la regola $L B arrow.long L$ in $L B arrow.long C R L$ la grammatica diventa di tipo 1.
== Esercizio 09
#introduction(
[]
)
#assignment(
[Dimostrate che la grammatica $G = ({A, B, S}, {a, b}, P, S)$, con l’insieme delle produzioni $P$ elencate sotto, genera il linguaggio ${w in {a, b}^* bar.v forall x in {a, b}^* w eq.not x x}$: #list([$S arrow.long A B | B A | A | B$], [$A arrow.long a A a | a A b | b A a | b A b | a$], [$B arrow.long a B a | a B b | b B a | b B b | b$])]
)
Consideriamo in primo luogo i "casi base":
- $S arrow.long A arrow.long a$ va bene perché di lunghezza dispari;
- $S arrow.long B arrow.long b$ va bene perché di lunghezza dispari;
- $S arrow.long A B arrow.long^* a b$ va bene perché $a eq.not b$;
- $S arrow.long B A arrow.long^* b a$ va bene perché $b eq.not a$.
Consideriamo poi $S arrow.long A | B$:
$ S arrow.long A arrow.long & a A a arrow.long^* a^n A a^n arrow.long a^n a a^n; \ & a A a arrow.long^* a b^n A b^n a arrow.long a b^n a b^n a; \ & a A a arrow.long^* a {a,b}^n A {a, b}^n a arrow.long a {a,b}^n a {a, b}^n a; \ & a A b arrow.long^* dots space . $
$ S arrow.long B arrow.long & a B a arrow.long^* a^n B a^n arrow.long a^n b a^n; \ & a B a arrow.long^* a b^n B b^n a arrow.long a b^n b b^n a; \ & a B a arrow.long^* a {a,b}^n B {a, b}^n a arrow.long a {a,b}^n b {a, b}^n a; \ & a B b arrow.long^* dots space . $
Tutte le stringhe che vengono generate vanno bene perché sono di lunghezza dispari.
Consideriamo infine $S arrow.long A B | B A$ in due casi:
- se eseguiamo su $A$ e $B$ lo stesso numero di passi di derivazione abbiamo altri due casi:
- usiamo regole con lo "stesso contesto", ma alla fine avremo un carattere diverso nella posizione dove sono presenti $A$ e $B$;
- usiamo regole con "diverso contesto", ma la prima regola che rispecchia questa casistica ha almeno un carattere diverso (oltre ad avere il carattere in $A$ e $B$ diverso alla fine della derivazione);
- se eseguiamo su $A$ e $B$ un numero diverso di passi di derivazione, abbiamo due punti di partenza:
- partiamo da $A B$ e indichiamo con $n = 2i+1$ la lunghezza della stringa derivata da $A$ (il cui carattere centrale, in posizione $i+1$, è una $a$) e con $k = 2j+1$ la lunghezza della stringa derivata da $B$ (il cui carattere centrale è una $b$). Se la concatenazione fosse della forma $x x$, avremmo $abs(x) = i+j+1$; il carattere in posizione $i+1$ della prima copia di $x$ è il centro della parte derivata da $A$, cioè una $a$, mentre il carattere in posizione $i+1$ della seconda copia occupa la posizione $(i+j+1)+(i+1) = n+(j+1)$ dell'intera stringa, cioè il centro della parte derivata da $B$, dunque una $b$. Le due copie di $x$ differirebbero quindi in posizione $i+1$: contraddizione, perciò la stringa generata non è mai della forma $x x$.
- partiamo da $B A$ e facciamo lo stesso discorso, basta invertire l'ordine delle stringhe.
Abbiamo quindi dimostrato che $L(G) = {w in {a, b}^* bar.v forall x in {a, b}^* w eq.not x x}$.
#pagebreak()
= Lezione 03
== Esercizio 01
#introduction(
[]
)
#assignment(
[Costruite un automa a stati finiti che riconosca il linguaggio formato da tutte le stringhe sull’alfabeto ${a, b}$ nelle quali ogni a é seguita immediatamente da una $b$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione03-01.svg", width: 50%)
)
#v(12pt)
== Esercizio 02
#introduction(
[]
)
#assignment(
[Costruite un automa a stati finiti che riconosca il linguaggio formato da tutte le stringhe sull’alfabeto ${4, 5}$ che, interpretate come numeri in base $10$, rappresentano numeri interi che _non sono_ divisibili per $3$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione03-02.svg", width: 100%)
)
#v(12pt)
== Esercizio 03
#introduction(
[]
)
#assignment(
[Costruite un automa a stati finiti deterministico che riconosca il linguaggio formato da tutte le stringhe sull’alfabeto ${0, 1}$ che, interpretate come numeri in notazione binaria, denotano multipli di $4$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione03-03-01.svg", width: 60%)
)
#v(12pt)
#assignment(
[Utilizzando il non determinismo si riesce a costruire un automa con meno stati? Generalizzate l’esercizio a multipli di $2k$, dove $k > 0$ é un intero fissato.]
)
Utilizzando il non determinismo si usano ancora $4$ stati.
#v(12pt)
#figure(
image("assets-esercizi/lezione03-03-02.svg", width: 70%)
)
#v(12pt)
Generalizzando a multipli di $2k$, con $k > 0$, abbiamo:
- per il DFA $2^k$ stati;
- per il NFA $k+2$ stati.
== Esercizio 04
#introduction(
[]
)
#assignment(
[Costruite un automa a stati finiti che riconosca il linguaggio formato da tutte le stringhe sull’alfabeto ${0, 1}$ che, interpretate come numeri in notazione binaria, rappresentano multipli di $5$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione03-04.svg", width: 100%)
)
#v(12pt)
#pagebreak()
= Lezione 04
== Esercizio 01
#introduction(
[Considerate il linguaggio$ L = {w in {a,b}^* bar.v "il penultimo e il terzultimo simbolo di" w "sono uguali"}. $ ]
)
#assignment(
[Costruite un automa a stati finiti deterministico che accetta $L$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione04-01-01.svg", width: 100%)
)
#v(12pt)
#assignment(
[Costruite un automa a stati finiti non deterministico che accetta $L$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione04-01-02.svg", width: 80%)
)
#v(12pt)
#assignment(
[Dimostrate che per il linguaggio $L$ tutte le stringhe di lunghezza $3$ sono distinguibili tra loro.]
)
#v(12pt)
#align(center)[
#table(
columns: (10%, 10%, 10%, 10%, 10%, 10%, 10%, 10%, 10%),
inset: 10pt,
align: horizon,
[], [*$a a a$*], [*$a a b$*], [*$a b a$*], [*$a b b$*], [*$b a a$*], [*$b a b$*], [*$b b a$*], [*$b b b$*],
[*$a a a$*], [-], [$a$], [$epsilon$], [$epsilon$], [$epsilon$], [$epsilon$], [$a$], [$a a$],
[*$a a b$*], [-], [-], [$epsilon$], [$epsilon$], [$epsilon$], [$epsilon$], [$b b$], [$b$],
[*$a b a$*], [-], [-], [-], [$b$], [$a$], [$a a$], [$epsilon$], [$epsilon$],
[*$a b b$*], [-], [-], [-], [-], [$a a$], [$b$], [$epsilon$], [$epsilon$],
[*$b a a$*], [-], [-], [-], [-], [-], [$a$], [$epsilon$], [$epsilon$],
[*$b a b$*], [-], [-], [-], [-], [-], [-], [$epsilon$], [$epsilon$],
[*$b b a$*], [-], [-], [-], [-], [-], [-], [-], [$a$],
[*$b b b$*], [-], [-], [-], [-], [-], [-], [-], [-],
)
]
#v(12pt)
#assignment(
[Dimostrate che per il linguaggio $L$ la parola vuota é distinguibile da tutte le stringhe di lunghezza $3$.]
)
#v(12pt)
#align(center)[
#table(
columns: (10%, 10%, 10%, 10%, 10%, 10%, 10%, 10%, 10%),
inset: 10pt,
align: horizon,
[], [*$a a a$*], [*$a a b$*], [*$a b a$*], [*$a b b$*], [*$b a a$*], [*$b a b$*], [*$b b a$*], [*$b b b$*],
[*$epsilon$*], [$epsilon$], [$epsilon$], [$a b$], [$a$], [$a$], [$b a$], [$epsilon$], [$epsilon$],
)
]
#v(12pt)
#assignment(
[Utilizzando i risultati precedenti, ricavate un limite inferiore per il numero di stati di ogni automa deterministico che accetta L.]
)
L'insieme $X = {w in {a,b}^+ bar.v |w| = 3}$ é un insieme di parole tutte distinguibili tra loro rispetto al linguaggio $L$, come dimostrato nei punti precedenti, quindi ogni DFA per $L$ deve avere almeno $|X|$ stati, ovvero almeno $8$ stati.
== Esercizio 02
#introduction(
[]
)
#assignment(
[Costruite un insieme di stringhe distinguibili tra loro per ognuno dei seguenti linguaggi: #list([$L_1 = {w in {a,b}^* bar.v \#_a (w) = \#_b (w)}$,], [$L_2 = {a^n b^n bar.v n gt.eq 0}$,], [$L_3 = {w w^R bar.v w in {a,b}^*}$ dove, per ogni stringa $w$, $w^R$ indica la stringa $w$ scritta al contrario.])]
)
Costruiamo i seguenti insiemi, tutti di cardinalità infinita:
- $X_1 = {a^i bar.v i gt.eq 1}$: rispetto a $L_1$, le stringhe $a^i$ e $a^j$ con $i < j$ sono distinguibili dal suffisso $b^i$;
- $X_2 = {a^i bar.v i gt.eq 1}$: anche rispetto a $L_2$ il suffisso $b^i$ distingue $a^i$ da $a^j$;
- $X_3 = {(a b)^i bar.v i gt.eq 1}$: rispetto a $L_3$, le stringhe $(a b)^i$ e $(a b)^j$ con $i eq.not j$ sono distinguibili dal suffisso $(b a)^i$.
#assignment(
[Per alcuni di questi linguaggi riuscite ad ottenere insiemi di stringhe distinguibili di cardinalità infinita? Cosa significa ciò?]
)
Tutti e tre i linguaggi ammettono quindi insiemi infiniti di stringhe a due a due distinguibili: ciò significa che nessun automa a stati finiti può riconoscerli (servirebbero infiniti stati), ovvero non sono linguaggi di tipo 3.
== Esercizio 03
#introduction(
[Considerate l’automa di Meyer e Fischer $M_n$ presentato nella Lezione $4$ (caso peggiore della costruzione per sottoinsiemi) e mostrato nella seguente figura:]
)
#v(12pt)
#figure(
image("assets-teoria/meyer-fischer.svg", width: 50%)
)
#v(12pt)
#assignment(
[Descrivete a parole la proprietà che deve soddisfare una stringa per essere accettata da $M_n$. Riuscite a costruire un automa non deterministico, diverso da $M_n$, per lo stesso linguaggio, basandovi su tale proprietà?]
)
Non lo so fare.
#pagebreak()
= Lezione 05
== Esercizio 01
#introduction(
[Considerate il linguaggio $ "DOUBLE"_k = {w w bar.v w in {a,b}^k}, $ dove $k > 0$ é un numero intero fissato.]
)
#assignment(
[É abbastanza facile trovare un fooling set di cardinalità $2^k$ per questo linguaggio. Riuscite a trovare un fooling set o un extended fooling set di cardinalità maggiore?]
)
Considero l'insieme $ P = {(x,x) bar.v x in {a,b}^k}. $
Questo è un extended fooling set per $"DOUBLE"_k$ perché:
+ per ogni coppia $(x, x) in P$ vale $x x in "DOUBLE"_k$;
+ per ogni $x, y in {a,b}^k$ con $x eq.not y$ vale $x y in.not "DOUBLE"_k$, perché le due metà sono diverse.
La cardinalità di questo insieme è $2^k$; non penso si riesca a fare di meglio.
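Per esempio, con $k = 1$ si ha $P = {(a, a), (b, b)}$: le concatenazioni $a a$ e $b b$ appartengono a $"DOUBLE"_1$, mentre le concatenazioni incrociate $a b$ e $b a$ non vi appartengono.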
== Esercizio 02
#introduction(
[Considerate il linguaggio $ "PAL"_k = {w in {a,b}^k bar.v w = w^R}, $ dove $k$ é un intero fissato.]
)
#assignment(
[Qual é l’extended fooling set per $"PAL"_k$ di cardinalità maggiore che riuscite a trovare?]
)
Considero, per $k$ pari, l'insieme $ P = {(x, x^R) bar.v x in {a,b}^(k/2)}. $
Questo è un extended fooling set per $"PAL"_k$ perché:
+ $x x^R in "PAL"_k$, essendo un palindromo di lunghezza $k$;
+ per $x eq.not y$ si ha $x y^R in.not "PAL"_k$: se fosse un palindromo varrebbe $x y^R = (x y^R)^R = y x^R$, da cui $x = y$.
La cardinalità di questo insieme è $2^(k/2)$; per $k$ dispari si ottiene in modo analogo un insieme di cardinalità $2^(ceil(k/2))$ includendo anche il carattere centrale.
== Esercizio 03
#introduction(
[Considerate il linguaggio $ K_k = {w bar.v w = x_1 dot.op dots dot.op x_m dot.op x bar.v m > 0, x_1, dots, x_m, x in {a,b}^k, exists i in [1,m] bar.v x_i = x}, $ dov $k$ é un intero fissato. Si può osservare che ogni stringa $w$ di questo linguaggio é la concatenazione di blocchi di lunghezza $k$, in cui l’ultimo blocco coincide con uno dei blocchi precedenti.]
)
#assignment(
[Riuscite a costruire un (extended) fooling set di cardinalità $2^k$ o maggiore per il linguaggio $K_k$?]
)
Considero l'insieme $ P = {(x, x) bar.v x in {a,b}^k}. $
Questo è un extended fooling set per $K_k$ perché:
+ per ogni $x$ vale $x x in K_k$: l'ultimo blocco coincide con il blocco precedente;
+ per ogni $x, y in {a,b}^k$ con $x eq.not y$ vale $x y in.not K_k$: l'ultimo blocco è diverso dall'unico blocco che lo precede.
La cardinalità di questo insieme è $2^k$.
#assignment(
[Quale é l’informazione principale che un automa non deterministico può scegliere di ricordare nel proprio controllo a stati finiti durante la lettura di una stringa per riuscire a riconoscere $K_k$?]
)
Un NFA dovrebbe formare prima l'albero di tutte le possibili stringhe di lunghezza $k$, inserendo la scommessa nei nodi ad altezza $k-1$. Questa scommessa fa tornare indietro alla radice, e si "vince" la scommessa quando si finisce nel nodo ad altezza $k$, ovvero la stringa di lunghezza $k$ letta ora é quella che sarà presente anche alla fine. Il numero di stati per questa parte é $2^(k+1) -1$.
Il controllo viene poi fatto con $k 2^k$ stati, dove solo l'ultimo é finale. Vanno aggiunti $(k-1) 2^k$ stati che cancellano gruppi di lunghezza $k$ prima dell'ultimo gruppo.
Il numero totale di stati é quindi $k 2^(k+1) + 2^k - 1$.
#assignment(
[Supponete di costruire un automa deterministico per riconoscere $K_k$. Cosa ha necessità di ricordare l’automa nel proprio controllo a stati finiti mentre legge la stringa in input?]
)
Un DFA deve ricordarsi le sequenze lunghe $k$ che ha trovato nella stringa.
#assignment(
[Utilizzando il concetto di distinguibilità, dimostrate che ogni automa deterministico che riconosce $K_k$ deve avere almeno $2^(2^k)$ stati.]
)
Costruisco l'insieme di parole $ X = {w_S bar.v S subset.eq {a,b}^k} $ dove $w_S$ indica la concatenazione, in un ordine fissato, dei blocchi dell'insieme $S$ (con $w_emptyset = epsilon$).
Queste parole sono a due a due distinguibili rispetto a $K_k$: prese $S_1, S_2 subset.eq {a,b}^k$ con $S_1 eq.not S_2$, a meno di scambiarle esiste $x in S_1 - S_2$, e allora $ w_(S_1) dot.op x in K_k and w_(S_2) dot.op x in.not K_k. $
La cardinalità di questo insieme è $|X| = 2^(|{a,b}^k|) = 2^(2^k)$, quindi ogni automa deterministico che riconosce $K_k$ deve avere almeno $2^(2^k)$ stati.
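Per esempio, con $k = 1$ i sottoinsiemi di ${a, b}$ sono $emptyset, {a}, {b}, {a, b}$, a cui corrispondono le parole $epsilon, a, b, a b$: queste sono a due a due distinguibili (ad esempio $a$ e $b$ sono distinguibili dal suffisso $a$, perché $a a in K_1$ e $b a in.not K_1$), quindi ogni DFA per $K_1$ ha almeno $2^(2^1) = 4$ stati.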
== Esercizio 04
#introduction(
[Considerate il linguaggio $ J_k = {w bar.v w = x dot.op x_1 dot.op dots dot.op x_m bar.v m > 0, x_1, dots, x_m, x in {a,b}^k, exists i in [1,m] bar.v x_i = x}, $ dove $k$ é un intero fissato. Si può osservare che ogni stringa $w$ di questo linguaggio é la concatenazione di blocchi di lunghezza $k$, in cui il primo blocco coincide con uno dei blocchi successivi; ogni stringa di $J_k$ si ottiene “rovesciando” una stringa del linguaggio $K_n$ dell’esercizio $3$.]
)
#assignment(
[Supponete di costruire automi a stati finiti per $J_k$. Valgono ancora gli stessi limiti inferiori ottenuti per $K_n$ o si riescono a costruire automi più piccoli? Rispondete sia nel caso di automi deterministici sia in quello di automi non deterministici.]
)
Un DFA deve prima costruire l'albero di altezza $k$ che contiene tutte le possibili stringhe di lunghezza $k$ e poi, dopo ogni foglia, deve costruire un ciclo di $k$ stati che riconosce la sequenza definita dal cammino a quella foglia. Vanno aggiunti $(k-1) 2^k$ stati che cancellino le sequenze lunghe $k$ che non sono uguali alla prima letta.
Il numero totale di stati é $k 2^(k+1) + 2^k - 1$.
Un NFA deve fare la stessa cosa del DFA ma mettendo la scommessa nelle foglie.
Il numero totale di stati é ancora $k 2^(k+1) + 2^k - 1$.
== Esercizio 05
#introduction(
[]
)
#assignment(
[Ispirandovi all’esercizio $3$, fornite limiti inferiori per il numero di stati degli automi che riconoscono il seguente linguaggio: $ E_k = {w bar.v w = x_1 dot.op dots dot.op x_m bar.v m > 0, x_1, dots, x_m in {a,b}^k, exists i,j in [1,m] bar.v x_i = x_j}, $ dove $k$ é un intero fissato. Considerate sia il caso deterministico che quello non deterministico.]
)
Un NFA dovrebbe formare prima l'albero di tutte le possibili stringhe di lunghezza $k$, inserendo la scommessa nei nodi ad altezza $k-1$. Questa scommessa fa tornare indietro alla radice, e si "vince" la scommessa quando si finisce nel nodo ad altezza $k$, ovvero la stringa di lunghezza $k$ letta ora é quella che sarà presente successivamente. Il numero di stati per questa parte é $2^(k+1) -1$.
Il controllo viene poi fatto con $k 2^k$ stati, dove solo l'ultimo è finale. Vanno aggiunti $(k-1) 2^k$ stati che cancellano gli altri gruppi di lunghezza $k$.
Il numero totale di stati é quindi $k 2^(k+1) + 2^k - 1$.
Un DFA deve ricordarsi le sequenze lunghe $k$ che ha trovato nella stringa. Costruendo l'insieme $X$ dell'esercizio $3$ si può concludere che ogni DFA deve avere almeno $2^(2^k)$ stati.
#pagebreak()
= Lezione 06
== Esercizio 01
#introduction(
[]
)
#assignment(
[Scrivete un’espressione regolare per il linguaggio formato da tutte le stringhe sull’alfabeto ${0, 1}$ che, interpretate come numeri in notazione binaria, rappresentano potenze di $2$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione06-01.svg", width: 40%)
)
#v(12pt)
Imposto il sistema di equazioni: $ &cases(X_0 = 0 X_0 + 1 X_1, X_1 = 0 X_1 + epsilon) \ &cases(X_0 = 0 X_0 + 1 0^*, X_1 = 0^*) quad . $
L'espressione regolare corrispondente è: $ X_0 = 0^* 1 0^* . $
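Per esempio, la stringa $0100$ (che in binario vale $4$) è descritta da $0^* 1 0^*$, mentre $0110$ (che vale $6$) non lo è, contenendo due $1$.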
== Esercizio 02
#introduction(
[]
)
#assignment(
[Scrivete un’espressione regolare per il linguaggio formato da tutte le stringhe sull’alfabeto ${0, 1}$ che, interpretate come numeri in notazione binaria, _non rappresentano_ potenze di $2$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione06-02.svg", width: 60%)
)
#v(12pt)
Imposto il sistema di equazioni: $ &cases(X_0 = 0 X_0 + 1 X_1, X_1 = 0 X_1 + 1 X_2, X_2 = 0 X_2 + 1 X_2 + epsilon) \ &cases(X_0 = 0 X_0 + 1 X_1, X_1 = 0 X_1 + 1 (0 + 1)^*, X_2 = (0 + 1)^*) \ &cases(X_0 = 0 X_0 + 1 0^* 1 (0 + 1)^*, X_1 = 0^* 1 (0 + 1)^*) quad . $
L'espressione regolare corrispondente é: $ X_0 = 0^* 1 0^* 1 (0 + 1)^*. $
== Esercizio 03
#introduction(
[]
)
#assignment(
[Scrivete un’espressione regolare per il linguaggio formato da tutte le stringhe sull’alfabeto ${a, b}$ in cui le $a$ e le $b$ si alternano (come $a b a b$, $b a b$, $b$, ecc). Disegnate poi un automa per lo stesso linguaggio.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione06-03.svg", width: 50%)
)
#v(12pt)
Imposto il sistema di equazioni: $ &cases(X_0 = a X_1 + b X_2 + epsilon, X_1 = b X_2 + epsilon, X_2 = a X_1 + epsilon) \ &cases(X_0 = a X_1 + b (a X_1 + epsilon) + epsilon, X_1 = b (a X_1 + epsilon) + epsilon) \ &cases(X_0 = (a + b a) X_1 + b + epsilon, X_1 = b a X_1 + b + epsilon) \ &cases(X_0 = (a + b a) (b a)^* (b + epsilon) + b + epsilon, X_1 = (b a)^* (b + epsilon)) quad . $
L'espressione regolare corrispondente è: $ X_0 = (a + b a)(b a)^* b + (a + b a)(b a)^* + b + epsilon . $ Il termine $b$ da solo è necessario per la stringa formata dalla sola lettera $b$, che è alternante ma non è generata dagli altri addendi.
== Esercizio 04
#introduction(
[]
)
#assignment(
[Scrivete un’espressione regolare per il linguaggio formato da tutte le stringhe sull’alfabeto ${a, b}$ nelle quali ogni $a$ é seguita immediatamente da una $b$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione06-04.svg", width: 40%)
)
#v(12pt)
Imposto il sistema di equazioni: $ &cases(X_0 = b X_0 + a X_1 + epsilon, X_1 = b X_0) \ &cases(X_0 = b X_0 + a b X_0 + epsilon, X_1 = b X_0) \ &cases(X_0 = (b + a b) X_0 + epsilon, X_1 = b X_0) \ &cases(X_0 = (b + a b)^*, X_1 = b X_0) quad . $
L'espressione regolare corrispondente è: $ X_0 = (b + a b)^* . $
== Esercizio 05
#introduction(
[]
)
#assignment(
[Scrivete un’espressione regolare per il linguaggio formato da tutte le stringhe sull’alfabeto ${a, b}$ che contengono un numero di $a$ pari e un numero di $b$ pari.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione06-05.svg", width: 70%)
)
#v(12pt)
Imposto il sistema di equazioni: $ &cases(X_0 = a X_1 + b X_2 + epsilon, X_1 = a X_0 + b X_3, X_2 = b X_0 + a X_3, X_3 = a X_1 + b X_2) \ &cases(X_0 = a X_1 + b X_2 + epsilon, X_1 = a X_0 + b (a X_1 + b X_2), X_2 = b X_0 + a (a X_1 + b X_2)) \ &cases(X_0 = a X_1 + b X_2 + epsilon, X_1 = b a X_1 + a X_0 + b b X_2, X_2 = a b X_2 + b X_0 + a a X_1) \ &cases(X_0 = a X_1 + b (a b)^* (b X_0 + a a X_1) + epsilon, X_1 = b a X_1 + a X_0 + b b (a b)^* (b X_0 + a a X_1), X_2 = (a b)^* (b X_0 + a a X_1)) \ &cases(X_0 = a X_1 + b (a b)^* b X_0 + b (a b)^* a a X_1 + epsilon, X_1 = (b a + b b (a b)^* a a) X_1 + a X_0 + b b (a b)^* b X_0) \ &cases(X_0 = b (a b)^* b X_0 + (a + b (a b)^* a a) X_1 + epsilon, X_1 = (b a + b b (a b)^* a a)^* (a + b b (a b)^* b) X_0) quad . $
$ X_0 &= b (a b)^* b X_0 + (a + b (a b)^* a a) (b a + b b (a b)^* a a)^* (a + b b (a b)^* b) X_0 + epsilon \ X_0 &= (b (a b)^* b + (a + b (a b)^* a a) (b a + b b (a b)^* a a)^* (a + b b (a b)^* b)) X_0 + epsilon . $
L'espressione regolare corrispondente é: $ X_0 &= (b (a b)^* b + (a + b (a b)^* a a) (b a + b b (a b)^* a a)^* (a + b b (a b)^* b))^* . $
== Esercizio 06
#introduction(
[]
)
#assignment(
[Scrivete un’espressione regolare per il linguaggio formato da tutte le stringhe sull’alfabeto ${4, 5}$ che, interpretate come numeri in base $10$, rappresentano interi che non sono divisibili per $3$.]
)
#v(12pt)
#figure(
image("assets-esercizi/lezione06-06.svg", width: 60%)
)
#v(12pt)
Imposto il sistema di equazioni: $ &cases(X_0 = 0 X_0 + 1 X_1 + epsilon, X_1 = 0 X_2 + 1 X_0, X_2 = 0 X_1 + 1 X_2) \ &cases(X_0 = 0 X_0 + 1 X_1 + epsilon, X_1 = 0 1^* 0 X_1 + 1 X_0, X_2 = 1^* 0 X_1) \ &cases(X_0 = 0 X_0 + 1 X_1 + epsilon, X_1 = (0 1^* 0)^* 1 X_0) quad . $
$ X_0 &= 0 X_0 + 1 (0 1^* 0)^* 1 X_0 + epsilon \ X_0 &= (0 + 1(0 1^* 0)^* 1) X_0 + epsilon $
L'espressione regolare corrispondente é: $ X_0 = (0 + 1 (0 1^* 0)^* 1)^* . $
|
|
https://github.com/Its-Alex/resume | https://raw.githubusercontent.com/Its-Alex/resume/master/lib/languages.typ | typst | MIT License | #import "components/title.typ": customTitle
#let languages(title, languages) = [
#customTitle(title)
#grid(
columns: (50%, 50%),
gutter: 0pt,
row-gutter: 1.5em,
..languages.map((language) => [
#text(weight: 600)[#language.name] \
#text()[#language.level]
])
)
] |
https://github.com/ClazyChen/Table-Tennis-Rankings | https://raw.githubusercontent.com/ClazyChen/Table-Tennis-Rankings/main/history_CN/2019/MS-07.typ | typst |
#set text(font: ("Courier New", "NSimSun"))
#figure(
caption: "Men's Singles (1 - 32)",
table(
columns: 4,
[排名], [运动员], [国家/地区], [积分],
[1], [马龙], [CHN], [3637],
[2], [樊振东], [CHN], [3480],
[3], [林高远], [CHN], [3351],
[4], [许昕], [CHN], [3298],
[5], [梁靖崑], [CHN], [3283],
[6], [蒂姆 波尔], [GER], [3225],
[7], [张本智和], [JPN], [3206],
[8], [林昀儒], [TPE], [3151],
[9], [周雨], [CHN], [3120],
[10], [雨果 卡尔德拉诺], [BRA], [3117],
[11], [#text(gray, "张继科")], [CHN], [3104],
[12], [水谷隼], [JPN], [3065],
[13], [张禹珍], [KOR], [3055],
[14], [金光宏畅], [JPN], [3045],
[15], [于子洋], [CHN], [3038],
[16], [乔纳森 格罗斯], [DEN], [3038],
[17], [丹羽孝希], [JPN], [3035],
[18], [弗拉基米尔 萨姆索诺夫], [BLR], [3033],
[19], [王楚钦], [CHN], [3028],
[20], [方博], [CHN], [3027],
[21], [闫安], [CHN], [3025],
[22], [马蒂亚斯 法尔克], [SWE], [3022],
[23], [郑荣植], [KOR], [3012],
[24], [迪米特里 奥恰洛夫], [GER], [3002],
[25], [安宰贤], [KOR], [2997],
[26], [吉村和弘], [JPN], [2981],
[27], [托米斯拉夫 普卡], [CRO], [2961],
[28], [刘丁硕], [CHN], [2952],
[29], [郑培峰], [CHN], [2947],
[30], [周启豪], [CHN], [2946],
[31], [孙闻], [CHN], [2945],
[32], [帕特里克 弗朗西斯卡], [GER], [2941],
)
)#pagebreak()
#set text(font: ("Courier New", "NSimSun"))
#figure(
caption: "Men's Singles (33 - 64)",
table(
columns: 4,
[排名], [运动员], [国家/地区], [积分],
[33], [吉村真晴], [JPN], [2940],
[34], [西蒙 高兹], [FRA], [2934],
[35], [#text(gray, "丁祥恩")], [KOR], [2927],
[36], [贝内迪克特 杜达], [GER], [2906],
[37], [马克斯 弗雷塔斯], [POR], [2905],
[38], [艾曼纽 莱贝松], [FRA], [2889],
[39], [林钟勋], [KOR], [2886],
[40], [卢文 菲鲁斯], [GER], [2885],
[41], [李尚洙], [KOR], [2882],
[42], [<NAME>], [GER], [2881],
[43], [<NAME>], [SVK], [2880],
[44], [<NAME>], [IND], [2875],
[45], [夸德里 阿鲁纳], [NGR], [2867],
[46], [#text(gray, "大岛祐哉")], [JPN], [2865],
[47], [利亚姆 皮切福德], [ENG], [2865],
[48], [朱霖峰], [CHN], [2856],
[49], [徐晨皓], [CHN], [2850],
[50], [赵胜敏], [KOR], [2849],
[51], [上田仁], [JPN], [2843],
[52], [马特], [CHN], [2842],
[53], [<NAME>], [KOR], [2837],
[54], [安东 卡尔伯格], [SWE], [2835],
[55], [汪洋], [SVK], [2834],
[56], [赵子豪], [CHN], [2832],
[57], [神巧也], [JPN], [2830],
[58], [森园政崇], [JPN], [2828],
[59], [庄智渊], [TPE], [2828],
[60], [吉田雅己], [JPN], [2826],
[61], [塞德里克 纽廷克], [BEL], [2824],
[62], [PERSSON Jon], [SWE], [2819],
[63], [及川瑞基], [JPN], [2819],
[64], [薛飞], [CHN], [2809],
)
)#pagebreak()
#set text(font: ("Courier New", "NSimSun"))
#figure(
caption: "Men's Singles (65 - 96)",
table(
columns: 4,
[排名], [运动员], [国家/地区], [积分],
[65], [ZHAI Yujia], [DEN], [2801],
[66], [KOU Lei], [UKR], [2799],
[67], [特鲁斯 莫雷加德], [SWE], [2799],
[68], [雅克布 迪亚斯], [POL], [2797],
[69], [帕纳吉奥迪斯 吉奥尼斯], [GRE], [2796],
[70], [TAKAKIWA Taku], [JPN], [2787],
[71], [GERELL Par], [SWE], [2782],
[72], [松平健太], [JPN], [2780],
[73], [安德烈 加奇尼], [CRO], [2780],
[74], [克里斯坦 卡尔松], [SWE], [2779],
[75], [WEI Shihao], [CHN], [2779],
[76], [周恺], [CHN], [2771],
[77], [户上隼辅], [JPN], [2767],
[78], [巴斯蒂安 斯蒂格], [GER], [2765],
[79], [HIRANO Yuki], [JPN], [2765],
[80], [沙拉特 卡马尔 阿昌塔], [IND], [2764],
[81], [邱党], [GER], [2761],
[82], [村松雄斗], [JPN], [2761],
[83], [卡纳克 贾哈], [USA], [2761],
[84], [王臻], [CAN], [2760],
[85], [詹斯 伦德奎斯特], [SWE], [2748],
[86], [特里斯坦 弗洛雷], [FRA], [2746],
[87], [WANG Zengyi], [POL], [2743],
[88], [<NAME>], [KOR], [2742],
[89], [MONTEIRO Joao], [POR], [2740],
[90], [SHIBAEV Alexander], [RUS], [2740],
[91], [诺沙迪 阿拉米扬], [IRI], [2738],
[92], [SKACHKOV Kirill], [RUS], [2731],
[93], [博扬 托基奇], [SLO], [2726],
[94], [LANDRIEU Andrea], [FRA], [2726],
[95], [徐瑛彬], [CHN], [2724],
[96], [HABESOHN Daniel], [AUT], [2724],
)
)#pagebreak()
#set text(font: ("Courier New", "NSimSun"))
#figure(
caption: "Men's Singles (97 - 128)",
table(
columns: 4,
[排名], [运动员], [国家/地区], [积分],
[97], [蒂亚戈 阿波罗尼亚], [POR], [2717],
[98], [PENG Wang-Wei], [TPE], [2716],
[99], [廖振珽], [TPE], [2712],
[100], [<NAME>], [SWE], [2706],
[101], [宇田幸矢], [JPN], [2706],
[102], [牛冠凯], [CHN], [2705],
[103], [达科 约奇克], [SLO], [2704],
[104], [<NAME>], [SLO], [2702],
[105], [<NAME>], [POL], [2699],
[106], [<NAME>], [ALG], [2698],
[107], [<NAME>], [RUS], [2688],
[108], [#text(gray, "朴申赫")], [PRK], [2688],
[109], [安德斯 林德], [DEN], [2687],
[110], [GERALDO Joao], [POR], [2687],
[111], [SALIFOU Abdel-Kader], [FRA], [2685],
[112], [尼马 阿拉米安], [IRI], [2685],
[113], [田中佑汰], [JPN], [2685],
[114], [DRINKHALL Paul], [ENG], [2683],
[115], [陈建安], [TPE], [2678],
[116], [MATSUDAIRA Kenji], [JPN], [2676],
[117], [斯特凡 菲格尔], [AUT], [2675],
[118], [SIRUCEK Pavel], [CZE], [2674],
[119], [AKKUZU Can], [FRA], [2674],
[120], [黄镇廷], [HKG], [2673],
[121], [SIPOS Rares], [ROU], [2672],
[122], [木造勇人], [JPN], [2666],
[123], [LIU Yebo], [CHN], [2666],
[124], [奥维迪乌 伊奥内斯库], [ROU], [2664],
[125], [金珉锡], [KOR], [2664],
[126], [#text(gray, "高宁")], [SGP], [2663],
[127], [CHIANG Hung-Chieh], [TPE], [2661],
[128], [LAM Siu Hang], [HKG], [2657],
)
) |
|
https://github.com/crd2333/crd2333.github.io | https://raw.githubusercontent.com/crd2333/crd2333.github.io/main/src/docs/AI/index.typ | typst | #import "/src/components/TypstTemplate/lib.typ": *
#show: project.with(
title: "Artificial Intelligence",
lang: "zh",
)
- AI 笔记 |
|
https://github.com/voXrey/cours-informatique | https://raw.githubusercontent.com/voXrey/cours-informatique/main/typst/07-terminaison.typ | typst | #import "@preview/codly:0.2.1": *
#show: codly-init.with()
#codly()
#set text(font:"Roboto Serif")
= Terminaison de Programme <terminaison-de-programme>
#strong[Objectif :] Déterminer si un programme termine son exécution ou boucle indéfiniment.
```c
int f(int n) {
if (n % 2 == 0) f(n);
else return 1;
}
int g(int n) {
if (n == 0) return 1;
else return n*g(n-1);
}
```
#quote(
block: true,
)[
Dans cet exemple on voit bien que la fonction `f` ne termine pas toujours alors que la fonction `g` si.
]
== I - Terminaison d’un programme récursif <i---terminaison-dun-programme-récursif>
#strong[Important :] Un programme récursif se démontre par récurrence.
#quote(
block: true,
)[
Énoncé : $forall n in NN "g(n) termine"$.
Initialisation : Le programme termine directement (c’est le cas de base).
Hérédité : Pour n \> 0, le programme appelle g\(n-1), qui termine par HR puis g\(n) termine.
]
==== Suite de Fibonacci <suite-de-fibonacci>
```c
/// Suite de Fibonacci
int fibo(int n) {
if (n==0 || n==1) return 1;
else return fibo(n-1) + fibo(n-2);
}
```
#quote(
block: true,
)[
Énoncé : \$\\forall n \\in \\N\$ fibo\(n) termine.
Initialisation : Pour n \= 0 ou n \= 1 le programme termine (cas de base).
Hérédité : Pour n \> 1, on suppose que fibo\(n-1) et fibo\(n-2) terminent, or fibo\(n) n’effectue que ces deux appels, donc fibo\(n) termine également.
]
==== Suite de Syracuse <suite-de-syracuse>
```c
/// Suite de Syracuse
// pré-cond : n > 0
int syracuse(int n) {
if (n==1) return 0;
else if (n % 2 == 0)
return 1 + syracuse(n/2);
else
return 1 + syracuse(3*n + 1);
}
```
On est bloqué puisque 3n+1 \< n.~Il s’agit encore à ce jour d’un problème ouvert, on ne sait pas si ce programme termine.
==== Triangle de Pascal <triangle-de-pascal>
Pré-condition : $n in NN$ et $k in bracket.l.double 0 , n bracket.r.double$
```c
/// Triangle de Pascal
int binom(int k, int n) {
if (k == 0 || n == 0) return 1;
else return binom(n-1, k-1) + binom(n-1, k);
}
```
Nous allons procéder à une récurrence sur n
#quote(
block: true,
)[
Initialisation : n \= 0, le programme termine.
Hérédité : n \> 0, plusieurs cas :
- Si k \= 0 le programme termine.
- Si k \> 0 alors $n - 1 gt.eq 0$ et $k - 1 gt.eq 0$ donc ces valeurs (et k) respectent la pré-condition, donc les fonctions appelées récursivement terminent par HR.
]
== II - Terminaison de programmes impératifs <ii---terminaison-de-programmes-impératifs>
Les questions de terminaisons ne se poseront que pour les boucles en impératif. Pour montrer la terminaison d’une boucle on exhibe un variant de boucle.
C’est une quantité mathématique définie en utilisant les variables du programme.
- Cette quantité est à valeurs dans $NN$
- Cette quantité décroît strictement dans lors d’une exécution de boucle.
==== Recherche dichotomique <recherche-dichotomique>
Pré-condition : a est trié.
```c
/// Recherche dichotomique
int mem(int len, int* a, int elt) {
int i = 0;
int j = len;
while (i < j) {
int mid = (j+i)/2;
if (a[mid] == elt) return mid;
if (a[mid] > elt) j = mid;
else i = mid+1;
}
return -1;
}
```
#quote(
block: true,
)[
Variant de boucle choisi : `j-i`.\
Début de boucle : on a les variables i et j.\
Après un tour de boucle i et j valent i’ et j’.\
Si $a"[mid]" = "elt"$ : le programme termine.\
SI $a"[mid]" > "elt"$ : $i' = i$ et $j' = ⌊(i + j)/2⌋$\
- Si $i + j$ est pair alors $j prime - i prime = lr((i + j)) \/ 2 - i = lr((j - i)) \/ 2 < j - i$ car $j - i > 0$\
- si $i + j$ est impair alors $j' - i' = (i + j - 1)/2 - i = (j - i - 1)/2 < j - i$\
Si $a"[mid]" < "elt"$ alors les calculs sont analogues.\
Donc $j - i$ décroît strictement dans $NN$, elle atteindra donc 0 ce qui provoquera l’arrêt de la boucle.
]
==== Cas des boucles for <cas-des-boucles-for>
Une boucle for peut toujours se traduire par une boucle while.
Pour ```c for(int i = a; i < b; i++)``` on a toujours `b-i-a` comme variant. En principe elle termine toujours (sauf si mal écrite) mais par précaution on donnera le variant.
Exemple de traduction d’une boucle for en boucle while :
```c
int a,b;
/// Boucle for
for(int i = a; i < b; i++) foo();
/// Boucle while associée
int i = a;
while (i < b) {
foo();
i++;
}
```
|
|
https://github.com/cadojo/correspondence | https://raw.githubusercontent.com/cadojo/correspondence/main/src/options/options.typ | typst | MIT License | #let some(value) = value != none
|
https://github.com/Myriad-Dreamin/typst.ts | https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/packages/typst.node/npm/linux-x64-gnu/README.md | markdown | Apache License 2.0 | # `@myriaddreamin/typst-ts-node-compiler-linux-x64-gnu`
This is the **x86_64-unknown-linux-gnu** binary for `@myriaddreamin/typst-ts-node-compiler`
|
https://github.com/jeffa5/typstfmt | https://raw.githubusercontent.com/jeffa5/typstfmt/main/README.md | markdown | MIT License | # Typst formatter
`typstfmt` is a formatter for [Typst](https://typst.app) code.
It only formats inputs that are valid Typst code.
It aims to make the code consistent.
## Install
### Cargo
```sh
cargo install --git https://github.com/jeffa5/typstfmt
```
### Nix
The flake provides an overlay which you can use with nixpkgs.
### pre-commit
Add this to your `.pre-commit-config.yaml`:
```yaml
- repo: https://github.com/jeffa5/typstfmt
rev: '' # Use the sha / tag you want to point at
hooks:
- id: typstfmt
```
## Run
```sh
# format stdin
typstfmt
# format typst files in current directory
typstfmt *.typ
```
### Nix
```sh
nix run github:jeffa5/typstfmt
```
## Configuration
You can configure some aspects of the formatting with a `typstfmt.toml` file in the current directory, or specify its location with the `--config-path` flag.
The default configuration is:
```toml
indent = 2 # spaces
spacing = true # whether to manage spacing
```
## Development
### Fuzzing
List some fuzz targets:
```sh
cargo fuzz list
```
Then run one, e.g. for `crash_proof`:
```sh
cargo fuzz run crash_proof
```
### Testing against the package repo
The [`typst packages`](https://github.com/typst/packages) repo is a submodule (`typst-packages`).
We can run the formatter against it to check the formatting and for erroneous outputs with:
```sh
cargo run -- typst-packages --check
```
And try to format them all (useful for manual diffing):
```sh
cargo run -- typst-packages
```
## Acknowledgements
`typstfmt` is a rewrite of [`typst-fmt`](https://github.com/astrale-sharp/typst-fmt) which aims to retain all original text whilst also be able to be flexible in its configuration.
I tried writing some rules for that formatter before beginning the redesign present here.
|
https://github.com/thanhdxuan/dacn-report | https://raw.githubusercontent.com/thanhdxuan/dacn-report/master/datn-week-1/contents/04-bonus.typ | typst | = Những góp ý của hội đồng báo cáo đồ án chuyên ngành
== Xác định nhóm người dùng ứng dụng
_Vấn đề:_ Cần xác định nhóm người dùng cuối của hệ thống, có thể (Trong báo cáo chỉ xác định là nhà quản lý ngân hàng).
_Đề xuất của nhóm:_
Với những đặc tả của hệ thống hiện tại, nhóm người dùng hệ thống phải là những quản lý cấp cao, có khả năng sử dụng các chức năng của hệ thống để đánh giá, phân tích dựa trên các thông tin mà hệ thống cung cấp, từ đó tham khảo và có thể đưa ra những quyết định liên quan đến việc vay vốn ngân hàng.
== Xác thực thông tin giấy tờ người dùng cung cấp
_Vấn đề:_ Làm sao để xác thực các thông tin, giấy tờ của người dùng cung cấp cho hệ thống? Quy trình xác thực bao gồm những quy định nào, thông qua những đơn vị (hoặc bộ phận nào)?
_Đề xuất của nhóm:_
Các thông tin cần xác định khi vay vốn có thể chia thông tin khách hàng khi vay vốn thành 3 nhóm chính @Quytrinh_2023:
- Thông tin cá nhân: bao gồm họ tên, ngày tháng năm sinh, giới tính, địa chỉ thường trú, số chứng minh nhân dân/căn cước công dân, số điện thoại, email,...
- Thông tin tài chính: bao gồm thu nhập, chi phí, lịch sử tín dụng,...
- Thông tin về mục đích vay vốn: bao gồm mục đích vay vốn, số tiền vay, thời hạn vay,...
Thông tin khách hàng có thể được xác định thông qua các phương pháp sau:
- Trao đổi trực tiếp với khách hàng: Đây là phương pháp truyền thống và phổ biến nhất. Cán bộ ngân hàng sẽ trao đổi trực tiếp với khách hàng để thu thập thông tin.
- Nhận hồ sơ vay vốn: Hồ sơ vay vốn thường bao gồm các giấy tờ chứng minh thông tin cá nhân, tài chính và mục đích vay vốn của khách hàng.
- Thẩm định tín dụng: Thẩm định tín dụng là quá trình ngân hàng đánh giá khả năng vay vốn và khả năng trả nợ của khách hàng. Quá trình này thường bao gồm việc thu thập thông tin từ các nguồn khác nhau, chẳng hạn như cơ quan tín dụng quốc gia, cơ quan thuế,...
Về việc thẩm định hồ sơ, có thể sử dụng các phương pháp:
- Thẩm định thực tế: Đây là phương pháp thẩm định phổ biến nhất. Cán bộ thẩm định của ngân hàng sẽ trực tiếp đến kiểm tra thực tế tài sản thế chấp để xác định giá trị và khả năng thanh khoản của tài sản.
- Thẩm định giá: Ngân hàng sẽ thuê một công ty thẩm định giá chuyên nghiệp để thẩm định giá tài sản thế chấp.
- Thẩm định qua hồ sơ: Ngân hàng sẽ thẩm định tài sản thế chấp dựa trên hồ sơ pháp lý và tài liệu liên quan.
_Vì vậy_, việc thẩm định các thông tin mà khách hàng cung cấp đòi hỏi một đội ngũ có chuyên môn về tài chính, định giá. Do đó, nhóm đề xuất các thông tin về hồ sơ sẽ phải được các nhà quản lý ngân hàng chuyển cho đội ngũ thẩm định, từ đó có thể xác thực được thông tin mà khách hàng cung cấp trước khi sử dụng các chức năng của bộ công cụ.
== Xác định vai trò của mình và người dùng trong hệ thống
_Vấn đề:_ Trong sơ đồ use-case của nhóm chưa thể hiện được vai trò của mình trong hệ thống, với sơ đồ này, chỉ thể hiện mình là nhóm phát triển hệ thống và không thể can thiệp vào hệ thống.
#sym.arrow.r.double
Dẫn đến khó khăn trong việc cung cấp ứng dụng đến người dùng cũng như xử lý lỗi hệ thống phát sinh trong quá trình sử dụng.
_Đề xuất của nhóm:_ |
|
https://github.com/The-Notebookinator/notebookinator | https://raw.githubusercontent.com/The-Notebookinator/notebookinator/main/docs/src/basic_usage.md | markdown | The Unlicense | # Basic Usage
Now that you have the Notebookinator installed, you can start notebooking.
## Setup
You can use our [template](https://github.com/The-Notebookinator/quick-start-template) either by creating a GitHub repository based on it with GitHub's official template feature, or just by downloading it. You can download the template simply by cloning it.
```sh
git clone https://github.com/The-Notebookinator/quick-start-template.git
# alternatively if you made your own repository you can clone it like this:
git clone <your-url-here>
```
Once you've done that, open the newly downloaded folder inside of VSCode or your editor of choice.
## Editing Your Notebook
### Adding New Entries
The Notebookinator allows for three different types of entries, frontmatter, body, and appendix. Each will be rendered as its own section, and has its own page count.
#### Frontmatter
Frontmatter entries, as their name implies, are shown at the beginning of the notebook. Entries of this type typically contain things like introductions, and the table of contents.
The template stores all of the frontmatter entries into the `frontmatter.typ` file by default. To add more frontmatter entries, simply call the `create-frontmatter-entry` function inside of the file like so:
```typ
#create-frontmatter-entry(title: "About")[
Here's some info about this amazing notebook!
]
```
Frontmatter entries are rendered in the order they are created.
#### Body
The most common type of entry is the body entry. These entries store all of your notebook's main content.
The template puts all of the body entries inside of the `entries/` folder. To make a new entry, make a new file in that folder. Then, `#include` that file in the `entries/entries.typ` file. For example, if you created a file called `entries/my-entry.typ`, then you'd add this line to your `entries/entries.typ` file:
```typ
#include "./my-entry.typ"
```
Body entries will be displayed in the order they are included in the `entries/entries.typ` file.
Once you've done that, you'll need to create a new entry inside of that file. This can be done with the `create-body-entry` function. If the file only contains a single entry, we recommend using a show rule to wrap the function as well, which will pass all of the `content` in the file into the `create-body-entry` function.
You can create a new body entry like so:
```typ
// not all themes require every one of these options
#show: create-body-entry.with(
title: "My Awesome Entry",
type: "identify", // The type of the entry depends on which theme you're using
date: datetime(year: 2024, month: 1, day: 1),
)
```
#### Appendix
Appendix entries go at the end of the notebook, and are stored in the `appendix.typ` file.
You can create a new appendix entry like this:
```typ
#create-appendix-entry(title: "Programming")[
Here's information about how we programmed the robot.
#lorem(500)
]
```
### Changing the Theme
In order to change the theme you'll need to edit two files, `packages.typ` and `main.typ`.
The first thing you'll need to do is edit which theme is being imported in `packages.typ`. For example, if you wanted to switch to the `linear` theme from the `radial` theme, you'd change `packages.typ` to look like this:
```typ
// packages.typ
// this file allows us to only specify package versions once
#import "@local/notebookinator:1.0.1": *
#import themes.linear: linear-theme, components // components is imported here so we don't have to specify which theme's components we're using.
```
Once you do that, you'll want to edit your `main.typ` to use the `linear-theme` instead of the `radial-theme`.
```typ
// main.typ
#show: notebook.with(
// ...
theme: linear-theme,
)
```
```admonish note
Not all themes implement the same components, so you may encounter some issues when changing themes with a more developed notebook.
```
### Using Components
Components are reusable elements created by themes. These are just functions stored inside a `components` module. Each theme should expose its own separate `components` module.
`packages.typ` should already export this module, so you can access it just by `import`ing `packages.typ`
```typ
#import "/packages.typ": *
```
Now you can use any of the components in the theme by just calling them like you would a normal function. Here's how you would create a simple `pro-con` table.
```typ
#components.pro-con(
pros: [
Here are the pros.
],
cons: [
Here are the cons.
]
)
```
You can see what components a theme implements by reading the [API reference](./reference.md).
## Compiling / Viewing Your Notebook
Once you're happy with your notebook, you'll want to render it into a PDF.
You can do that with either of the following commands:
```sh
typst compile main.typ
# or if you want live updates
typst watch main.typ
```
You can then open `main.pdf` in any PDF viewer to see your rendered output.
|
https://github.com/gbrivady/typst-templates | https://raw.githubusercontent.com/gbrivady/typst-templates/main/report-long.typ | typst | /* Generic template for single-author long reports. WIP */
#let appendix(body) = {
set heading(numbering: "A.1", supplement: [Appendix])
counter(heading).update(0)
body
}
#let project(title: "", subtitle: "", author: "", language: "", body) = {
set document(author: author, title: title)
set text(lang: language)
set page(
paper: "a4",
margin: (inside: 3cm, outside: 3cm, bottom: 4cm, top: 3cm),
)
set heading(numbering: "1.1.1", supplement: [Chapter])
// Title Page
block(height: 20%)[
#grid(
columns: (1fr, .5fr, 1fr),
box(width: 2cm, height: 2cm, fill: red),
"",
align(right)[#box(width: 2cm, height: 2cm, fill: blue)],
)
]
block(
height: 40%,
)[
#align(
center,
)[
#line(start: (5%, 0%), end: (95%, 0%), stroke: (thickness: .5mm))
#v(.4cm)
#block(text(weight: "bold", size: 28pt, title))
#v(.2cm)
#block(text(weight: "light", size: 26pt, subtitle))
#v(.5cm)
#line(start: (5%, 0%), end: (95%, 0%), stroke: (thickness: .5mm))
#v(1.5cm)
#text(size: 20pt, author)
#v(1cm)
#text(
size: 20pt,
smallcaps(datetime.today().display("[day] [month repr:long] [year]")),
)
]
]
block(height: 35%)[
#grid(
columns: (40%, 20%, 40%),
rows: (100%),
gutter: 0pt,
align(bottom + left)[
Bottom left titlepage left subtext #lorem(20)
],
"",
align(bottom + right)[
Bottom right title page subtext
],
)
]
pagebreak()
// Header definition
let header = [#grid(
columns: (1fr, 5fr, 1fr),
align(left)[#rect(width: 75%, fill: gradient.linear(green, blue))],
align(center)[
#author
#v(4pt, weak: true)
#title
],
align(right)[#rect(width: 80%, fill: gradient.linear(..color.map.rainbow))],
)
#line(length: 100%)
]
// Main body.
set page(header: header, number-align: center)
set page(numbering: "1", number-align: center)
set par(justify: true)
// Set outline
show outline.entry.where(level: 1): it => {
v(8pt)
strong(it)
}
outline(
target: heading.where(supplement: [Chapter]),
depth: 3,
indent: auto,
fill: repeat[~~.],
)
outline(
title: none,
target: heading.where(supplement: [Appendix]),
depth: 1,
indent: auto,
fill: repeat[~~.],
)
pagebreak()
body
} |
|
https://github.com/Area-53-Robotics/53E-Notebook-Over-Under-2023-2024 | https://raw.githubusercontent.com/Area-53-Robotics/53E-Notebook-Over-Under-2023-2024/giga-notebook/entries/lift/identify.typ | typst | Creative Commons Attribution Share Alike 4.0 International | #import "/packages.typ": notebookinator
#import notebookinator: *
#import themes.radial.components: *
#show: create-body-entry.with(
title: "Identify: Elevation",
type: "identify",
date: datetime(year: 2024, month: 1, day: 03),
author: "<NAME>",
witness: "<NAME>",
)
After the Capital Beltway Challenge, we decided that our robot needed some way
to elevate. We noticed that not many teams were able to elevate at all, even
this late into the season, meaning that we can still score near maximum points
by elevating.
#image("./identify.svg")
= Design Goals
- Achieve at least a B tier elevation
- Be able to elevate within the last 10 seconds of the match
= Design Constraints
- Our elevation mechanism must start deployed with an 18" cube, and cannot
increase the robot's size to more than 36" across when extended
- Our robot is currently using all 88Ws of its allowed motors, meaning that we
need to either reallocate motors, or not use them at all
|
https://github.com/coco33920/.files | https://raw.githubusercontent.com/coco33920/.files/mistress/typst_templates/fiction/main.typ | typst | #import "template.typ": *
#show: book.with(
title: "Liam's Playlist",
author: "<NAME>",
dedication: [for Rachel],
publishing-info: [
UK Publishing, Inc. \
6 Abbey Road \
Vaughnham, 1PX 8A3
#link("https://example.co.uk/")
971-1-XXXXXX-XX-X
],
)
= Mondays
Liam hated Mondays. He hated waking up to the sound of his dad's old car sputtering to life outside his window. He hated the smell of burnt toast and instant coffee that filled the kitchen. He hated the sight of his mum's tired face as she handed him his lunch bag and kissed him goodbye. He hated the feel of his worn-out uniform and backpack as he walked to the bus stop. He hated the noise of the other kids on the bus, talking about their weekend plans and their latest crushes. He hated the fact that he had nothing to say to them, nothing to share, nothing to look forward to.
He got off the bus at his school and made his way to his locker, avoiding eye contact with anyone who might notice him or worse, pick on him. He was used to being invisible, being ignored, being alone. He didn't have any friends at school, or anywhere else for that matter. He didn't have any hobbies or interests that made him stand out or fit in. He didn't have any dreams or goals that gave him hope or motivation. He just had his routine: wake up, go to school, come home, do homework, watch TV, go to bed. Repeat.
He opened his locker and took out his books for his first class: English literature. He liked reading books sometimes, but he didn't like analyzing them or writing essays about them. He didn't see the point of studying something that had no relevance to his life or future. What did Shakespeare or Dickens have to do with him? What did he care about metaphors or themes or symbols? He just wanted to escape into a different world for a while, not dissect it.
He closed his locker and headed to class. As he walked down the hall, he saw her: <NAME>. She was new at school this year and she was beautiful. She had long blonde hair that cascaded over her shoulders like a waterfall. She had bright blue eyes that sparkled like diamonds in the sunlight. She had a perfect smile that lit up her face like a star in the night sky.
But he knew it was impossible. She was out of his league. She was from another world. He sighed and continued walking towards English literature. He hated Mondays.
= Music
#lorem(1500)
= Magic
#lorem(600)
/*
= Mystery
#lorem(600)
= Money
#lorem(6000)
= Mistakes
#lorem(6000)
= Memory
#lorem(6000)
= Miracle
#lorem(6000)
= Monday again
#lorem(6000)
|
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/ofbnote/0.2.0/ofb_note.typ | typst | Apache License 2.0 | #import "ofb_common.typ": *
#let todos=state("todos",())
// Todo generation
#let todo(who,what,when)={
if type(what)=="string" {
what=upper(what.at(0))+what.slice(1)
} else if type(what)=="content" {
what=eval(repr(what).replace(regex("\[."),it=>upper(it.text),count:1))
}
todos.update(it=>it+((who,what,when)))
strong[#who #when : #what]
}
// General setup
#let ofbnote(
meta: (:),
doc
) = {
// Page
set page(
background: {place(dx:10.8cm,dy:20cm,image("vagues.svg", fit:"stretch"))},
footer: [
#align(center,block[#context [#counter(page).display() / #counter(page).final().first()]]),
#place(top + right,dy: -30%,block[
#align(top + right,{text(fill: palettea,size: 0.8em)[
*Office français de la biodiversité*\
"Le Nadar", 5 Square Félix Nadar\
94300 Vincennes\
ofb.gouv.fr
]})
]
)],
footer-descent: 30%,
margin:(bottom: 17%, top: 3cm, left: 3cm, right: 3cm),
)
// Headings
set heading(numbering: "I.1.a ")
show par: set block(spacing: 1em, above: 1.5em, below: 1.5em)
show heading.where(level:1): it => block(width: 100%, above: 3em, below: 2em)[
#set text(size: 1.4em, weight: "bold", fill: palettea)
#it
#v(0.3em)
]
show heading.where(level:2): it => block(width: 100%, above: 2em, below: 2em)[
#set text(size: 1.2em, weight: "bold", style: "italic", fill: paletteb)
#it
#v(0.3em)
]
show heading.where(level:3): it => block(width: 100%, above: 2em, below: 2em)[
#set text(weight: "bold", fill: palettec)
#it
#v(0.3em)
]
show heading.where(level:4): it => text(weight: "bold", fill: palettea)[
#it.body + " "
]
_conf(meta: meta, [
// Document heading
#block(width: 100%,image("logo_ofb_sigle2.png",width: 25%))
#align(right,[
#text(fill: palettea,size: 2em,weight: "bold",meta.at("title"))\
#if meta.at("authors", default: none)!=none [
#text(fill: rgb("#666666"),style: "italic",meta.at("authors"))\
]
#if meta.at("date", default: none)!=none [
#text(fill: rgb("#666666"),style: "italic",meta.at("date"))\
]
#if meta.at("version", default: none)!=none [
#text(fill: rgb("#666666"),style: "italic","Version "+meta.at("version"))
]
])
// Generate todo list as a first section
#context {
let todolist=todos.final()
if todolist.len()>0 [
= Suites à donner
#mytable(columns: (auto,auto,auto),[Qui ?],[Quoi ?],[Pour quand ?],..(todolist.flatten()))
]
}
#doc
])
}
// Appendix
#let appendix(doc) = {
set heading(
supplement: "Annexe ",
numbering: (..nums) => if nums.pos().len()==1 { "Annexe " + numbering("A.", ..nums)} else {numbering("A.1.a.", ..nums)}
)
context counter(heading).update(0)
pagebreak(weak: true)
doc
}
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/teig/0.1.0/README.md | markdown | Apache License 2.0 | # teig
This package provides an `eigenvalue` function that calculates the eigenvalues of a matrix.
```typst
#import "@preview/teig:0.1.0": eigenvalues
#let data = (
(1, 2, 3),
(4, 5, 6),
(7, 8, 9),
)
#let evals = eigenvalues(data)
The eigenvalues of
$
#math.mat(..data)
$
are approximately
$
#math.vec(..evals.map(x => str(calc.round(x, digits: 3))))
$
```
 |
https://github.com/katamyra/Notes | https://raw.githubusercontent.com/katamyra/Notes/main/Compiled%20School%20Notes/CS1332/Modules/PatternMatching.typ | typst | #import "../../../template.typ": *
= Pattern Matching
== Boyer Moore Algorithm
#definition[
*Last Occurrence Table*: records the index of the last occurrence of the letter.
We store it in a pair \<letter, index> in a hashmap, and letters not in the alphabet of the pattern as marked as null, or returned as -1 in the functionality
]
*Boyer Moore Last Table(pattern)*
```java
m = pattern.length
last = HashMap<character, index>
for all i from 0 to m-1
last = put(pattern[i], i)
end for
return last
```
#theorem[
*Actual Search Algorithm*
+ Create the LSOT to optimize shifts past mismatches
+ Move right to left in pattern
+ If there is a match, continue comparing text and pattern
+ If there is a mismatch, look to see if text character is in the alphabet
- If the char is in the alphabet, align them
- If the char is not in the alphabet, then shift past mismatched area algotegher
]
|
|
https://github.com/alexanderkoller/typst-blinky | https://raw.githubusercontent.com/alexanderkoller/typst-blinky/main/examples/main.typ | typst | MIT License | #import "@preview/blinky:0.1.0": link-bib-urls
#let darkblue = rgb("000099")
#show cite: set text(fill: darkblue)
#show link: set text(fill: darkblue)
= Introduction
Let's cite:
- a conference paper (bibtex entry has both DOI and URL, DOI wins): @bender-koller-2020-climbing
- a journal paper (bibtex entry only has a DOI): @kuhlmann-etal-2015-lexicalization
- a book (bibtex entry only has an URL): @GareyJohnsonBook
- a "Misc" Arxiv paper (bibtex entry only has URL): @yao2023predictinggeneralizationperformancecorrectness
#let bibsrc = read("custom.bib")
#link-bib-urls(bibsrc)[
#bibliography("custom.bib", style: "./association-for-computational-linguistics-blinky.csl")
]
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/t4t/0.1.0/assert.typ | typst | Apache License 2.0 | // Asserts
#import "alias.typ"
#let that = assert
#let eq = assert.eq
#let ne = assert.ne
#let neq = assert.ne
#let not-none = assert.ne.with(none)
#let any( ..values, value, message:"" ) = assert(value in values.pos(), message:message)
#let not-any( ..values, value, message:"" ) = assert(value not in values.pos(), message:message)
#let any-type( ..types, value, message:"") = assert(type(value) in types.pos(), message:message)
#let not-any-type( ..types, value, message:"" ) = assert(type(value) not in types.pos(), message:message)
#let all-of-type( t, ..values, message:"") = assert(values.pos().all((v) => alias.type(v) == t), message:message)
#let none-of-type( t, ..values, message:"") = assert(values.pos().all((v) => alias.type(v) != t), message:message)
#let not-empty( value, message:"" ) = {
if type(value) == "array" {
assert.ne(value, (), message:message)
} else if type(value) == "dictionary" {
assert.ne(value, (:), message:message)
} else if type(value) == "string" {
assert.ne(value, "", message:message)
} else {
assert.ne(value, none, message:message)
}
}
#let new( test ) = (v, message:"") => assert(test(v), message:message)
|
https://github.com/mem-courses/calculus | https://raw.githubusercontent.com/mem-courses/calculus/main/homework-1/calculus-homework9.typ | typst | #import "../template.typ": *
#show: project.with(
title: "Calculus Homework #9",
authors: ((
name: "<NAME> (#47)",
email: "<EMAIL>",
phone: "3230104585"
),),
date: "November 28, 2023",
)
= P149 习题3-3 2(1)
#prob[写出 $f(x) = arcsin x$ 在 $x=0$ 处的三阶泰勒公式.]
$ f(x) = x + R_3(x) $
其中 $R_3 (x) = display((2x^2-1)/((1+x^2)^(5/2)))$.
= P150 习题3-3 2(3)
#prob[写出 $f(x) = sin x$ 在 $x=display(pi/2)$ 处的 $2n$ 阶泰勒公式.]
$ f(x)
&= f(pi/2) + sum_(i=1)^(2n) (f^((i))(pi/2))/(i!) x^i + R_(2n) (x)\
&= f(pi/2) + sum_(i=1)^(2n) (sin((i+1)/2 pi))/(i!) x^i + R_(2n) (x)\
&= sum_(i=0)^(n) ((-1)^i)/((2i)!) x^(2i) + R_(2n) (x)
$
其中 $display(R_(2n) (x) = sin(x+(2n+1)/2 pi)/((2n+1)!) (x-pi/2)^(n+1) = ((-1)^n cos(x))/((2n+1)!) (x-pi/2)^(n+1))$.
= P150 习题3-3 4
#prob[
利用基本函数的泰勒公式将下列函数展开成具有佩亚诺余项的泰勒公式:
(1) $f(x) = e^x$,在 $x=1$ 处,$n$ 阶.
]
$ f(x) = e + e (x-1) + e/2! (x-1)^2 + dots.c + e/n! (x-1)^n + o((x-1)^n) $
#prob[(2) $display(f(x) = ln (1-x)/(1+x))$,在 $x=0$ 处,$2n$ 阶.]
$
f(x)
&= ln(1-x) - ln(1+x)\
&= sum_(i=1)^(2n) (((-x)^i (-1)^(i+1))/i + (x^i (-1)^(i+1))/i) + o(x^(2n+1))\
&= - sum_(i=1)^n (x^(2i))/i + o(x^(2n+1))
$
= P150 习题3-3 5(1)
#prob[利用泰勒公式近似地计算 $root(5,250)$,并估计误差.]
TBD
= P150 习题3-3 5(3)
#prob[利用泰勒公式近似地计算 $sin 18 degree$,并估计误差.]
$
sin x = x - x^3/(3!) + R_3(x)
$
所以 $display(sin 18 degree approx pi/10 - 1/6(pi/10)^3 approx 0.309)$,其中误差 $|R_3(x)| <= display(1/24 (pi/10)^4) = 4.1 times 10^(-4)$.
= P150 习题3-3 7(1)
#prob[利用泰勒公式求极限:$ lim_(x->+oo) x^(3/2) (sqrt(x+1) + sqrt(x-1) - 2 sqrt(x)) $]
$ "原式"
&= lim_(x->+oo) x^2 (sqrt(1+1/x) + sqrt(1-1/x) - 2)\
&= lim_(x->0) (sqrt(1+x) + sqrt(1-x) - 2)/(x^2)\
&= lim_(x->0) ((1+1/2 x-1/8 x^2 + o(x^2)) + (1 + 1/2(-x) - 1/8 (-x)^2 + o(x^2)) - 2)/x^2\
&= lim_(x->0) (-1/4 x^2)/x^2 = -1/4
$
= P150 习题3-3 7(3)
#prob[利用泰勒公式求极限:$ lim_(x->+oo) [(x^3-x^2+x/2) e^(1/x) - sqrt(x^6+1)] $]
= P150 习题3-3 9
#prob[
设 $f(x)$ 在 $[0,1]$ 上具有二阶导数,且满足条件 $|f(x)|<=a$,$|f''(x)|<=b$,其中 $a,b$ 都是非负数,$c$ 是 $(0,1)$ 内任意一点.
(1) 写出 $f(x)$ 在 $x=c$ 处带有拉格朗日余项的一阶泰勒公式;
]
$
f(x) = f(c) + f'(c) (x-c) + R_1(x)
$
其中 $R_1(x) = display((f''(xi))/(2!) (x-c)^2)$,$xi$ 在 $x$ 到 $c$ 之间.
#prob[(2) 证明:$display(|f'(c)|<=2a + b/2)$.]
= P129 习题3-1 12(1)
#prob[讨论 $y=3x^4 - 4x^3 + 1$ 的单调性.]
$y' = 12 x^3 - 12 x^2 = 12 x^2(x-1)$.令 $y'=0$,得 $x=0,sp x=1$.
故 $y$ 的单调增区间为 $(1,+oo)$,单调减区间为 $(-oo,1)$.
= P129 习题3-1 12(4)
#prob[讨论 $y=x-ln(1+x)$ 的单调性.]
函数定义域为 $(-1,+oo)$,$y' = 1-display(1/(1+x))=display(x/(1+x))$.令 $y'=0$,得 $x=0$.
故 $y$ 的单调增区间为 $(0,+oo)$,单调减区间为 $(-1,0)$.
= P129 习题3-1 13(3)
#prob[求函数 $y=x+e^(-x)$ 的极值.]
$y' = 1-e^(-x)$.令 $y'=0$ 得 $x=0$,所以当 $x=0$ 时原函数取到极小值 $1$.
= P129 习题3-1 13(6)
#prob[求函数 $y=(x+1)^(2/3)(x-2)^2$ 的极值.]
$ y'
&= 2/3 (x+1)^(-1/3) (x-2)^2 + 2(x+1)^(2/3) (x-2)\
&= (x-2)/root(3,x+1) (2/3(x-2) + 2(x+1))\
&= (2(x-2)(4x+1))/(3 root(3,x+1))
$
令 $y'=0$,得到 $x=2,sp x=-display(1/4)$ 且当 $x=-1$ 时 $y'$ 不存在.
故 $y$ 的单调增区间为 $(-1,-display(1/4)),(2,+oo)$,单调减区间为 $(-oo,-1),(-display(1/4),2)$.
所以 $y$ 在 $x=-1$ 与 $x=2$ 处取得极小值 $0$,在 $x=-display(1/4)$ 处取得极大值 $display(81/16 (3/4)^(2/3))$.
= P130 习题3-1 16(2)
#prob[证明不等式:当 $x>0$ 时,$display(x-x^2/2 < ln(1+x) < x)$.]
根据泰勒中值定理,有
$
ln(1+x) = x-(x^2)/(2(1+xi_1)^2) = x-x^2/2+(x^3)/(3(1+xi_2)^3) quad quad (xi_1,xi_2 in (0,x))
$
所以
$
xi_1>0 => -(x^2)/(2(1+xi_1)^2)<0 => ln(1+x) < x\
xi_2>0 => (x^3)/(3(1+xi_2)^3)>0 => ln(1+x) > x-x^2/2
$
原不等式得证.
= P130 习题3-1 16(4)
#prob[证明不等式:当 $x>0$ 时,$display(2/(2x+1) < ln(1+1/x) < 1/sqrt(x^2+x))$.]
令 $u=display(1/x)+1>1$,原命题等价于证明:
$
(2(u-1))/(u+1) < ln(u) < (u-1)/sqrt(u)
$
令 $g(u) = ln(u) - display((2(u-1))/(u+1)),sp h(u) = ln(u) - display((u-1)/sqrt(u))$.我们有
$
g'(u) &= 1/u - 4/(u+1)^2 = ((u-1)^2)/(u(u+1)^2) > 0\
h'(u) &= 1/u - (sqrt(u) - (u-1)/(2sqrt(u)))/u = (2sqrt(u) - u - 1)/(2u sqrt(u)) = - (sqrt(u)-1)^2/(2u sqrt(u)) < 0\
$
所以 $g(u)$ 和 $h(u)$ 分别在 $(1,+oo)$ 上单调递增、单调递减.故而当 $u>1$ 时:
$
ln(u) - display((2(u-1))/(u+1)) > g(1) &= 0\
ln(u) - display((u-1)/sqrt(u)) < h(1) &= 0\
$
即原不等式得证.
= P130 习题3-1 19
#prob[
设 $f(x)$ 在 $[0,1]$ 上连续,$(0,1)$ 内二阶可导,过点 $A(0,f(0))$ 与 $B(1,f(1))$ 的直线与曲线 $y=f(x)$ 相交于点 $(c,f(c))$,其中 $0<c<1$.证明:在 $(0,1)$ 内至少存在一点 $xi$,使 $f''(xi) = 0$.
]
构造函数:$ g(x) = f(x) - (f(1)-f(0))x - f(0) $
由已知,$g(0) = g(1) = g(c) = 0$.由于 $f(x)$ 在 $[0,1]$ 上连续且在 $(0,1)$ 内可导,故 $g(x)$ 也在 $[0,1]$ 上连续且在 $(0,1)$ 内可导.根据罗尔定理,必存在 $0<a<c<b<1$ 使得 $g'(a) = g'(b) = 0$.
容易证明 $g'(x)$ 在 $[a,b]$ 上连续,且 $g''(x) = f''(x)$ 在 $(a,b)$ 内存在,故根据罗尔定理必存在 $xi in (a,b)$ 使得 $g''(xi) = f''(xi) = 0$.
即在 $(0,1)$ 内必存在一点 $xi$ 使得 $f''(xi) = 0$.
= P187 第三章综合题 17
#prob[试证:当 $x>0$ 时,$(x^2-1) ln x >= (x-1)^2$.]
令 $t=x-1$,则原命题等价于证明 $t>-1$ 时 $t(t+2) ln (1+t) >= t^2$.
当 $t=0$ 时,不等式左右均为零,不等号显然成立;
当 $t>0$ 即 $x>1$ 时,只需证:
$
(t+2) ln (1+t) >= t <=> ln(1+t) - t/(t+2) >= 0
$
令 $g(t) = display(ln(1+t) - t/(t+2))$,有
$
g'(t)
= 1/(1+t) - ((t+2)-t)/((t+2)^2)
= ((t+2)^2-2(1+t))/((t+1)(t+2)^2)
= ((t+1)^2 + 1)/((t+1)(t+2)^2)
$
$g'(t) > 0$ 在 $(-1,+oo)$ 上成立,即 $g(t)$ 在 $(-1,+oo)$ 上单调增.
由于 $g(0) = ln 1 - 0 = 0$,所以 $forall t in (0,+oo)$,有 $g(t) > g(0) = 0$,不等号成立.
当 $t<0$ 即 $x<1$ 时,只需证:
$
(t+2) ln (1+t) >= t <=> ln(1+t) - t/(t+2) <= 0
$
根据前面的结论,$forall t in (-1,0)$,有 $g(t)<g(0)=0$,不等号成立.
综上,原不等式得证. |
|
https://github.com/jgm/typst-hs | https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/math/matrix-alignment-00.typ | typst | Other | // Test alternating alignment in a vector.
$ vec(
"a" & "a a a" & "a a",
"a a" & "a a" & "a",
"a a a" & "a" & "a a a",
) $
|
https://github.com/pku-typst/unilab | https://raw.githubusercontent.com/pku-typst/unilab/main/README.md | markdown | MIT License | # unilab
Typst Lab Report Template
## Local debugging
Clone this repo into the [local package directory](https://github.com/typst/packages?tab=readme-ov-file#local-packages); note that the version must be part of the directory path (e.g. `.../unilab/0.0.1/`).
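Once it is in place, the template can be imported from any local document (the exact symbols exported depend on the package's entry point):

```
#import "@local/unilab:0.0.1": *
```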
## TODO
- [ ] en font support
- [ ] support school logo
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/cetz/0.3.0/src/polygon.typ | typst | Apache License 2.0 | #import "/src/vector.typ"
/// Returns a list of polygon points from
/// a list of segments.
///
/// Cubic segments get linearized by sampling.
///
/// - segments (array): List of segments
/// - samples (int): Number of samples
/// -> array
#let from-segments(segments, samples: 10) = {
import "/src/bezier.typ": cubic-point
let poly = ()
for ((kind, ..pts)) in segments {
if kind == "cubic" {
poly += range(0, samples).map(t => {
cubic-point(..pts, t / (samples - 1))
})
} else {
poly += pts
}
}
return poly
}
/// Computes the signed area of a 2D polygon.
///
/// The formula used is the following:
/// $ 1/2 \sum_{i=0}^{n-1} (x_i * y_{i+1} - x_{i+1} * y_i) $
///
/// - points (array): List of Vectors of dimension >= 2
/// -> float
#let signed-area(points) = {
let a = 0
let n = points.len()
let (cx, cy) = (0, 0)
for i in range(0, n) {
let (x0, y0, ..) = points.at(i)
let (x1, y1, ..) = points.at(calc.rem(i + 1, n))
cx += (x0 + x1) * (x0 * y1 - x1 * y0)
cy += (y0 + y1) * (x0 * y1 - x1 * y0)
a += x0 * y1 - x1 * y0
}
return .5 * a
}
/// Returns the winding order of a 2D polygon
/// by using it's signed area.
///
/// Returns either "ccw" (counter clock-wise) or "cw" (clock-wise) or none.
///
/// - points (array): List of polygon points
/// -> str,none
#let winding-order(points) = {
let area = signed-area(points)
if area > 0 {
"cw"
} else if area < 0 {
"ccw"
} else {
none
}
}
// Calculate triangle centroid
#let triangle-centroid(points) = {
assert.eq(points.len(), 3)
let (mx, my, mz) = (0, 0, 0)
for p in points {
let (x, y, z) = p
mx += x
my += y
mz += z
}
return (mx / 3, my / 3, mz / 3)
}
// Calculate the centroid of a line, triangle or simple polygon
// Formulas:
// https://en.wikipedia.org/wiki/Centroid
#let simple-centroid(points) = {
return if points.len() <= 1 {
none
} else if points.len() == 2 {
vector.lerp(..points, .5)
} else if points.len() == 3 {
triangle-centroid(points)
} else if points.len() >= 3 {
// Skip polygons with multiple z values
let z = points.first().at(2, default: 0)
if points.any(p => p.at(2) != z) {
return none
}
let a = 0
let n = points.len()
let (cx, cy) = (0, 0)
for i in range(0, n) {
let (x0, y0, ..) = points.at(i)
let (x1, y1, ..) = points.at(calc.rem(i + 1, n))
cx += (x0 + x1) * (x0 * y1 - x1 * y0)
cy += (y0 + y1) * (x0 * y1 - x1 * y0)
a += x0 * y1 - x1 * y0
}
return (cx/(3*a), cy/(3*a), z)
}
}
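// Usage sketch (values follow from the formulas above): a unit square in the xy-plane.
//
//   #let square = ((0, 0, 0), (1, 0, 0), (1, 1, 0), (0, 1, 0))
//   #signed-area(square)     // 1.0
//   #winding-order(square)   // "cw" (with this module's sign convention)
//   #simple-centroid(square) // (0.5, 0.5, 0)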
|
https://github.com/yichenchong/game_theory_cw1 | https://raw.githubusercontent.com/yichenchong/game_theory_cw1/main/report/README.md | markdown | # Report
This is the final paper from the coursework.
The paper is written in [Typst](https://typst.app) (a great modern alternative to LaTeX), using the AMS Maths template. |
|
https://github.com/adamijak/typst-template | https://raw.githubusercontent.com/adamijak/typst-template/main/file.typ | typst | MIT License | = #lorem(3)
#lorem(96)
#figure(
image("./mmd/flow.mmd.png", width: 50%),
caption: [
Mermaid diagram
],
)
|
https://github.com/jgm/typst-hs | https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compute/calc-16.typ | typst | Other | // Test the `calc` function.
#test(calc.pow(10, 0), 1)
#test(calc.pow(2, 4), 16)
|
https://github.com/kyanbasu/pwr-typst | https://raw.githubusercontent.com/kyanbasu/pwr-typst/main/README.md | markdown | MIT License | # pwr-typst
This repository contains tools for working with [typst](https://typst.app/) and extensions/templates specifically made for PWr (Wrocław University of Science and Technology).
# Utilities for working with typst
[Excel copy-paste to typst table](https://kyanbasu.github.io/pwr-typst/)
|
https://github.com/typst-doc-cn/tutorial | https://raw.githubusercontent.com/typst-doc-cn/tutorial/main/src/basic/scripting-scope-and-style.typ | typst | Apache License 2.0 | #import "mod.typ": *
#show: book.page.with(title: "内容与样式")
这是本章的最后一节。经过两节稍显枯燥的脚本教程,我们继续回到排版本身。
在#(refs.writing-markup)[《初识标记模式》]中,我们学到了很多各式各样的内容。我们学到了段落、标题、代码片段......
接着我们又花费了三节的篇幅,讲授了各式各样的脚本技巧。我们学到了字面量、变量、闭包......
但是它们之间似乎隔有一层厚障壁,阻止了我们进行更高级的排版。是了,如果「内容」也是一种值,那么我们应该也可以更随心所欲地使用脚本操控它们。Typst以排版为核心,应当也对「内容类型」有着精心设计。
本节主要介绍如何使用脚本排版内容。这也是Typst的核心功能,并在语法上*与很多其他语言有着不同之处*。不用担心,在我们已经学了很多Typst语言的知识的基础上,本节也仅仅更进一步,教你如何真正以脚本视角看待一篇文档。
== 内容类型 <content-type-feature>
我们已经学过很多元素:段落、标题、代码片段等。这些元素在被创建后都会被包装成为一种被称为「内容」的值。这些值所具有的类型便被称为「内容类型」。同时「内容类型」提供了一组公共方法访问元素本身。
乍一听,内容就像是一个“容器”将元素包裹。但内容又不太像是之前所学过的数组或字典那样的复合字面量,或者说这样不方便理解。事实上,每个元素都有各自的特点,但仅仅为了保持动态性,所有的元素都被硬凑在一起,共享一种类型。有两种理解这种类型的视角:从表象论,「内容类型」是一种鸭子类型;从原理论,「内容类型」提供了操控内容的公共方法,即它是一种接口,或称特征(Trait)。
=== 特性一:元素包装于「内容」
我们知道所有的元素语法都可以等价使用相应的函数构造。例如标题:
#code(```typ
#repr([= 123]) \ // 语法构造
#repr(heading(depth: 1)[123]) // 函数构造
```)
一个常见的误区是误认为元素继承自「内容类型」,进而使用以下方法判断一个内容是否为标题元素:
#code(```typ
标题是heading类型(伪)?#(type([= 123]) == heading)
```)
但两者类型并不一样。事实上,元素是「函数类型」,元素函数的返回值为「内容类型」。
#code(```typ
标题函数的类型:#(type(heading)) \
标题的类型:#type([= 123])
```)
这引出了一个重要的理念,Typst中一切皆组合。Typst中目前没有继承概念,一切功能都是组合出来的,这类似于Rust语言的概念。你可能没有学过Rust语言,但这里有一个冷知识:
#align(center, [Typst $<=>$ Typ(setting Ru)st $<=>$ Typesetting Rust])
即Typst是以Rust语言特性为基础设计出的一个排版(Typesetting)语言。
当各式各样的元素函数接受参数时,它们会构造出「元素」,然后将元素包装成一个共同的类型:「内容类型」。`heading`是函数而不是类型。与其他语言不同,没有一个`heading`类型继承`content`。因此不能使用`type([= 123]) == heading`判断一个内容是否为标题元素。
=== 特性二:内容类型的`func`方法
所有内容都允许使用`func`得到构造这个内容所使用的函数。因此,可以使用以下方法判断一个内容是否为标题元素:
#code(```typ
标题所使用的构造函数:#([= 123]).func()
标题的构造函数是`heading`?#(([= 123]).func() == heading)
```)
// 这一段不要了
// === 特性二点五:内容类型的`func`方法可以直接拿来用
// `func`方法返回的就是函数本身,自然也可以拿来使用:
// #code(```typ
// 重新构造标题:#(([= 123]).func())([456])
// ```)
// 这一般没什么用,但是有的时候可以用于得到一些Typst没有暴露出来的内容函数,例如`styled`。
// #code(```typ
// #let type_styled = text(fill: red, "").func()
// #let st = text(fill: blue, "").styles
// #text([abc], st)
// ```)
=== 特性三:内容类型的`fields`方法
Typst中一切皆组合,它将所有内容打包成「内容类型」的值以完成类型上的统一,而非类型继承。
但是这也有坏处,坏处是无法“透明”访问内部内容。例如,我们可能希望知道`heading`的级别。如果不提供任何方法访问标题的级别,那么我们就无法编程完成与之相关的排版。
为了解决这个问题,Typst提供一个`fields`方法提供一个content的部分信息:
#code(```typ
#([= 123]).fields()
```)
`fields()`将部分信息组成字典并返回。如上图所示,我们可以通过这个字典对象进一步访问标题的内容和级别。
#code(```typ
#([= 123]).fields().at("depth")
```)
#pro-tip[
这里的“部分信息”描述稍显模糊。具体来说,Typst只允许你直接访问元素中不受样式影响的信息,至少包含语法属性,而不允许你*直接*访问元素的样式。
// 如下:
// #code.with(al: top)(````typ
// #let x = [= 123]
// #rect([#x <the-heading>])
// #x.fields() \
// #locate(loc => query(<the-heading>, loc))
// ````)
]
=== 特性四:内容类型与`fields`相关的糖 <grammar-content-member-exp>
由于我们经常需要与`fields`交互,Typst提供了`has`方法帮助我们判断一个内容的`fields`是否有相关的「键」。
#code(```typ
使用`... in x.fields()`判断:#("text" in `x`.fields()) \
等同于使用`has`方法判断:#(`x`.has("text"))
```)
Typst提供了`at`方法帮助我们访问一个内容的`fields`中键对应的值。
#code(```typ
使用`x.fields().at()`获取值:#(`www`.fields().at("text")) \
等同于使用`at`方法:#(`www`.at("text"))
```)
特别地,内容的成员包含`fields`的键,我们可以直接通过成员访问相关信息:
#code(```typ
使用`at`方法:#(`www`.at("text")) \
等同于访问`text`成员:#(`www`.text)
```)
== 内容的「样式」
我们接下来循着文本样式的脉络学习排版内容的语法。
重点1:文本是段落的重要组成部分,与之对应的内容函数是`text`。
我们知道一个函数可以有各种参数。那么我们从函数视角来看,内容的样式便由创建时参数的内容决定。
例如,我们想要获得一段蓝色的文本:
#code(```typ
#text("一段文本", fill: blue)
```)
`fill: blue`是函数的参数,指定了文本的样式。
这个视角有助于我们更好的将对「样式」的需求转换为对函数的操控。
例如,我们可以使用函数的`with`方法,获得一个固定样式的文本函数:
#code(```typ
#let warning = text.with(fill: orange)
#warning[警告,你做个人吧]
```)
== 上下文有关表达式
// contextual expression
在介绍重要语法之前,我们先来一道开胃菜。
#code(```typ
#context text.size
```)
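`context` 读到的是当前位置实际生效的样式值。例如,先用「`set`」(下一节介绍)改变字号,再读取它(仅作演示):

#code(```typ
#set text(size: 20pt)
#context text.size
```)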
== 「`set`」语法 <grammar-set>
重点2:一个段落主要是一个内容序列,其中有可能很多个文本。
#code(```typ
#repr([不止包含......一个文本!])
```)
假设我们想要让一整个段落都显示成蓝色,显然不能将文本一个个用`text.with(fill: blue)`构造好再组装起来。
这个时候「`set`」语法出手了。「`set`」关键字后可以跟随一个函数调用,为影响范围内所有函数关联的对应内容设置对应参数。
#code(```typ
#set text(fill: blue)
一段很长的话可能不止包含......一个文本!
- 似乎,列表中也有文本。
```)
重点:「`set`」的影响范围是其所在「作用域」内的后续内容。
我们紧接着来讲与之相关的,Typst中最重要的概念之一:「作用域」。
== 「作用域」 <grammar-scope>
// 内容块与代码块没有什么不同。
「作用域」是一个非常抽象的概念。但是理解他也并不困难。我们需要记住一件事,那就是每个「代码块」创建了一个单独的「作用域」:
#code(```typ
两只#{
[兔]
set text(rgb("#ffd1dc").darken(15%))
{
[兔白]
set text(orange)
[又白]
}
[,真可爱]
}
```)
从上面的染色结果来看,粉色规则可以染色到`[兔白]`、`[又白]`和`[真可爱]`,橘色规则可以染色到`[又白]`但不能染色到[,真可爱]。以内容的视角来看:
1. `[兔]`不是相对粉色规则的后续内容,更不是相对橘色规则的后续内容,所以它默认是黑色。
2. `[兔白]`是相对粉色规则的后续内容,所以它是粉色。
3. `[又白]`同时被两个规则影响,但是根据「执行顺序」,橘色规则被优先使用。
4. `[真可爱]`虽然从代码先后顺序来看在橘色规则后面,但不在橘色规则所在作用域内,不满足「`set`」影响范围的设定。
我们说「`set`」的影响范围是其所在「作用域」内的后续内容,意思是:对于每个「代码块」,「`set`」规则只影响到从它自身语句开始,到该「代码块」的结束位置。
接下来,我们回忆:「内容块」和「代码块」没有什么不同。上述例子还可以以「内容块」的语法改写成:
#code(```typ
两只#[兔#set text(fill: rgb("#ffd1dc").darken(15%))
#[兔白#set text(fill: orange)
又白],真可爱
]
```)
由于断行问题,这不方便阅读,但从结果来看,它们确实是等价的。
最后我们再回忆:文件本身是一个「内容块」。
#code(```typ
两小只,#set text(fill: orange)
真可爱
```)
针对文件,我们仍重申一遍「`set`」的影响范围。其影响等价于:对于文件本身,*顶层*「`set`」规则影响到该文件的结束位置。
#pro-tip[
也就是说,`include`文件内部的样式不会影响到外部的样式。
]
== 变量的可变性
理解「作用域」对理解变量的可变性有帮助。这原本是上一节的内容,但是前置知识包含「作用域」,故在此介绍。
话说Typst对内置实现的所有函数都有良好的自我管理,但总免不了用户打算写一些逆天的函数。为了保证缓存计算仍较为有效,Typst强制要求用户编写的*所有函数*都是纯函数。这允许Typst有效地缓存计算,在相当一部分文档的编译速度上,快过LaTeX等语言上百倍。
你可能不知道所谓的纯函数是为何物,本书也不打算讲解什么是纯函数。关键点是,涉及函数的*纯性*,就涉及到变量的可变性。
所谓变量的可变性是指,你可以任意改变一个变量的内容,也就是说一个变量默认是可变的:
#code(```typ
#let a = 1; #let b = 2;
#((a, b) = (b, a)); #a, #b \
#for i in range(10) { a += i }; #a, #b
```)
但是,一个函数的函数体表达式不允许涉及到函数体外的变量修改:
#code(
```typ
#let a = 1;
#let f() = (a += 1);
#f()
```,
res: [#text(red, [error]): variables from outside the function are read-only and cannot be modified],
)
这是因为纯函数不允许产生带有副作用的操作。
同时,传递进函数的数组和字典参数都会被拷贝。这将导致对参数数组或参数字典的修改不会影响外部变量的内容:
#code(```typ
#let a = (1, ); #a \ // 初始值
#let add-array(a) = (a += (2, ));
#add-array(a); #a \ // 函数调用无法修改变量
#(a += (2, )); #a \ // 实际期望的效果
```)
#pro-tip[
准确地来说,数组和字典参数会被写时拷贝。所谓写时拷贝,即只有当你期望修改数组和字典参数时,拷贝才会随即发生。
]
为了“修改”外部变量,你必须将修改过的变量设法传出函数,并在外部更新外部变量。
#code(```typ
#let a = (1, ); #a \ // 初始值
#let add-array(a) = { a.push(2); a };
#(a = add-array(a)); #a \ // 返回值更新数组
```)
#pro-tip[
一个函数是纯的,如果:
+ 对于所有相同参数,返回相同的结果。
+ 函数没有副作用,即局部静态变量、非局部变量、可变引用参数或输入/输出流等状态不会发生变化。
本节所讲述的内容是对第二点要求的体现。
]
== 「`set if`」语法 <grammar-set-if>
回到「set」语法的话题。假设我们脚本中设置了当前文档是否处于暗黑主题,并希望使用「`set`」规则感知这个设定,你可能会写:
#code(```typ
#let is-dark-theme = true
#if is-dark-theme {
set rect(fill: black)
set text(fill: white)
}
#rect([wink!])
```)
根据我们的知识,这应该不起作用,因为`if`后的代码块创建了一个新的作用域,而「`set`」规则只能影响到该代码块内后续的代码。但是`if`的`then`和`else`一定需要创建一个新的作用域,这有点难办了。
`set if`语法出手了,它允许你在当前作用域设置规则。
#code(```typ
#let is-dark-theme = true
#set rect(fill: black) if is-dark-theme
#set text(fill: white) if is-dark-theme
#rect([wink!])
```)
解读`#set rect(fill: black) if is-dark-theme`。它的意思是,如果满足`is-dark-theme`条件,那么设置相关规则。这其实与下面代码“感觉”一样。
#code(```typ
#let is-dark-theme = true
#if is-dark-theme {
set rect(fill: black)
}
#rect([wink!])
```)
区别仅仅在`set if`语法确实从语法上没有新建一个作用域。这就好像一个“规则怪谈”:如果你想要让「`set`」规则影响到对应的内容,就想方设法满足「`set`」影响范围的要求。
== 「内容」是一棵树
重点3:「内容」是一棵树,这意味着你可以“攀树而行”。
Typst对代码块有着的一系列语法设计,让代码块非常适合描述内容。又由于作用域的性质,最终代码块让「内容」形成为一颗树。
「内容」是一棵树。一个`main.typ`就是「内容」的一再嵌套。即便不使用任何标记语法,你也可以创建一个文档:
#code.with(al: top)(```typ
#let main-typ() = {
heading("生活在Content树上")
{
[现代社会以海德格尔的一句]
[“一切实践传统都已经瓦解完了”]
[为嚆矢。]
} + parbreak()
[...] + parbreak()
[在孜孜矻矻以求生活意义的道路上,对自己的期望本就是在与家庭与社会对接中塑型的动态过程。]
[而我们的底料便是对不同生活方式、不同角色的觉感与体认。]
[...]
}
#main-typ()
```)
// == 「样式链」
// 理解「作用域」对
== `plain-text`,以及递归函数
如果我们想要实现一个函数`plain-text`,它将一段文本转换为字符串。它便可以在树上递归遍历:
```typ
#let plain-text(it) = if it.has("text") {
it.text
} else if it.has("children") {
("", ..it.children.map(plain-text)).join()
} else if it.has("child") {
plain-text(it.child)
} else { ... }
```
所谓递归是一种特殊的函数实现技巧:
- 递归总有一个不调用其自身的分支,称其为递归基。这里递归基就是返回`it.text`的分支。
- 函数体中包含它自身的函数调用。例如,`plain-text(it.child)`便再度调用了自身。
这个函数充分利用了内容类型的特性实现了遍历。首先它使用了`has`函数检查内容的成员。
如果一个内容有孩子,那么对其每个孩子都继续调用`plain-text`函数并组合在一起:
```typ
#if it.has("children") { ("", ..it.children.map(plain-text)).join() }
#if it.has("child") { plain-text(it.child) }
```
限于篇幅,我们没有提供`plain-text`的完整实现,你可以试着在课后完成。
== 鸭子类型
这里值得注意的是,`it.text`具有多态行为。即便没有继承,这里通过一定动态特性,允许我们同时访问「代码片段」的`text`和「文本」的text。例如:
#code(```typ
#let plain-mini(it) = if it.has("text") { it.text }
#repr(plain-mini(`代码片段中的text`)) \
#repr(plain-mini([文本中的text]))
```)
这也便是我们在「内容类型」小节所述的鸭子类型特性。如果「内容」长得像文本(鸭子),那么它就是文本。
== 「内容」是一棵树(Cont.)
#pro-tip[
利用「内容」与「树」的特性,我们可以在Typst中设计出更多优雅的脚本功能。
]
=== CeTZ的「树」
CeTZ利用内容树制作“内嵌的DSL”。CeTZ的`canvas`函数接收的不完全是内容,而是内容与其IR的混合。
例如它的`line`函数的返回值,就完全不是一个内容,而是一个无法窥视的函数。
#code(```typ
#import "@preview/cetz:0.2.0"
#repr(cetz.draw.line((0, 0), (1, 1), fill: blue))
```)
当你产生一个“混合”的内容并将其传递给`cetz.canvas`,CeTZ就会像`plain-text`一样遍历你的混合内容,并加以区分和处理。如果遇到了他自己特定的IR,例如`cetz.draw.line`,便将其以特殊的方式转换为真正的「内容」。
使用混合语言,在Typst中可以很优雅地画多面体:
#code.with(al: top)(```typ
#import "@preview/cetz:0.2.0"
#align(center, cetz.canvas({
// 导入cetz的draw方言
import cetz.draw: *; import cetz.vector: add
let neg(u) = if u == 0 { 1 } else { -1 }
for (p, c) in (
((0, 0, 0), black), ((1, 1, 0), red), ((1, 0, 1), blue), ((0, 1, 1), green),
) {
line(add(p, (0, 0, neg(p.at(2)))), p, stroke: c)
line(add(p, (0, neg(p.at(1)), 0)), p, stroke: c)
line(add(p, (neg(p.at(0)), 0, 0)), p, stroke: c)
}
}))
```)
=== PNG.typ的树
我们知道「内容块」与「代码块」没有什么本质区别。
如果我们可以基于「代码块」描述一棵「内容」的树,那么一张PNG格式的图片似乎也可以被描述为一棵「字节」的树。
通过代码块语法,你可以在Typst中拼接字节,依像素地创建一张PNG格式的图片:
#code.with(al: top)(```typ
// Origin: https://typst.app/project/r0SkRmsZYIYNxjs6Q712aP
#import "png.typ": *
#let prelude = (0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A)
#let ihdr(w, h) = chunk("IHDR", be32(w) + be32(h) + (8, 2, 0, 0, 0))
#let idat(lines) = chunk("IDAT", {
let data = lines.map(line => (0x00,) + line).flatten()
let len = le32(data.len()).slice(0, 2)
(0x08, 0x1D, 0x01); len; len.map(xor.with(0xFF)); data; be32(adler32(data))
})
#align(center, box(width: 25%, image.decode(bytes({
let (w, h) = (8, 8)
prelude; ihdr(w, h); idat( for y in range(h) {( for x in range(w) {
(calc.floor(256 * x / w), 128, calc.floor(256 * y / h))
}, )} ); chunk("IEND", ())
}))))
```)
== 「`show`」语法 <grammar-show>
「`set`」语法是「`show set`」语法的简写。因此,「`show`」语法显然可以比`set`更强大。<grammar-show-set>
#code(```typ
#show: set text(fill: blue)
wink!
```)
我们可以看到「`show`」语法由两部分组成,由冒号分隔。
`show`的右半部分是一个函数,表示选择文档的一部分以作修改。
#pro-tip[
你可能会问,先姑且不问函数要怎么写,难道`set text(fill: blue)`也能算一个函数吗?
事实上,`set`规则是「内容类型」,它接受一个样式和一个内容,返回一个`styled`内容:
#code(```typ
#let x = [#set text(fill: blue)]
#x.func()
```)
以下使用方法非常黑客,请最好不要在你的文档中包含这种代码。仅用于理解:
#code(```typ
#let styled = [#set text(blue)].func()
#let styles = text("", red).styles
#styled([Red Text], styles)
```)
1. 第一行代码,我们说`func`方法返回内容函数本身,这里便返回了一个内部的函数`styled`。
2. 第二行代码,这里我们从`text`内容上找到了它关于设置红色文本的样式(参数)。
3. 第三行代码,把一个内容及一个无论如何从某处得到了的样式传递给`styled`函数。
4. 最终我们构造出了一个真实的红色文本。
]
`show`的左半部分是选择器,表示选择文档的一部分以作修改。它作用于「作用域」内的*后续*所有*被选择器选中*的内容。
如果选择器为空,则默认选择*后续所有*内容。这也是「`set`」语法对应规则的原理。如果选择器不为空,那么因为我们还没讲解选择器,所以这里不作过多讲解。
但有一种选择器比较简单易懂。我们可以将内容函数作为选择器,选择相应内容作影响。
以下脚本设置所有代码片段的颜色:
#code(```typ
#show raw: set text(fill: blue)
被秀了的`代码片段`!
```)
以下脚本设置所有数学公式的颜色,但同时也修改代码片段的颜色:
// todo: ugly code
#code(```typ
#show raw: set text(red)
#show math.equation: set text(blue)
#let dif2(x) = math.op(math.Delta + $x$)
一个公式:$ sum_(f in S(x))
#`refl`;(f) dif2(x) $
```)
我们说,`show`的右半部分是一个函数,表示选择文档的一部分以作修改。除了直接应用`set`,应该可以有很多其他操作。现在是时候解锁Typst强大能力了。
这个函数接受一个参数:参数是*未打包*(unpacked)的内容;这个函数返回一个*任意*内容。
以下示例说明它接受一个*未打包*的内容。对于代码片段,我们使用「`show`」语法择区其中第二行:
#code(````typ
#show raw: it => it.lines.at(1)
获取代码片段第二行内容:```typ
#{
set text(fill: true)
}
```
````)
在#link(<content-type-feature>)[《内容类型的特性》]中,我们所接触到的*已经打包*(packed)的代码片段并不包含`lines`字段。在打包后,内部大部分信息已经被屏蔽了。
以下示例说明它可以返回*任意*内容。这里我们选择语言为`my-calc`的代码片段,执行并返回一个*非代码片段*:
#code(````typ
#show raw.where(lang: "my-calc"): it => eval(it.text)
嵌入一个计算器语言,计算`1*2+2*(2+3)`:```my-calc 1*2+2*(2+3)```
````)
由于`show`的右半部分只要求接受内容并返回内容,我们可以有非常优雅的写法,使用一些天然满足要求的函数。
以下规则将每个代码片段用方框修饰:
#code(````typ
#show raw: rect
``` QwQ ```
````)
以下规则将每个代码片段用蓝色方框修饰:
#code(````typ
#show raw: rect.with(stroke: blue)
``` QwQ ```
````)
// == 内容的「实例化过程」
// 通过`query`我们获得同一个内容上更多的信息,即「样式」属性,即内容上的那些可选函数参数。
// 根据上述例子,我们来理解为什么它只提供了语法属性。假设只看`= 123`这5个字符,显然我们从*语法*上只能获得两个信息:
// + 它是一级标题。
// + 它的内容是`123`。
// 与之相对,当一个标题真正被放置到一个具体的「上下文」中时,才能真正关联与之相关的样式属性。例如,标题的`numbering`字段是与上下文相关的。
// - location()
// == import/include/styled
// == 「`include`」语法 <grammar-include>
// 介绍`read`,`eval(mode)`。
// 路径分为相对路径和绝对路径。如果是相对路径,`read("other-file.typ")`相当于在*当前*文件夹寻找对对应的文件。
// `include`的本质就是`eval(read("other-file.typ", mode: "markup"))`,获得一个「内容」,*插入到原地*。
// 假设我们有一个文件:
// #code(```typ
// // 以下是other-file.typ文件的内容
// 一段文本
// #set text(fill: red)
// 另一段文本
// ```)
// 那么```typ #include "other-file.typ"```将获得该文件的「内容」,*插入到原地*。
// #code(```typ
// #{
// set text(fill: blue)
// include "other-file.typ"
// }
// #include "other-file.typ"
// ```)
// `include`的文件是一个「内容块」,自带一个作用域。
== 总结
本节仅以文本、代码块和内容块为例讲清楚了文件、作用域、「set」语法和「show」语法。为了拓展广度,你还需要查看《基本参考》中各种元素的用法,这样才能随心所欲排版任何「内容」。
== 习题
// == 字数统计
// 从一个典型程序开始,这个程序基本解决我们一个需求:完成一段内容的字数统计。按照惯例,这一个程序涉及了本节所有的知识点。
// ```typ
// #let plain-text(it) = {
// if it.has("children") {
// ("", ..it.children.map(plain-text)).join()
// } else if it.has("child") {
// plain-text(it.child)
// } else if it.has("body") {
// plain-text(it.body)
// } else if it.has("text") {
// it.text
// } else if it.func() == smartquote {
// if it.double { "\"" } else { "'" }
// } else {
// " "
// }
// }
// ```
// 以及基于其上实现一个字数统计函数:
// ```typ
// #let word-count(it) = {
// plain-text(it).replace(regex("\p{hani}"), "\1 ").split().len()
// }
// ```
// 以下是该函数的表现:
// #code.with(scope: code-scope)(```typ
// #let show-me-the(it) = {
// repr(plain-text(it))
// [ 的字数统计为 ]
// repr(word-count(it))
// }
// #show-me-the([])\
// #show-me-the([一段文本]) \
// #show-me-the([A bc]) \
// #show-me-the([
// - 列表项1
// - 列表项2
// ])
// ```)
#let plain-text(it) = {
if it.has("children") {
("", ..it.children.map(plain-text)).join()
} else if it.has("child") {
plain-text(it.child)
} else if it.has("body") {
plain-text(it.body)
} else if it.has("text") {
it.text
} else if it.func() == smartquote {
if it.double {
"\""
} else {
"'"
}
} else {
" "
}
}
#let word-count(it) = {
plain-text(it).replace(regex("\p{hani}"), "\1 ").split().len()
}
#let code-scope = (plain-text: plain-text, word-count: word-count)
#let q1 = ````typ
#let plain-text(it) = {
if type(it) == str {
it
} else if it.has("children") {
("", ..it.children.map(plain-text)).join()
} else if it.has("child") {
plain-text(it.child)
} else if it.has("body") {
plain-text(it.body)
} else if it.has("text") {
it.text
} else if it.func() == smartquote {
if it.double {
"\""
} else {
"'"
}
} else {
" "
}
}
#let main-typ() = {
heading("生活在Content树上")
{
[现代社会以海德格尔的一句]
[“一切实践传统都已经瓦解完了”]
[为嚆矢。]
} + parbreak()
[...] + parbreak()
[在孜孜矻矻以求生活意义的道路上,对自己的期望本就是在与家庭与社会对接中塑型的动态过程。]
[而我们的底料便是对不同生活方式、不同角色的觉感与体认。]
[...]
}
#plain-text(main-typ())
````
#exercise[
实现内容到字符串的转换`plain-text`:对于文中出现的`main-typ()`内容,它输出:#rect(width: 100%, eval(q1.text, mode: "markup"))
][
#q1
]
#let q1 = ````typ
#let plain-text(it) = {
if type(it) == str {
it
} else if it.has("children") {
("", ..it.children.map(plain-text)).join()
} else if it.has("child") {
plain-text(it.child)
} else if it.has("body") {
plain-text(it.body)
} else if it.has("text") {
it.text
} else if it.func() == smartquote {
if it.double {
"\""
} else {
"'"
}
} else {
" "
}
}
#let word-count(it) = {
plain-text(it).replace(regex("\p{hani}"), "\1 ").split().len()
}
#let main-typ() = {
heading("生活在Content树上")
{
[现代社会以海德格尔的一句]
[“一切实践传统都已经瓦解完了”]
[为嚆矢。]
} + parbreak()
[...] + parbreak()
[在孜孜矻矻以求生活意义的道路上,对自己的期望本就是在与家庭与社会对接中塑型的动态过程。]
[而我们的底料便是对不同生活方式、不同角色的觉感与体认。]
[...]
}
#word-count(main-typ())
````
#exercise[
实现字数统计`word-count`:对于文中出现的`main-typ()`内容,它输出:#rect(width: 100%, eval(q1.text, mode: "markup"))
][
#q1
]
#exercise[
思考题:`plain-text`有何局限性?为什么在`show`规则影响下,`word-count`输出分别为4和5?
#code.with(scope: code-scope)(```typ
#let show-me-the(it) = {
it + [ 的字数统计为#word-count(it) ]
}
#show-me-the([#show raw: it => {"123"; it}; `一段文本`]) \
#show-me-the([#show: it => {"123"; it}; 一段文本])
```)
][
#q1
]
|
https://github.com/soul667/typst | https://raw.githubusercontent.com/soul667/typst/main/PPT/MATLAB/touying/docs/i18n/zh/docusaurus-plugin-content-docs/current/dynamic/cover.md | markdown | ---
sidebar_position: 4
---
# Cover 函数
正如您已经了解的那样,`uncover` 和 `#pause` 均会使用 `cover` 函数对不显示的内容进行遮盖。那么,这里的 `cover` 函数究竟是什么呢?
## 默认 Cover 函数:`hide`
`cover` 函数是保存在 `s.methods.cover` 的一个方法,后续 `uncover` 和 `#pause` 均会在这里取出 `cover` 函数来使用。
默认的 `cover` 函数是 [hide](https://typst.app/docs/reference/layout/hide/) 函数,这个函数能将内部的内容更改为不可见的,且不会影响布局。
## 更新 Cover 函数
有的情况下,您想用您自己的 `cover` 函数,那么您可以通过
```typst
#let s = (s.methods.update-cover)(self: s, is-method: true, cover-fn)
```
方法来设置您自己的 `cover` 函数,其中如果设置 `is-method: false`,则 Touying 会帮您将 `cover-fn` 包装成一个方法。
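
例如,下面把被遮盖的内容简单地淡化为灰色(仅作示意,`my-cover` 这个函数名是任意取的):

```typst
#let my-cover(body) = text(gray, body)
#let s = (s.methods.update-cover)(self: s, is-method: false, my-cover)
```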
## 半透明 Cover 函数
Touying 提供了半透明 Cover 函数的支持,只需要加入
```typst
#let s = (s.methods.enable-transparent-cover)(self: s)
```
即可开启,其中你可以通过 `alpha: ..` 参数调节透明度。
:::warning[警告]
注意,这里的 `transparent-cover` 并不能像 `hide` 一样不影响文本布局,因为里面有一层 `box`,因此可能会破坏页面原有的结构。
:::
:::tip[原理]
`enable-transparent-cover` 方法定义为
```typst
#let s.methods.enable-transparent-cover = (
self: none,
constructor: rgb,
alpha: 85%,
) => {
self.methods.cover = (self: none, body) => {
utils.cover-with-rect(
fill: utils.update-alpha(
constructor: constructor,
self.page-args.fill,
alpha,
),
body
)
}
self
}
```
可以看出,其是通过 `utils.cover-with-rect` 创建了一个与背景色同色的半透明矩形遮罩,以模拟内容透明的效果,其中 `constructor: rgb` 和 `alpha: 85%` 分别表明了背景色的构造函数与透明程度。
::: |
|
https://github.com/Mouwrice/resume | https://raw.githubusercontent.com/Mouwrice/resume/main/modules/about_me.typ | typst | #import "../brilliant-CV/template.typ": *
#cvSection("About Me")
Hi, I am a master student in Computer Science Engineering at the university of Ghent with a bachelor in Computer Science. I have an interest in developing correct, secure, and efficient software and I am on the lookout for new experiences!
I was born and raised at the lovely Belgian Coast, in Ostend. With the big sand beaches and North Sea becoming my second home. Now, after almost finishing my studies in the city of Ghent, I intend to move to Ghent where currently most of my activities take place.
|
|
https://github.com/Toniolo-Marco/git-for-dummies | https://raw.githubusercontent.com/Toniolo-Marco/git-for-dummies/main/book/components/gh-button.typ | typst | #let dropdown_icon = "▼"
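// Usage sketch (argument order: img, btn_text, fill_color, text_color, stroke_color, separator).
// `img` must be raw image data (e.g. from `read(...)`); the icon path and hex colours below are placeholders:
//
//   #gh_button(read("icons/code.svg"), "Code", "#21262d", "#c9d1d9", "#8b949e", true)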
#let gh_button = (img,btn_text, fill_color, text_color, stroke_color, separator) =>{
box(
fill: rgb(fill_color),
inset: 7pt,
baseline: 35%,
radius: 4pt,
stroke: rgb(stroke_color)+1pt
)[
#stack(dir:ltr, spacing: 15pt,
image.decode(img, width: 12pt),
text(
stroke: rgb(text_color),
font: "Noto Sans",
size: 10pt,
weight: "thin",
tracking: 1pt,
baseline: 1pt)[
#btn_text
],
if separator == true [
#box(
fill: none,
height: 0pt,
inset: 0pt,
radius: 0pt)[
#text(
stroke: rgb(stroke_color),
font: "Noto Sans",
fill: rgb(text_color),
size: 24pt,
weight: "thin",
baseline: -5pt)[|]
]
],
text(
stroke: rgb(stroke_color),
fill: rgb(stroke_color),
size: 12pt,
weight: "thin",
baseline: 0pt)[#dropdown_icon]
)
]
} |
|
https://github.com/sabitov-kirill/comp-arch-conspect | https://raw.githubusercontent.com/sabitov-kirill/comp-arch-conspect/master/questions/3_functional_schemes.typ | typst | #heading[Функциональные схемы.]
#emph[Функциональные схемы. Комбинационные схемы (мультиплексор, демультиплексор, дешифратор, сумматоры). Последовательные схемы (RS-триггер, JK-триггер, T-триггер, D-триггер). Схемы для выполнения арифметических операций (каскадный сумматор, АЛУ).]
#set par(justify: true)
#import "/commons.typ": imagebox
== Комбинационные схемы
=== Мультиплексор
_Мультиплексор_ --- схема, позволяющая передавать сигнал с одного из $2^n$ входов на единственный выход. При этом выбор желаемого входа осуществляется подачей соответствующей комбинации управляющих сигналов. Для этого на вход также подаются $n$ бит.
#columns(2)[
#imagebox("mux_outside.png", height: 170pt)
#colbreak()
#imagebox("mux_inside.png", height: 170pt)
]
Внутренняя реализация для $2^n$ значений использует n инверторов, $2^n$ ($n+1$)-битных and'ов и единственный $2^n$-битный or.
=== Шифратор
_Шифратор_ --- схема из функциональных элементов, которая позволяет по $n$-битному двоичному числу, в котором установлен ровно 1 бит, получить номер этого бита. Таким образом, при подаче сигнала на один из n входов (обязательно на один, не более) на выходе появляется двоичный код номера активного входа.
#columns(2)[
#imagebox("encoder_outside_scheme.jpg", height: 100pt, label: [Обозначение шифратора на схемах.])
#colbreak()
#imagebox("encoder_outside.png", height: 100pt, label: [Таблица истинности шифратора.])
]
#columns(2)[
#imagebox("encoder_inside.jpg", width: 270pt);
#colbreak()
_Внутреннее устройство схемы_ может показаться запутанным. На самом деле, чтобы её реализовать, достаточно посмотреть на двоичную запись чисел от 0 до $n-1$:
#columns(2)[
$0_10 = 000_2$\
$1_10 = 001_2$\
$2_10 = 010_2$\
$3_10 = 011_2$
#colbreak()
$4_10 = 100_2$\
$5_10 = 101_2$\
$6_10 = 110_2$\
$7_10 = 111_2$
]
]
- При подаче бита 0 не требуется устанавливать никакой бит, этот провод не участвует.
- При подаче бита 1 требуется установить только нулевой бит, поэтому провод идёт в $"or"_0$.
- При подаче бита 2 требуется установить только первый бит, поэтому провод идёт в $"or"_1$.
- При подаче бита 3 требуется установить биты 0 и 1, поэтому провод идёт в $"or"_0$ и $"or"_1$.
Реализация более сложных шифраторов $forall n in NN$ проводится по такому же принципу.
=== Дешифратор
_Дешифратор_ --- схема из функциональных элементов, имеющая $n$ входов и $2^n$ выходов. Позволяет по $n$-битному двоичному числу установить единицу именно в тот выход, номеру которого соответствует двоичное число на входе.
#columns(2)[
#imagebox("decoder_outside.png", height: 150pt)
#colbreak()
#imagebox("decoder_inside.png", height: 130pt)
]
Внутренняя реализация для $n$ входов использует n инверторов и $2^n$ $n$-битных and'ов. Легко заметить, что никакие 2 выхода одновременно не могут быть выставлены как 1.
=== Демультиплексор
#emph[Демультиплексор] --- это логическое устройство, предназначенное для переключения сигнала с ровно одного информационного входа на один из информационных выходов. Таким образом, демультиплексор в функциональном отношении противоположен мультиплексору.
Схема демультиплексора легко строится, если вы ознакомились со схемой дешифратора. Достаточно в каждый из and'ов подключить бит, который мы передаём. Таким образом схема принимает вид, подозрительно напоминабщая дешифратор:
#columns(2)[
#imagebox("demux_outside.png", height: 150pt)
#colbreak()
#imagebox("demux_inside.jpg", height: 150pt)
]
== Сумматоры
#v(10pt)
#columns(2)[
#align(center)[
*Частичный сумматор*
#imagebox("half_adder.png", height: 70pt)
]
#colbreak()
#align(center)[
*Полный сумматор*
#imagebox("full_adder.png", width: 200pt)
]
]
#emph[Частичный сумматор] реализован просто: бит результата получается, как _xor_ входных битов. Бит переноса же устанавливается только тогда, когда оба входных бита установлены.
#emph[Полный сумматор] предназначен для суммирования длинных чисел (длина > 1), так как в нём бит переноса может приходить из предыдущего разряда в следующий. Заметим в схеме аспекты:
+ Результирующий бит числа получается, как _xor_ $i$-х битов с $(i-1)$-м битом переполнения (тем, который пришел с предыдущего разряда).
+ Бит переполнения устанавливается в тех случаях, если:
- входные биты установлены ($A = 1 and B = 1$);
- входные биты различны $(A = 1 and B = 0) or (A = 0 and B = 1)$ и бит переполения установлен ($C = 1$).
Таким образом, полный сумматор корректен. Подлючив несколько таких последовательно, мы можем складывать $n$-битный числа $forall n in NN$.
== Последовательные схемы
_Последовательной_ логической схемой называется схема с памяьтю.
=== RS-триггер
#columns(2)[
#imagebox("rs-trigger_assync.jpg", height: 170pt, label: [
ассинхронный #emph[RS-триггер]
])
#colbreak()
#imagebox("rs-trigger_sync.gif", height: 170pt, label: [
синхронный #emph[RS-триггер]
])
]
Принцип работы _RS-триггера_ заключается в том, что он может сохранять своё предыдущее состояние пока оба входа неактивны и изменять его при подаче на один из входов единицы. При подаче единицы на оба входа состояние триггера вообще говоря неопределено.
У _RS-триггера_ есть 2 возможных состояния:
- Если подать на $S$ (set) 1, а на $R$ (reset) 0, то $Q$ станет 1, а $not Q$ станет 0.
- Если подать на $S$ (set) 0, а на $R$ (reset) 1, то $Q$ станет 0, а $not Q$ станет 1.
Такие значения сохранятся даже, когда $R$ и $S$ переключатся на 0.
Существуют два способа реализации #emph[RS-триггера]: _синхронный_ и _асинхронный_. Отличие у них лишь в том, что в случае _синхронного RS-триггера_ значения на $Q$ и $not Q$ не изменятся, пока не будет подан сигнал синхронизации $C$, в то время как у _асинхронного RS-триггера_ значения меняются сразу при изменении $R$ или $S$.
#pagebreak()
=== JK-триггер
#imagebox("jk-trigger.png", height: 170pt, label: [JK-триггер])
Работа _JK-триггера_ практически совпадает с тем, как работает _синхронный RS-триггер_. Для того чтобы исключить запрещённое состояние(когда $R = 1 and S = 1$), его схема изменена таким образом, что при подаче двух единиц _JK-триггер_ инвертирует хранимое значение: $Q$ превращается в $not Q$, а $not Q$ в $Q$.
=== T-триггер
#imagebox("t-trigger.png", height: 135pt, label: [T-триггер])
_T-триггер_ --- это _JK-триггер_, в который на $J$ и $K$ подаются *только одинаковые* значения. Таким образом, мы можем только инвертировать хранимый бит.
=== D-триггер
#imagebox("d-trigger.png", height: 135pt, label: [D-триггер])
_D-триггер_ --- это _JK-триггер_, в который на $J$ и $K$ подаются *только различные* значения. Таким образом, мы можем только устанавливать хранимый бит нулём либо единицей.
#pagebreak()
== Схемы для выполнения арифметических операций
=== Каскадный сумматор
_Каскадный сумматор_ --- логическая схема, осуществляющая сложение многоразрядных двоичных чисел. Реализуется простой цепочкой полных однобитных сумматоров.
#imagebox("cascade_adder.png", height: 100pt)
=== АЛУ (арифметико-логическое устройство)
#imagebox("alu_outside.png", height: 150pt)
_Арифметико-логическое устройство_ --- блок процессора, который служит для выполнения арифметических и логических преобразований (начиная от элементарных) над данными, называемыми в этом случае операндами.
#imagebox("alu_inside.png", height: 300pt)
Легко заметить, внутреннее устройство _АЛУ_ элементарно и крайне понятно. Если вы по каким то причинам не понимаете, как работает данная логическая схема, авторы статьи коллективно рекомендуют вам пойти и написать свой 32-х разрядный многотактовый процессор на архитектуре MIPS, используя язык :sparkles: #text(red)[verilog] :sparkles:.
|
|
https://github.com/VisualFP/docs | https://raw.githubusercontent.com/VisualFP/docs/main/SA/design_concept/content/poc/options_compiler_other.typ | typst | #import "../../../acronyms.typ": *
= Other Haskell Compiler-Platforms
Outside of the ubiquitous #ac("GHC"), described in @ghc, a few other Haskell
compilers were considered for this project. The most notable ones are:
_Hugs_, which is a compiler that provides an almost complete implementation
of Haskell 98 @hugs-compiler.
Unfortunately, Hugs is not actively maintained anymore @hugs-compiler
and was therefore not considered further.
Another Haskell compiler platform is the _Haskell Suite_, which
is a collection of tools and libraries that aim to implement a complete Haskell
compiler @haskell-suite.
The #ac("AST") interpreter is provided on hackage as the package
`haskell-src-exts`
#footnote("https://hackage.haskell.org/package/haskell-src-exts"). After
creating some example programs with it, it seems that the #ac("API") is
quite nice to use.
Unfortunately, the Haskell Suite is also not actively developed anymore, and
is currently on maintenance support @haskell-suite-maintenance. |
|
https://github.com/darkMatter781x/OverUnderNotebook | https://raw.githubusercontent.com/darkMatter781x/OverUnderNotebook/main/entries/entries.typ | typst | #include "./intro/intro.typ"
#include "./structure/structure.typ"
#include "./driver/driver.typ"
#include "./odom/odom.typ"
#include "./pros/pros.typ"
#include "./auton/disrupt/disrupt.typ"
#include "./auton/close-awp/close-awp.typ"
#include "./auton/far-awp/far-awp.typ"
#include "./auton/pre-skills/pre-skills.typ"
#include "./auton/skills/skills.typ"
#include "./boomerang/boomerang.typ"
#include "./versioning/versioning.typ"
#include "./pure_pursuit/pure_pursuit.typ"
#include "./lift/lift.typ" |
|
https://github.com/coljac/typst-dnd5e | https://raw.githubusercontent.com/coljac/typst-dnd5e/main/README.md | markdown | MIT License | # Typst DND5E template
This is a [Typst](https://typst.app) template for DND 5E content, suitable for [DMs Guild](https://www.dmsguild.com) and the like.
The template is called "dragonling" and can be imported as: `#import "@preview/dragonling:0.1.0": *`
See the [example](https://github.com/coljac/typst-dnd5e) which should mostly be self explanatory - it includes examples of tables, stat blocks and breakout boxes, and should serve as a good starting point for your own content.

## Basic usage
The `dndmodule` template sets up your document for you. The arguments you may want to specify up front are as follows:
- `title`: The document's title, this will be rendered as text. Omit if you already have cover art with the title.
- `subtitle`: A slug line for down the bottom of the front cover.
- `author`: Your name.
- `cover`: An `image` to use on the front cover
- `fancy_author`: This will put the author's name in that red flame thingy that D&D books tend to have.
- `logo`: Supply an `image` to put the logo on the front page.
- `font_size`: Defaults to `12pt`.
- `paper`: Defaults (sensibly) to `a4` (Americans, you might want `us-letter`).
From there, just about everything you need can be done with basic Typst markup. Some convenience functions are provided in the template:
`dnd`: Prints "Dungeons & Dragons" in small caps, as required per the official style guide.
`dndtab(name, columns: (1fr, 4fr), ..contents)`: A table with the conventional formatting. Defaults to 2 columns with ratio 1:4 as shown.
`breakoutbox(title, contents)`: Inserts a box with coloured background, and the optional title in small caps.
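
For example, a quick table plus a read-aloud box might look like this (names and contents are purely illustrative):

```
#dndtab(
  "Trinkets",
  columns: (1fr, 4fr),
  [d4], [Trinket],
  [1], [A tarnished copper ring],
  [2], [A vial of murky seawater],
)

#breakoutbox("Read Aloud")[
  The cellar door creaks open, revealing a stairway that descends into darkness.
]
```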
`statbox(stats)`: Accepts a dictionary with the following format. The `skillblock` and `traits` can contain arbitrary keys. After the traits, any of "Actions", "Reactions", "Limited Usage", "Equipment", or "Legendary Actions" will be subsequently shown if present.
```
#statbox((
name: "Creature name",
description: [Size creature, alignment],
ac: [20 (natural armor)],
hp: [29 (1d10 + 33)],
speed: [10ft, climb 10ft.],
stats: (STR: 13, DEX: 14, CON: 18, INT: 5, WIS: 4, CHA: 7), // Modifiers will be auto-calculated
skillblock: (
Skills: [Perception +6, Stealth +5],
Senses: [passive Perception 13],
Languages: [Gnomish],
Challenge: [5 (1800 XP)]
),
traits: (
("Trait name", [Trait desription]),
// ..
("Trait name", [Trait desription])
),
Actions: (
("Multiattack", [While the monster remains alive, it is a thorn in the party's side.]),
("Saliva", [If a character is eaten by the monster, it takes 1d10 saliva damage per round.]),
("Tentacle squeeze", [If the monster has captured an enemy, it can squeeze them for 1d12 crushing damage.])
)
))
```
`spell`: Accepts a dictionary as follows; the properties are all optional:
```
#spell((
name: "",
spell_type: [2nd level ...],
properties: (
("Casting time", []),
("Range", []),
("Duration", []),
("Components", []),
),
description: [Spell effects description]
)
)
```
## Page-wide images and tables
Typst currently has one significant limitation: it is not possible in 2-column mode to include a figure that spans both columns and floats (see [this issue](https://github.com/typst/typst/issues/553)).
I am hopeful for a fix in the near future. Apart from simply using one column throughout, the workaround for the time being is to insert a page break, start a new page with 1 column, insert a floating image, then continue with the text in a 2-column block. A helper function is included, to be used like so:
```
// A page break will happen here.
#pagewithfig(top, image("images/my_image.png", width: 100%))[
The text that appears on the page goes here; you can include more text that will keep flowing, up to the end of the document, but you will need to close the content block if you need another image page.
]
```
However, you may have best results fiddling around with it yourself manually.
The downside with this method is that you must explicitly divide the content into pages in the right amounts, so that there is not a huge amount of whitespace on the page preceding the one with the image. This is only workable once the content is more or less finalized.
When this issue is fixed in Typst, it should just work without any need for this template to be updated.
## Acknowledgements
Inspiration from the [DND LaTeX module](https://github.com/rpgtex/DND-5e-LaTeX-Template).
|
https://github.com/fredguth/abnt-typst | https://raw.githubusercontent.com/fredguth/abnt-typst/main/cap3.typ | typst | #pagebreak(to:"odd")
= \... e o imaginário.
== Considerações teóricas
No livro *Campos do imaginário*, <NAME> (1996, p. 231) conceitua o imaginário como "museu, reserva de museu, do conjunto de todas as imagens passadas e possíveis produzidas pelo *homo sapiens sapiens*". Nessa mesma página, Durand afirma que o imaginário não é "uma disciplina, mas um tecido conjuntivo 'entre' as disciplinas, o reflexo -- ou a 'reflexão'? -- que acrescenta ao banal significante os significados, o apelo do sentido".
Portanto, à pergunta "o que é o imaginário", cabem essas duas "respostas" que consistem, respectivamente, na definição de imaginário aqui adotada e na justificativa para a inclusão do estudo do imaginário.
Diferentemente da opção feita no capítulo 1, apenas pela imagem visual, no conceito de Aumont (2011), nos estudos do imaginário, todas as imagens possíveis produzidas e a serem produzidas são evocadas. Isso equivale a dizer que as imagens mentais -- poéticas e oníricas -- têm sua ocorrência registrada como de suma importância.
Nesse contexto, o estudo da imagem (capítulo 1) levou ao estudo do imaginário porque a compreensão das imagens, do seu estatuto no Ocidente e dos seus sentidos e funções passa necessariamente pela consideração da importância de conhecer os estudos sobre o imaginário e, mais que isso, pelo que Durand (1996, p. 232) chamou de "gigantesca inversão desses valores imaginários abandonados" -- revolução que será detalhada a seguir.
Já destacamos quão rodeados de imagens vivemos na atualidade, e a fotografia de <NAME> é uma das incontáveis produções da mídia que se expõem aos nossos olhares. A presença constante e ininterrupta dessas imagens na vida atual faz-nos pensar -- conforme defende Durand (1996; 1998) -- em como ela se difundiu e trouxe consigo a emergência "de um imaginário rico na sua pluralidade e sistémico \[que\] 'injecta-se', a pouco e pouco, em todas as disciplinas" (DURAND, 1996, p. 232).
Mas não foi sempre que imagem e imaginário foram acionados como via de acesso ao conhecimento de algum fenômeno na "galáxia de Gutenberg", isto é, na sociedade em que se observava a supremacia da imprensa e da comunicação escrita. Durand (1998, p. 7) chama a atenção para o recorrente comportamento iconoclasta das sociedades ocidentais: "O Ocidente, isto é, a civilização que nos sustenta a partir do raciocínio socrático e seu subsequente batismo cristão, além de desejar ser considerado, e com muito orgulho, o único herdeiro de uma única Verdade, quase sempre desafiou as imagens".
Durante séculos -- a partir de Aristóteles (século IV a.C.) --, conhecer a "verdade" era sinônimo de adquirir conhecimento pela experiência dos fatos. Desde então,
> \[\...\] a imagem, que não pode ser reduzida a um argumento "verdadeiro" ou "falso" formal, passa a ser desvalorizada, incerta e ambígua, tornando-se impossível extrair pela sua percepção \[\...\] uma única proposta "verdadeira" ou "falsa" formal. A imaginação, portanto, \[\...\] é suspeita de ser "a amante do erro e da falsidade". A imagem pode se desenovelar dentro de uma descrição infinita e uma contemplação inesgotável. Incapaz de permanecer bloqueada no enunciado claro de um silogismo, ela propõe uma "realidade velada" enquanto a lógica aristotélica exige "claridade e diferença". (DURAND, 1998, p. 10 -- grifos do autor)
Mais tarde, a partir do século XVII, com o advento do método científico "para descobrir *a* verdade nas ciências" (conforme propôs Descartes, em 1637, no seu famoso *Discurso do método*), reforçou-se o abandono da imagem.
E no século XVIII, o empirismo factual se configurou como mais um obstáculo para um imaginário "cada vez mais confundido com o delírio, o fantasma do sonho e o irracional" (DURAND, 1998, p. 14).
O século XIX trouxe consigo o positivismo e a desvalorização completa do imaginário, do pensamento simbólico e da metáfora.
> Qualquer "imagem" que não seja simplesmente um clichê modesto de um fato passa a ser suspeita. Neste mesmo movimento as divagações dos "poetas" (que passarão a ser considerados os "malditos"), as alucinações e os delírios dos doentes mentais, as visões dos místicos e as obras de arte serão expulsas da terra firme da ciência. (DURAND, 1998, p. 15 -- grifos do autor)
Ocorre, porém, que a rejeição tenaz dos valores e poderes do imaginário em favor da razão -- ou todo esse reforço ao conhecimento tributário do cientificismo e do historicismo dos últimos três séculos -- redundou justamente em enorme avanço técnico-científico, responsável pelas descobertas que permitiram a constelação daquilo que ficou conhecido como a civilização da imagem.
Estamos falando da química dos suportes (Niépce, <NAME>, <NAME>, <NAME>) e da física das comunicações (<NAME>, <NAME>, <NAME>, <NAME>, <NAME>), isto é, do surgimento das imagens técnicas -- fotografia e cinema --, da radiodifusão e da comunicação digital. Mas estamos falando também de <NAME> (1856-1939) e sua descoberta do inconsciente, que comprovou o papel decisivo das imagens como mensagens que afloram em forma de símbolos e promovem uma tomada de consciência por parte do indivíduo; e de <NAME> (1875-1961) e sua teoria dos arquétipos -- imagens primordiais originadas de uma repetição progressiva de uma mesma experiência durante muitas gerações, armazenadas no inconsciente coletivo.
Para Durand (1998), nisso consiste o paradoxo do imaginário no Ocidente, ou seja, por mais que imagem e imaginário tenham sido sistematicamente excluídos como via de acesso ao conhecimento, essa mesma relutância impulsionou um progresso técnico-científico que criou todas as condições para que nos tornássemos uma civilização que muito depende de ambos, mas que continuou a resistir à sua importância, relegando-os ao campo do "distrair".
Houve, porém, movimentos de resistência da imagem e do imaginário em face da iconoclastia do Ocidente. Essa resistência deve enormemente às religiões cristãs, apesar de a Reforma Protestante (século XVI) ter pregado contra a estética da imagem visual e em favor do sacrilégio do culto aos santos -- que resultaram na destruição de estátuas e quadros.
Aos excessos da Reforma, a Contrarreforma (século XVI) respondeu com a supervalorização da imagem barroca (séculos XVI-XVIII) na tentativa de reafirmar os valores cristãos:
> As imagens esculpidas ou pintadas, ou às vezes as imagens pintadas que imitam esculturas à *trompe-l'oeil*, invadem o vasto espaço desocupado das naves das novas basílicas de "estilo jesuíta" e os virtuosismos arquiteturais com os quais o Barroco beneficiará a Europa \[\...\] e que se estenderá durante quase três séculos pela Itália, Europa Central e\... América do Sul. (DURAND, 1998, p. 25 -- grifos do autor)
Em religiões como o Islamismo e o Judaísmo, se, por um lado, rejeitava-se a imagem visual, por outro, a imagem literária e a linguagem musical são muito valorizadas.
> O Islamismo compensava a proibição das imagens pintadas ou esculpidas com poetas de primeira grandeza (Attar, Hafiz, Saadi), a prática de recitais sagrados da música espiritual (*sama*) e a "recitação visionária" por meio de imagens literárias, portanto sem um suporte icônico, que consistia em uma técnica de recondução (*tawil*) à santidade inefável. Da mesma forma há no Judaísmo, ao lado das exegeses puramente legais, uma exegese "poética" das Escrituras (nas quais incluem-se os "livros" poéticos tais como o famoso e tão decantado "Cântico dos Cânticos") e, sobretudo, um investimento religioso na música do culto e mesmo na música denominada profana. (DURAND, 1998, p. 22-23 -- grifos do autor)
Quando da ruptura definitiva com a cristandade medieval, o imaginário encontrou refúgio nos movimentos artísticos pré-românticos e românticos (últimas décadas do século XVIII e grande parte do século XIX), porque "os poetas autenticam o que permanece", conforme observou <NAME>, poeta lírico alemão, depois validado pelos franceses Baudelaire e Rimbaud, este último responsável pela formulação da máxima de que "qualquer poeta tende a tornar-se um visionário" (DURAND, 1998, p. 28).
Simbolismo e surrealismo vieram, em seguida, atribuir maior significado às imagens no Ocidente, o que coincidiu com a valorização e emancipação sociais de pintores, escultores e poetas, ou "fazedores de imagens", como descreve Durand (1996, p. 233).
Daí em diante, diversos campos do conhecimento abriram-se para os estudos da imagem e do imaginário, sendo as letras e as artes, em todos os tempos, o "refúgio tolerado do imaginário", e as ciências duras as últimas a aceitá-los (DURAND, 1996, p. 232). Portanto, não como uma disciplina, mas como um "entre-saberes", o imaginário se faz presente em cada disciplina, levando consigo a possibilidade de se fazer ver por meio dos significantes, das parábolas, dos mitos, dos poemas, mas também do cinema, da fotografia, do vídeo e das imagens de síntese que nos rodeiam insistentemente.
Nesse sentido, imaginário e redes de comunicação têm estreita relação, porque estas, segundo Castells (2015, p. 98-99), "organizam a comunicação socializada", utilizando imagens de todos os tipos com as quais moldam a mente pública; e aquele, por sua vez, emerge dos materiais culturais distribuídos pelas redes de comunicação.
O paradoxo do imaginário no Ocidente (DURAND, 1998) parece evidente em tempos de redes de comunicação: não deixa de ser irônico que todo o esforço feito durante séculos para calar e desautorizar imagem e imaginário tenha produzido um efeito contrário, de supervalorização de crenças, valores e comportamentos traduzidos em imagens que circulam pelas redes de comunicação com uma liberdade sem precedentes. Mais que isso, conectam o local com o global e alcançam os recantos mais improváveis, estimulando movimentos migratórios, por exemplo, como o que levou <NAME> à morte trágica, mas também "projetos e valores alternativos propostos pelos atores sociais que têm como objetivo reprogramar a sociedade" (CASTELLS, 2015, p. 99), como a fotografia de Demir e as manifestações artísticas que deram à catástrofe a visibilidade que estatísticas, documentos oficiais e notícias não conseguiram.
== O naufrágio no imaginário coletivo
> No ano seiscentos da vida de Noé, no mês segundo, aos dezessete dias do mês, naquele mesmo dia se romperam todas as fontes do grande abismo, e as janelas dos céus se abriram,\
> \[\...\]
>
> E durou o dilúvio quarenta dias sobre a terra, e cresceram as águas e levantaram a arca, e ela se elevou sobre a terra.\
> E prevaleceram as águas e cresceram grandemente sobre a terra; e a arca andava sobre as águas.\
> \[\...\]
>
> Assim foi destruído todo o ser vivente que havia sobre a face da terra, desde o homem até ao animal, até ao réptil, e até à ave dos céus; e foram extintos da terra; e ficou somente Noé, e os que com ele estavam na arca.
>
> (BÍBLIA, Gênesis 7:11-23)
Segundo Annie <NAME> (2016, p. 46), "\[\...\] ainda que vista como punição divina que serve para confortar a ordem cristã, não há fim do mundo que não remeta a essa necessidade de figurar um caos, cuja emergência é, para nós, sempre esperada e temida". A catástrofe e o sentimento que desperta são, nesse sentido, "a primeira fenda do imaginário no mais profundo de nós (LE BRUN, 2016, p. 46).
> \[\...\] do caos ao Apocalipse, do Dilúvio ao fim dos tempos, da torre de Babel ao Ano mil, da desordem que engendra a ordem nos mitos fundadores à tábula rasa que conduz à "grande noite", inúmeras são as construções imaginárias que remetem à catástrofe como a uma constante em torno da qual a humanidade buscou se definir, estabelecendo sua relação com o mundo sob o signo do acidental. (LE BRUN, 2016, p. 43)
O dilúvio[^24] constitui a imagem fundadora da catástrofe ocidental. Não é, portanto, de admirar que catástrofes envolvendo naufrágios, tempestades e morte no mar tenham habitado nosso imaginário ao longo dos séculos XVIII, XIX e XX e que se manifestou largamente na literatura e nas artes plásticas do Ocidente, seja na estética barroca, romântica, simbolista ou vanguardista, para então chegar às telas de cinema.
// ![][24]
// []{=_Ref497153814 .anchor}**Figura 6 -- <NAME> <NAME>, Naufrágio de um Cargueiro, 1810**
// Fonte: <NAME>
Essa obra (Figura 6) faz parte de uma série de pinturas de grandes dimensões executadas pelo artista inglês na primeira década do século XIX dedicada à representação de catástrofes naturais e tempestades no mar, iniciada com *Bridgewater, Seapiece* (Coleção particular em depósito na National Gallery, Londres).
> A composição inscreve-se num universo de extrema sensibilidade face à natureza, dentro da melhor tradição inglesa de pintura do género, à qual o tema dos naufrágios, num país marítimo por excelência, foi especialmente grato. Turner não só absorve o legado da lição holandesa -- de <NAME>de, o Jovem, em particular -- como associa à sua expressão pictórica o peso do imaginário coletivo da época, vivido pelos seus contemporâneos de forma verdadeiramente emotiva e obsessiva.[^25]
Os relatos de viagens são antigos e sempre despertaram muito interesse, assim como estimularam o imaginário coletivo. Esses relatos se tornaram populares na Europa dos séculos XVI e XVII em virtude do comércio marítimo com o Oriente, fonte de curiosidade e de narrativas do gênero "maravilhoso", aqui compreendido como o registro em que o sobrenatural se une de forma harmoniosa à realidade para encantar o leitor.[^26]
Em consequência dessa popularização, os relatos de naufrágios surgiram em todos os países envolvidos no comércio marítimo e também conquistaram o público. Essa literatura tinha por objetivo exaltar e constituir a identidade dos países navegadores -- dentre os quais Portugal tem especial destaque -- e foi impulsionado pela disseminação do texto impresso -- também ocorrido entre a segunda metade do século XVI e início do século XVII -- a partir do surgimento de numerosas tipografias que se estabeleceram bem cedo em Portugal e trataram de publicar os relatos em forma de libretos baratos, acessíveis, portanto, a grande número de leitores.
<NAME> (2005, p. 28), a mais plausível explicação para o sucesso e o interesse que essas narrativas despertavam é "o gosto por histórias trágicas, profundamente arraigado no imaginário coletivo, que, em contato com os acontecimentos reais, se potencializa em uma percepção catastrófica do tempo".
As narrativas se confirmaram como gênero literário no século XVIII e, para Madeira (2005),
> \[\...\] prenunciam, de muitas maneiras, uma matriz estética barroca, com estratégias próprias de ficcionalização e estilização do real. Plasmada por meio de figuras incongruentes e formas descontínuas, a arte barroca atualiza as novas concepções espaciais que modelaram a sensibilidade moderna. O regime de imagens prevalecente nos relatos de naufrágios -- reiteradas e marcantes alegorias -- pode ser uma via de acesso privilegiada ao imaginário social da época que permita a compreensão de como uma sociedade delira, como metaforiza suas obsessões e seus medos. (MADEIRA, 2005, p. 36-37)
"Um grau excepcional de universalidade", conforme afirma Madeira (2005, p. 37), explica o enorme fascínio que esse tipo de narrativa exerce sobre indivíduos de qualquer época -- antes ou depois das grandes navegações. E as imagens de naufrágios são sempre muito carregadas de emoção. Assim descreveu Camões, no final do Canto I, de *Os Lusíadas*:
> *No mar tanta tormenta e tanto dano,*
>
> *Tantas vezes a morte apercebida!*
>
> *Na terra tanta guerra, tanto engano,*
>
> *Tanta necessidade avorrecida!*
>
> *Onde pode acolher-se um fraco humano,*
>
> *Onde terá segura a curta vida,*
>
> *Que não se arme e se indigne o Céu sereno*
>
> *Contra um bicho da terra tão pequeno*?
Já se vão 115 anos desde o naufrágio do Titanic:
> No momento em que o Titanic terminou de naufragar, às 2h20 do dia 15 de abril de 1912, teve início uma onda de fascínio que se espalharia pelo mundo e continuaria com impressionante força mesmo cem anos após a colisão com o iceberg. Houve desastres marítimos maiores, mais mortais, mais antigos e mais recentes, mas nenhum ocupou o mesmo lugar no imaginário popular como símbolo da incapacidade humana de controlar o universo, ainda que em posse da mais avançada tecnologia.[^27]
Segundo Pécora (2012), uma produção cultural diversificada e numerosa sobre o naufrágio do Titanic está à disposição dos fãs. Destacam-se o lançamento, em 1955, do romance *A Night to Remember*, de <NAME>, que reconstrói a tragédia a partir de depoimentos de sobreviventes; a descoberta dos destroços do navio pelo oceanógrafo <NAME>, em 1985, tema de uma série de documentários; e o sucesso do filme *Titanic*, de <NAME>, que bateu recordes de bilheteria em 1997 e deu ao seu produtor e diretor 11 Oscars em 1998.
Outras tantas tragédias no mar sobrevieram ao longo dos mais de cem anos que separam a fictícia <NAME>, ou <NAME> (sobrevivente do Titanic), de <NAME>, personagens cujos destinos passaram por enfrentar os perigos do mar, com desfechos opostos. Rose sobreviveu e tornou-se famosa e mundialmente conhecida, enquanto Aylan sucumbiu.
Ficção e realidade, em Rose e Aylan, se entrecruzam, alterando nossa percepção. Rose nasceu da imaginação de <NAME> para personalizar a tragédia do Titanic, viver uma curta e emocionante história de amor e sobreviver para dividir com espectadores do mundo inteiro suas recordações de cada detalhe do luxuoso, grande e veloz navio e o desespero de estar a bordo de um transatlântico com cerca de 2.200 pessoas, das quais aproximadamente 1.500 morreram. "Viveu" 101 anos e "morreu" serenamente, reescrevendo a tragédia de forma a permitir um *The End* de possibilidades. Diferentemente de Rose, Aylan não nasceu fruto da imaginação de um artista, viveu 3 anos em meio a uma guerra que já matou cerca de 500 mil sírios[^28] e morreu afogado no Mar Mediterrâneo. Do anonimato para a fama, houve o tempo do clique da câmera fotográfica. A imagem sobreviveu para contar sua história.
Uma criança que atravessa o mar e morre na travessia é uma imagem que choca. E de novo aproxima ficção e realidade, cujas fronteiras, pelo menos no imaginário coletivo, são bastante incertas. A fotografia de Aylan parece flutuar nessas fronteiras. Não se assemelha a uma foto rotineira de guerra, tampouco de catástrofe natural. Não nos parece típica do fotojornalismo, mas é jornalística. Aylan parece dormir, mas já não respira. Tudo aparenta calma em volta dele. Mas o mar continua trazendo mais e mais embarcações com fugitivos de guerra e outras catástrofes.
Um dos mais emblemáticos episódios medievais, datado de 1212, conhecido como a cruzada das crianças, tem também como desfecho o naufrágio de barcos cheios de crianças. A morte de <NAME> evoca a cruzada das crianças, dadas as coincidências que se observam entre ambas, mas também à inversão que <NAME> representa, considerando o imaginário ocidental: morto nas areias de Bodrum, na Turquia, Aylan se lançou ao mar em direção ao Ocidente, enquanto aquelas crianças iam em sentido contrário; ele era um curdo que professava (ou professaria) a religião islâmica, enquanto aquelas eram cristãs que tentavam chegar a Jerusalém para expulsar os muçulmanos da Terra Sagrada.
Nos dois casos, porém, partindo de regiões da Alemanha e da França ou da então desconhecida Kobane, na Síria, essas crianças nunca retornaram para seus lares. Morreram pelo caminho de fome ou de frio, ou afogadas, como Aylan.
Conta <NAME>, em 1896, que no século XIII, na Europa, milhares de crianças cristãs, de cabelos ruivos e olhos verdes, vestidas de branco e com cruzes costuradas nas roupas, atenderam a um chamado e empreenderam uma travessia de milhares de quilômetros, peregrinando nas mais adversas condições climáticas, acreditando-se com o poder divino de libertar Jerusalém do domínio muçulmano, uma vez que quatro cruzadas já haviam falhado nessa missão -- e outras falhariam depois. Elas atravessaram o Mar Mediterrâneo em sete naves, das quais cinco soçobraram nas águas do Recife de Recluso. Sob diferentes visões -- um clérigo cristão, um leproso, o Papa Inocêncio III e o Papa Gregório IX, um escrevente, um monge maometano e as próprias crianças --, foram assim descritas por um goliardo:[^29]
> Aquelas crianças todas me pareceram sem nome. E é certo que Nosso Senhor Jesus tem preferência por elas. Ocupavam a estrada qual enxame de abelhas brancas. Não sei de onde vinham. Eram peregrinos bem pequenos. Traziam cajados de aveleira e bétula. Traziam a cruz ao ombro \[\...\]. São crianças selvagens e ignorantes. Erram rumo a não sei quê. Têm fé em Jerusalém. \[\...\] Elas não alcançarão Jerusalém. Mas Jerusalém as alcançará. (SCHWOB, 2011, p. 24)
Inspiradas em Schwob (1896), outras versões da cruzada das crianças surgiram, dentre as quais *O barco das crianças*, de <NAME> (2016), em linguagem destinada ao público jovem. O personagem Fonchito, já conhecido dos leitores de outra obra de Llosa, em contato com um senhor já velho, ouve a narrativa apaixonada que esse senhor faz da cruzada das crianças. À medida que a história contada a Fonchito se desenvolve, o limite que separa o real da ficção se torna mais e mais impreciso:
> -- Não estou vendo nenhum barco, moço -- atreveu-se a dizer.
>
> -- Não vê porque não apareceu esta manhã, mas se aparecesse provavelmente também não o veria. (p. 11)
>
> -- O senhor fala como se houvesse estado lá, moço, no meio dessas crianças -- disse Fonchito, estranhando. -- Como se tivesse vivido as coisas que conta.
>
> -- De certa maneira pode-se dizer que estive lá, que vivi -- reconheceu o velho, misteriosamente. -- Mas, que importância isso tem, meu jovem amigo? (p. 21)
>
> -- Mas então o senhor estava lá, entre as crianças que partiram nesse barco -- interrompeu Fonchito, maravilhado, com os olhos arregalados. -- Como pode ser, moço? Nesse caso, o senhor seria uma pessoa muito velha, teria centenas de anos de idade. E isso não é possível, ninguém vive tanto tempo.
>
> \[\...\]
>
> -- Desculpe, moço, mas ainda não respondeu à minha pergunta -- insistiu Fonchito, desconcertado. -- O senhor estava lá? Era uma dessas crianças que embarcaram para Jerusalém nesse primeiro barco?
>
> \[\...\]
>
> -- É que, é que\... -- hesitou Fonchito --, desculpe a minha insistência, mas se o senhor estava lá, naquele barco, agora já seria um fantasma, não é mesmo? (p. 27-29)
>
> -- E aquele primeiro barco, no qual o senhor estava? -- perguntou Fonchito, um pouco confuso. -- Porque o senhor foi uma das crianças sorteadas para viajar nele, não é verdade? Pelo menos foi isso que eu entendi. Ou será que estou enganado?
>
> -- Era e não era eu -- disse o velho \[\...\]. (p. 36)
>
> -- Estamos chegando mesmo ao final da história, moço? -- perguntou Fonchito na manhã seguinte.
>
> -- Pois é, estamos, sim -- respondeu. -- Mas não se preocupe. A vida e, principalmente, os livros estão cheios de histórias maravilhosas. Você pode lê-las e, se forem bem contadas, é exatamente como se as vivesse. (p. 83)
Uma história tão antiga, transformada em relato atual, inquieta porque dissolve as fronteiras do real. "Como se" nunca é o real, mas a sua representação, a imagem que construímos, e será mais ou menos impactante, se tornará um apelo mais ou menos irresistível, a depender da forma que lhe será dada de modo a conferir-lhe poder de convencimento ou de persuasão, conforme afirmam o velho senhor de *O barco das crianças* e o próprio Llosa, em outra obra -- não ficcional --, discutida na próxima seção.
== O náufrago Aylan segundo os fazedores de imagem
A tragédia da família Kurdi foi registrada e circulou apoiada nas bases da tecnologia que permite fotografar, mas também no fato de que a imagem, em si, só é terrível porque o horror que ela causa "provém do fato de *nós a olharmos* do seio da nossa liberdade" (BARTHES, 2013, p. 106). Vejamos em que consiste o apelo irresistível dessa fotografia.
Em *Cartas a um jovem escritor*, Llosa (2008, p. 33) se ocupa da forma do romance, "que, por mais paradoxal que pareça, é o atributo mais concreto que ele \[o romance\] possui, já que é através de sua forma que um romance ganha corpo, natureza tangível". Mas Llosa alerta (2008, p. 33): "a separação entre forma e conteúdo \[\...\] é artificial, admissível apenas quando objeto de explanação ou análise, já que o que o romance conta é inseparável da forma como é contado". A relação estreita entre forma e conteúdo é o que determina, para Llosa, se o que o romance conta é crível ou não. Quanto maior o poder de persuasão de um romance, melhor é o romance, conclui o prêmio Nobel de Literatura.
> Para dotar um romance de 'poder de persuasão', é preciso contar a sua história de modo a tirar o máximo de proveito das vivências implícitas na trama e nos personagens, conseguindo transmitir ao leitor uma ilusão de sua autonomia com relação ao mundo real em que reside quem o lê. \[\...\] Os bons romances -- os de peso -- não parecem contar uma história, mas nos fazer vivê-la, compartilhá-la, graças à persuasão de que se acham dotados. (LLOSA, 2008, p. 35-36)
Essa é uma discussão bastante técnica sobre o texto ficcional, obra de alguém que reveste a ficção de uma soberania que é sempre figurada, porque é sempre ficção, o que é o mesmo que dizer que o que existe é "uma ilusão de soberania", "uma impressão de independência, de emancipação do real". Esse alguém é responsável por ligar os romances ao mundo "por um cordão umbilical" (LLOSA, 2008, p. 38).
Vamos aqui retomar a ideia central de Llosa expressa na fala do velho senhor de *O barco das crianças* e na terceira carta de seu livro de 2008 e adaptá-la à leitura da fotografia de Demir para sustentar uma primeira suposição sobre o porquê da enorme repercussão dessa fotografia. Essa ideia é o poder de persuasão. Antes, porém, é preciso lembrar que uma fotografia não é uma cópia do real, mas uma representação (e assim voltamos a *Ceci n\`est pas une pipe*). E, como tal, traz consigo "o elogio da forma, a afirmação da individualidade do fotógrafo e o dialogismo com os modelos" (ROUILLÉ, 2009, p. 161). Com isso, queremos destacar o mito da transparência documental da fotografia para aproximar a imagem fotográfica em discussão, em alguma medida, da ficção que caracteriza o romance. Para tal empreitada, vamos nos apoiar em de-Andrés et al. (2016):
> A imagem de Aylan é dotada de considerável polissemia em termos de significado. O ícone abrange o conceito de imigração, refugiado, políticas de imigração, tragédia, vulnerabilidade e infância e contém os três tratamentos que uma imagem pode fornecer: documento, arte e sentimento. Apresenta uma visão diferente no universo da tragédia dos filhos de imigrantes e refugiados que atravessam o mar Egeu ou o Mediterrâneo. (DE-ANDRÉS et al., 2016, p. 33 -- tradução nossa)[^30]
Também vamos evocar Charaudeau (2012), quando o autor discute o tratamento da informação num ato de comunicação mediada:
> O tratamento é a maneira de fazer, o modo pelo qual o sujeito informador decide transpor em linguagem (e também iconicamente, caso possa recorrer à imagem) os fatos selecionados, em função do alvo predeterminado, com o efeito que escolheu produzir. Nesse processo, está em jogo a inteligibilidade da informação transmitida, e como não há inteligibilidade em si, esta depende de escolhas discursivas efetuadas pelo sujeito informador. Ora, toda escolha se caracteriza por aquilo que retém ou despreza; a escolha põe em evidência certos fatos deixando outros à sombra. (CHARAUDEAU, 2012, p. 39).
Tanto quanto o romance, a imagem fotográfica se compõe, tecnicamente, de forma e conteúdo, também inseparáveis. E, igualmente, o poder de persuasão da imagem fotográfica -- e aqui estamos tratando da fotografia de Demir -- foi decisivo para conferir a essa fotografia o estatuto de símbolo, e apenas a essa foto, a despeito da quantidade de imagens produzidas sobre o tema e que não lograram alcançar tamanho destaque. Se para Llosa esse poder de persuasão é o que determina quão "bom" é o romance, parece plausível aplicar o mesmo juízo de valor à fotografia de Demir para explicar por que a sua fotografia -- "aquela" -- foi eleita digna de se tornar capa de grandes jornais impressos ao redor do mundo e mote para tantas manifestações artísticas que circularam pela rede mundial de computadores logo após a distribuição da fotografia.
> Não estamos acostumados a ver imagens de crianças mortas, crianças afogadas, nos nossos jornais ou nas notícias da televisão. Como diz Sánchez, o que se destaca é que "o corpo está inteiro" quando normalmente a guerra e a catástrofe natural trazem-nos imagens de corpos mutilados, amputados ou quebrados. Os corpos dos afogados que normalmente são trazidos estão gravemente deteriorados, mas não nesta imagem. É um menino que pode ser claramente identificado por qualquer pessoa no Ocidente como "um de nós" \[\...\]. (DE-ANDRÉS et al., 2016, p. 34 -- tradução nossa)[^31]
A fotografia de Demir, aqui considerada "eleita" (= "boa") dentre tantas, não só nos conta uma história trágica, mas também nos faz "viver" essa história, na medida de seu enorme poder de persuasão, que encurta a distância que separa a longínqua história de Aylan Kurdi da nossa realidade, uma vez que nos coloca diante da relação complicada que há entre adultos e crianças em muitos pontos do planeta. Muito frequentemente, essa relação é paradoxal, pois oscila entre a responsabilidade do mundo adulto de garantir a proteção das crianças[^32] e a precaríssima condição de vida de milhões de crianças que têm seus direitos negados e são privadas de tudo de que precisam para crescer saudáveis e fortes, devido ao seu lugar de nascimento, sua origem familiar, sua raça, sua etnia, seu gênero ou porque vivem na pobreza ou têm alguma deficiência.[^33] Só em 2016, cerca de 535 milhões de crianças, ou seja, um quarto da população infantil do mundo, foram afetadas por desastres naturais, conflitos armados, violência e crise migratória.[^34]
Para Llosa (2008, p. 37), "o mau romance, que carece de poder de persuasão ou que muito pouco o tem, não nos convence da mentira que nos conta \[\...\]". Talvez esse seja o aspecto que diferencia a fotografia de Demir de outras que mostram crianças migrantes mortas durante a fuga, mas que pecam pelo excesso de realismo e pelo pouco poder de persuasão que encerram.
A esse respeito, Barthes (2013) nos alerta, a propósito de uma exposição de fotos-choque na Galeria D'Orsay, em Paris:
> A maior parte das fotografias aqui reunidas para chocar o público não produzem \[sic\] o menor efeito sobre nós, precisamente porque o fotógrafo substitui-se-nos larga e excessivamente na formação do seu tema: quase sempre trabalhou de forma exagerada o horror que nos propõe, acrescentando ao fato, por meio de contrastes ou aproximações, a linguagem tradicional do horror \[\...\]. Ora, nenhuma dessas fotografias, excessivamente hábeis, atinge-nos. É que perante elas ficamos despossuídos da nossa capacidade de julgamento: alguém tremeu por nós, refletiu por nós, julgou por nós; o fotógrafo não nos deixou nada -- a não ser um simples direito de uma aprovação intelectual \[\...\]. (BARTHES, 2013, p. 106-107)
A segunda suposição sobre o porquê da enorme repercussão da fotografia de Demir consiste na ambivalência entre dormir e morrer: eis que consiste no paradoxo da imagem da vida, apesar de ser da morte. Para defender essa ideia, vamos acompanhar os passos de <NAME>, em seu ensaio "Imagem do imperialismo" (2017, p. 21-35). Segundo <NAME> (2017, p. 16), <NAME> escreveu ensaios sobre fotografias que são "jornadas epistemológicas que nos levam além dos momentos representados, frequentemente além da fotografia". Portanto, observaremos a fotografia, mas com os olhos alcançando mais que o instante que ela mostra, porque acreditamos que essa fotografia não congelou um instante: o tempo já se estancara com a morte.
Há uma semelhança entre a criança que dorme e Aylan morto na praia. Destaque-se que as roupas de Aylan estão intactas e os sapatos não estão de acordo com o estereótipo de um menino refugiado que costumamos ver nos jornais ou na TV. Daí por que reconhecemos nele "o nosso filho", cuja morte nos é intolerável, mas cujo sono nos emociona. Dessa semelhança se serviram alguns artistas integrantes da *hashtag* \#kiyiyavuraninsanlik, criada para circular no Twitter.
// Figura 7 -- Travesseiro e cobertor (Fonte: #kiyiyavuraninsanlik)
// Figura 8 -- Quarto de criança (Fonte: #kiyiyavuraninsanlik)
// Figura 9 -- Sob as ondas do mar (Fonte: #kiyiyavuraninsanlik)
// Figura 10 -- Do abraço da Síria (Fonte: #kiyiyavuraninsanlik)
// Figura 11 -- Vamos mudar o mundo!! (Fonte: #kiyiyavuraninsanlik)
// Figura 12 -- Outra vida (Fonte: #kiyiyavuraninsanlik)
// Figura 13 -- Em um lugar melhor (Fonte: #kiyiyavuraninsanlik)
// Figura 14 -- Por um mundo com humanidade (Fonte: #kiyiyavuraninsanlik)
// Figura 15 -- O Espelho da Europa (Fonte: #kiyiyavuraninsanlik)
Essa semelhança não deveria ser surpreendente, mas a sugerida ambivalência entre vida e morte, mais que surpreender, choca e horroriza. Em ambas as situações -- a criança que dorme e a criança sem vida --, ela está completamente exposta à observação atenta, desperta sentimentos e expõe nossos intemporais medos: a morte e o morrer.
Quem vela o sono de uma criança teme a síndrome da morte súbita, ainda um mistério para a medicina. A maioria dessas mortes acontece durante o sono. Não há muito o que fazer para impedi-la. Ensinam-nos apenas a pôr o bebê para dormir de barriga para cima ou de lado, medida que se torna rapidamente inútil, porque a criança logo aprende a se virar e a dormir de bruços, exatamente na posição em que Aylan foi fotografado.
Essa mesma posição para dormir -- de bruços -- pode ser observada em algumas das fotografias do fotógrafo sueco <NAME>, vencedor de dois prêmios World Press Photo Awards. Em 2015, Wennman fez uma parceria com a Agência das Nações Unidas para Refugiados (Acnur) e viajou por sete países no Oriente Médio e na Europa, onde conheceu crianças refugiadas que lhe mostraram onde dormiam. Seu objetivo era aumentar a conscientização sobre crianças refugiadas. Suas fotografias resultaram na exposição *Where the children sleep*, que mostra rostos, nomes e histórias de milhões de crianças refugiadas que passam a noite em ambientes insalubres ou totalmente inseguros, depois que sua vida foi violentamente transformada. A exposição fotográfica retrata o impacto devastador do conflito que já dura mais de seis anos, depois de essas crianças terem escapado do seu país devastado pela guerra.[^35]
Evocamos, aqui, esse fotojornalista e seu ensaio premiado, com o fim de demonstrar que a criança que dorme -- ainda mais a criança refugiada -- já estava presente na mídia e já havia sido explorada pelo fotojornalismo. Entretanto, de um ponto de vista oposto ao que trouxe a fotografia de Demir.
Além dessa semelhança, destacaremos as disparidades observadas entre Wennman e Demir. O primeiro vai em busca de fotografar crianças refugiadas dormindo. Havia em seu objetivo a ideia de produção. Ele sabia de antemão o que desejava fazer, como fazer, por que fazer. Ele tinha um projeto de trabalho desenhado, executou-o e deu um destino ao produto do seu trabalho que lhe rendeu prêmios. Esse percurso é diferente daquele que notabilizou Demir. Não foi objetivo da fotojornalista buscar uma criança refugiada morta/dormindo na praia para então levar tal imagem ao mundo. Antes, foi a imagem com que a fotojornalista se deparou que solicitou uma decisão imediata de Demir.
Wennman desfrutou de sucesso e foi laureado com seu ensaio fotográfico, atingindo um público específico, que acompanha a World Press Photo, organização independente sem fins lucrativos fundada em 1955 em Amsterdã, na Holanda, e conhecida por realizar anualmente a maior e mais prestigiada distinção de fotojornalismo do mundo. Todos os anos, as fotografias vencedoras são reunidas em uma exposição itinerante visitada por milhões de pessoas em 40 países e um livro com todos os registros premiados é publicado em seis idiomas diferentes.
Demir tornou-se conhecida por fotografar Aylan e entregar ao mundo uma fotografia cuja contemplação desencadeou ações. Pessoas comuns manifestaram sua indignação nas redes sociais e artistas responderam à provocação da foto com outras imagens. Sites de pesquisa registraram números recordes de buscas sobre refugiados, <NAME>, mortes no Mediterrâneo, movidos pelo incômodo absoluto que a imagem provocou. A imprensa mundial exibiu a foto na primeira página e desde então estudiosos de variadas áreas do conhecimento vêm publicando estudos e artigos sobre a foto de Aylan.
Tais disparidades não tornam ambos os fotojornalistas mais ou menos importantes, um em relação ao outro. Tanto nas fotografias de Wennman quanto na de Demir, está presente a criança que dorme -- ou que parece dormir. E isso nos leva a concluir que há, no sono da criança, qualquer coisa que emociona e desespera -- daí por que o projeto de Wennman era justamente fotografar crianças durante o sono -- porque a criança que dorme torna-se ainda mais adorável, mas também mais frágil, indefesa e exposta a todo tipo de ameaça. O senso de responsabilidade que a visão da criança que dorme nos desperta é tal que ver <NAME> morto -- à semelhança de um bebê que dorme -- se torna insuportável, porque nos dá a certeza de que falhamos espetacularmente. O tempo das fotos de Demir é de outra ordem do tempo das fotos de Wennman. Em Demir, é passado: "Isto foi", conclui Barthes (1990, p. 135-151). Em Wennman, é a prolongação do presente. O passado só se recupera recriando-o, sonhando-o, imaginando-o, ficcionalizando-o. O presente que se prolonga é uma prova do realismo.
Por isso, as imagens fotográficas de Aylan deslizam de um *analogon* -- perfeição analógica do real ou denotação -- para uma conotação (BARTHES, 1990). Sem dúvida, a ideia de *analogon* se aplica perfeitamente às fotos de Wennman, denotativas em toda a sua estrutura e trajetória. Mas às fotos de Demir, é forçoso ultrapassar a denotação e atribuir-lhe uma conotação:
> Ora, esse estatuto puramente \"denotante\" da fotografia, a perfeição e a plenitude de sua analogia, numa palavra sua \"objetividade\", tudo isso se arrisca a ser mítico (são os caracteres que o sentido comum atribui à fotografia): pois, de fato, há uma forte probabilidade (e isso será uma hipótese de trabalho) para que a mensagem fotográfica (ao menos a mensagem de imprensa) seja também ela conotada. A conotação não se deixa forçosamente apreender imediatamente ao nível da própria mensagem (ela é, se quisermos, simultaneamente visível e ativa, clara e implícita), mas pode-se já induzi-la de certos fenômenos que se passam ao nível da produção e da recepção da mensagem: de um lado, uma fotografia de imprensa é um objeto trabalhado, escolhido, composto, construído, tratado segundo normas profissionais, estéticas ou ideológicas, que são outros tantos fatores de conotação; e, de outro, essa mesma fotografia não é apenas percebida, recebida, ela é lida, ligada mais ou menos conscientemente pelo público que a consome a uma reserva tradicional de signos. (BARTHES, 1990, p. 14)
// Figura 16 -- Na grama (Fonte: <NAME> -- Where the children sleep)
// Figura 17 -- Entre as árvores (Fonte: <NAME> -- Where the children sleep)
// Figura 18 -- No asfalto (Fonte: <NAME> -- Where the children sleep)
// Figura 19 -- Na rua (Fonte: <NAME> -- Where the children sleep)
As imagens de Aylan contam sua história de diferentes maneiras. Em março de 2016, o artista plástico finlandês Pekka Jylhä estreou a exposição *Nós herdamos a esperança -- o dom do esquecimento*, na Galeria Helsinki Contemporary, cuja principal atração era uma escultura em tamanho natural, colorida, de Aylan Kurdi, reproduzindo a fotografia de Demir.
// Figura 20 -- Nós herdamos a esperança -- o dom do esquecimento (Foto: Ilpo Vainionpää; Fonte: http://finlandtoday.fi/the-drowned-refugee-aylan-and-other-horrors-on-display-at-helsinki-contemporary/)
// Figura 21 -- Até que o mar o liberte (Foto: Ilpo Vainionpää; Fonte: http://finlandtoday.fi/the-drowned-refugee-aylan-and-other-horrors-on-display-at-helsinki-contemporary/)
Em texto publicado no site *Finland Today*, <NAME> conta que, em Helsinki, em geral, as galerias são vazias, por isso foi surpreendente que a exposição atraísse público numeroso, colocado mais uma vez diante de uma imagem de tirar o fôlego. A mídia local divulgou as informações sobre a escultura, e os visitantes lotaram a galeria até o dia 3 de abril de 2016. Para Roberts, o sucesso da exposição decorreu do foco da mídia sobre a escultura, intitulada *Until the Sea Shall Him Free.* Assim, os visitantes foram conferir as dez obras da mostra, sabendo que se defrontariam com o horror em forma de escultura, antecipando um choque que já era esperado.
Embora possa inquietar ao extremo e suscitar discussões éticas, a interferência do artista, nesse caso, tem uma relevância decisiva: só ele é capaz de captar aspectos como a nossa capacidade de esquecer rapidamente os desastres que presenciamos em tempo real, mediados pelas tecnologias da comunicação, bem como a nossa necessidade de dilatar a sensação de comoção, ao mesmo tempo em que estende a duração do tema entre os assuntos mais vistos e discutidos, *on-line* e *off-line*.
É preciso repetir que a exposição do artista finlandês foi intitulada *Nós herdamos a esperança -- o dom do esquecimento*. Essa frase é um verso da poeta polonesa <NAME> (1923-2012), vencedora do Prêmio Nobel de Literatura de 1996, que revela o quanto somos capazes de esquecer, mas também o quanto essa capacidade é vital para a sobrevivência humana. Jylhä explorou exatamente esse ponto. Ao fazer a escultura de Aylan, ele disse ao mundo "Não esqueça", mas, com isso, revelou a nossa pouca disposição de manter a atenção nos eventos narrados pela mídia, inerentemente efêmeros. A necessidade de manter viva a memória da dor surge exatamente da nossa capacidade de esquecer.
Essa espécie de resiliência, tão bem representada pelo título da exposição, mostra a sensibilidade do artista finlandês para compreender a sociedade consumista, individualista e hedonista que somos, mas também sua capacidade de perceber que não se pode "morrer demais" a cada novo choque distribuído pela mídia. Isso explica a influência da poesia de Wislawa Szymborska sobre o trabalho de Jylhä:
> Autotomia[^36]\
> \
> Diante do perigo, a holotúria[^37] se divide em duas:\
> deixando uma sua metade ser devorada pelo mundo,\
> salvando-se com a outra metade.\
> \
> Ela se bifurca subitamente em naufrágio e salvação,\
> em resgate e promessa, no que foi e no que será.\
> \
> No centro do seu corpo irrompe um precipício\
> de duas bordas que se tornam estranhas uma à outra.\
> \
> Sobre uma das bordas, a morte, sobre outra, a vida.\
> Aqui o desespero, ali a coragem.\
> \
> Se há balança, nenhum prato pesa mais que o outro.\
> Se há justiça, ei-la aqui.\
> \
> Morrer apenas o estritamente necessário, sem ultrapassar a medida.\
> Renascer o tanto preciso a partir do resto que se preservou.\
> \
> Nós também sabemos nos dividir, é verdade.\
> Mas apenas em corpo e sussurros partidos.\
> Em corpo e poesia.\
> \
> Aqui a garganta, do outro lado, o riso,\
> leve, logo abafado.\
> \
> Aqui o coração pesado, ali o <NAME>,\
> três pequenas palavras que são as três plumas de um voo.\
> \
> O abismo não nos divide.\
> O abismo nos cerca.[^38]\
>
É assim que o artista acrescenta um ponto de vista privilegiado e sensível, atento à forma que deve ser atribuída a um conteúdo destinado a persuadir a audiência, fazendo-a se sentir "como se" estivesse vivendo aquela experiência. E é nesse sentido que entendemos a ficcionalização da história de <NAME> e o estatuto estético -- ou transestético[^39] (LIPOVETSKY; SERROY, 2015) -- das imagens de Aylan, desde a fotografia de Demir até a escultura de Jylhä.
== Têm mesmo poder as fotografias icônicas?
Quando da publicação das fotos de Demir, novamente veio à tona a discussão sobre o poder transformador das fotografias icônicas.[^40] Existe um conjunto dessas fotografias, largamente conhecidas no mundo inteiro, que entraram para a história devido ao seu poder transformador. A foto de Aylan seria a mais recente dessa lista.
Essa categoria de imagens, por sua capacidade denotativa de apresentar fatos presentes, serviu como um acionador de críticas. Em ensaio original dos anos 1970, Sontag (2004, p. 22) reconhece que a fotografia se tornou um dos principais meios para experimentar de forma indireta a realidade do mundo, mas adverte: "fotografar é, em essência, um ato de não intervenção". Segundo a autora, há situações em que o fotógrafo deve escolher entre uma fotografia e uma vida, e ele opta pela foto: "A pessoa que interfere não pode registrar; a pessoa que registra não pode interferir" (SONTAG, 2004, p. 22).
Nesse ensaio, Sontag (2004, p. 27-28) admite que usar a câmera é uma forma de participação: "As imagens que mobilizam a consciência estão sempre ligadas a determinada situação histórica. \[\...\] Fotos não podem criar uma posição moral, mas podem reforçá-la -- e podem ajudar a desenvolver uma posição moral ainda embrionária". É o caso da fotografia que ocupou a primeira página de jornais do mundo inteiro em um dia de 1972 -- menina do napalm --, contribuindo mais "\[\...\] para aumentar o repúdio público contra a guerra do que cem horas de barbaridades exibidas pela televisão" (SONTAG, 2004, p. 28). Trata-se da fotografia de Nick Ut responsável por ter colocado a opinião pública contra a Guerra do Vietnã. Destaque-se que Sontag (2004) reitera a limitação do poder transformador da fotografia, dado que
> O que determina a possibilidade de ser moralmente afetado por fotos é a existência de uma consciência política apropriada. Sem uma visão política, as fotos do matadouro da história serão, muito provavelmente, experimentadas apenas como irreais ou como um choque emocional desorientador. (SONTAG, 2004, p. 29)
// Figura 22 -- A menina do napalm (Foto: AP / Nick Ut, 1972)
Outro episódio histórico que gerou fotografias que abalaram o mundo foi por ocasião da invasão norte-americana ao Iraque, durante o governo Bush, no começo dos anos 2000. Detidos na prisão de Bagdá -- Abu Ghraib --, prisioneiros iraquianos foram mortos, torturados, abusados e tiveram suas imagens divulgadas pelos próprios reservistas do exército norte-americano.
// Figura 23 -- Um detido em Abu Ghraib (Fonte: Associated Press, 2004)
Cerca de 200 fotografias feitas em Abu Ghraib nessas condições foram, em 2016, divulgadas pelo Pentágono, que foi forçado a publicá-las depois de perder uma batalha judicial para a União Americana das Liberdades Civis (ACLU). Ao todo, são 2.000 as fotos que a ACLU quer que o Departamento de Defesa norte-americano divulgue. Portanto, a história ainda não chegou ao fim: há 1.800 fotos a serem conhecidas pelo mundo.
Sontag (2008, p. 141) se manifestou novamente sobre "o poder insuperável \[das fotografias\] para determinar o que recordamos dos fatos". Diante da desmoralização em que o governo Bush se viu envolvido, "a reação inicial do governo foi dizer que o presidente estava chocado e indignado *com as fotos* -- como se o erro ou o horror estivesse nas imagens, não no que elas retratam" (SONTAG, 2008, p. 141 -- grifei). A indignação de Sontag diante de tais fotos levou-a a fazer uma crítica duríssima aos Estados Unidos, seu país, e ao povo americano, acusando-os de torturadores que se divertiram com a humilhação do inimigo: "E essas fotos que os americanos distribuíram anunciam ao mundo choque e terrível estupefação: um padrão de comportamento criminoso em franco desacato às convenções humanitárias internacionais (SONTAG, 2008, p. 149-150).
Esse episódio é significativo para o nosso estudo porque foi num de seus últimos artigos, publicado em maio de 2004 (poucos meses antes de morrer), no jornal *The New York Times*, que Sontag afirmou que a história recordará a Guerra do Iraque pelas fotografias dos soldados americanos na prisão de Abu Ghraib:
> As fotos não vão desaparecer. Essa é a natureza do mundo digital em que vivemos. De fato, parece que elas eram necessárias para levar os nossos líderes a reconhecer que tinham um problema nas mãos. \[\...\] Ao que parece, foi preciso que as fotos surgissem para que a atenção deles despertasse, quando ficou claro que elas não poderiam ser apagadas; foram as fotos que tornaram tudo isso "real" para o presidente e seus associados. Até então, só havia palavras, que são mais fáceis de encobrir, em nossa era de auto-reprodução e autodisseminação digitais infinitas, e, portanto, muito mais fáceis de esquecer. (SONTAG, 2008, p. 152)
Foi também nesse mesmo ensaio que Sontag (2008, p. 155) admitiu: "Sim, parece que uma foto vale mil palavras". Essas palavras são praticamente as últimas do ensaio de 2004 e sinalizam que Sontag estava revendo sua posição diante da fotografia -- desenvolvida no ensaio dos anos 1970 e hoje disponível no livro *Sobre fotografia* (2004) --, o que já se nota em seu livro *Diante da dor dos outros* (2003). Se, naquele momento, ela defendia a dormência da consciência que fotografias não seriam capazes de neutralizar, no final da vida, ela escreve: "Bem\... Não" (SONTAG, 2002, p. 263-264). Sontag confessa não se sentir mais tão certa de suas antigas teses e expressa uma perspectiva mais positiva quanto à força das imagens de sofrimento como base para a ação.
Bem, sim, concordamos com Susan Sontag. Mas não é qualquer imagem que vale por mil palavras. Algumas apenas -- aquelas que nos mobilizam, que nos tocam, que nos inquietam. A de <NAME>, por exemplo.
|
|
https://github.com/cherryblossom000/typst-apa-bibliography | https://raw.githubusercontent.com/cherryblossom000/typst-apa-bibliography/main/example/main.typ | typst | #import "@local/apa-bibliography:0.0.1": apa-bibliography
#set page(height: auto)
#let (reference: r, reference-with-page: rp, reference-date-only: rd, bibliography) = apa-bibliography(yaml("works.yml"))
Typst is really cool #r.typst-is-cool.
Here's another sentence where I'm going to reference another article with a specific page number #(rp.company-report)[p. 3].
What about if two references have the same author and year? #r.same-year1 #r.same-year2
According to Smith #rd.typst-is-cool, this is just an example to show how you can cite just the date.
Multiple authors: #r.multiple-authors. This is also an example with no date.
You can use an acronym for an author. Here's the first time I'm going to reference the author #r.long-author1.
Now when I reference the same author again, it's going to show the acronym #r.long-author2.
= References
#bibliography
|
|
https://github.com/lublak/typst-echarm-package | https://raw.githubusercontent.com/lublak/typst-echarm-package/main/examples/mixed_charts.typ | typst | MIT License | #set page(width: 200mm, height: 150mm, margin: 0mm)
#import "../typst-package/lib.typ" as echarm
#echarm.render(width: 100%, height: 100%, options: (
xAxis: (
type: "category",
data: ("Val1", "Val2", "Val3", "Val4", "Val5", "Val6")
),
yAxis: (
type: "value"
),
series: (
(
data: (20, 330, 400, 218, 135, 147),
type: "line",
areaStyle: (:),
smooth: true
),
(
data: (200, 430,300, 260, 15, 23),
type: "line"
),
(
data: (150, 330, 400, 218, 135, 147),
type: "bar"
),
(
data: (60, 30, 300, 250, 130, 120),
type: "bar"
)
)
)) |
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/unichar/0.1.0/ucd/block-2460.typ | typst | Apache License 2.0 | #let data = (
("CIRCLED DIGIT ONE", "No", 0),
("CIRCLED DIGIT TWO", "No", 0),
("CIRCLED DIGIT THREE", "No", 0),
("CIRCLED DIGIT FOUR", "No", 0),
("CIRCLED DIGIT FIVE", "No", 0),
("CIRCLED DIGIT SIX", "No", 0),
("CIRCLED DIGIT SEVEN", "No", 0),
("CIRCLED DIGIT EIGHT", "No", 0),
("CIRCLED DIGIT NINE", "No", 0),
("CIRCLED NUMBER TEN", "No", 0),
("CIRCLED NUMBER ELEVEN", "No", 0),
("CIRCLED NUMBER TWELVE", "No", 0),
("CIRCLED NUMBER THIRTEEN", "No", 0),
("CIRCLED NUMBER FOURTEEN", "No", 0),
("CIRCLED NUMBER FIFTEEN", "No", 0),
("CIRCLED NUMBER SIXTEEN", "No", 0),
("CIRCLED NUMBER SEVENTEEN", "No", 0),
("CIRCLED NUMBER EIGHTEEN", "No", 0),
("CIRCLED NUMBER NINETEEN", "No", 0),
("CIRCLED NUMBER TWENTY", "No", 0),
("PARENTHESIZED DIGIT ONE", "No", 0),
("PARENTHESIZED DIGIT TWO", "No", 0),
("PARENTHESIZED DIGIT THREE", "No", 0),
("PARENTHESIZED DIGIT FOUR", "No", 0),
("PARENTHESIZED DIGIT FIVE", "No", 0),
("PARENTHESIZED DIGIT SIX", "No", 0),
("PARENTHESIZED DIGIT SEVEN", "No", 0),
("PARENTHESIZED DIGIT EIGHT", "No", 0),
("PARENTHESIZED DIGIT NINE", "No", 0),
("PARENTHESIZED NUMBER TEN", "No", 0),
("PARENTHESIZED NUMBER ELEVEN", "No", 0),
("PARENTHESIZED NUMBER TWELVE", "No", 0),
("PARENTHESIZED NUMBER THIRTEEN", "No", 0),
("PARENTHESIZED NUMBER FOURTEEN", "No", 0),
("PARENTHESIZED NUMBER FIFTEEN", "No", 0),
("PARENTHESIZED NUMBER SIXTEEN", "No", 0),
("PARENTHESIZED NUMBER SEVENTEEN", "No", 0),
("PARENTHESIZED NUMBER EIGHTEEN", "No", 0),
("PARENTHESIZED NUMBER NINETEEN", "No", 0),
("PARENTHESIZED NUMBER TWENTY", "No", 0),
("DIGIT ONE FULL STOP", "No", 0),
("DIGIT TWO FULL STOP", "No", 0),
("DIGIT THREE FULL STOP", "No", 0),
("DIGIT FOUR FULL STOP", "No", 0),
("DIGIT FIVE FULL STOP", "No", 0),
("DIGIT SIX FULL STOP", "No", 0),
("DIGIT SEVEN FULL STOP", "No", 0),
("DIGIT EIGHT FULL STOP", "No", 0),
("DIGIT NINE FULL STOP", "No", 0),
("NUMBER TEN FULL STOP", "No", 0),
("NUMBER ELEVEN FULL STOP", "No", 0),
("NUMBER TWELVE FULL STOP", "No", 0),
("NUMBER THIRTEEN FULL STOP", "No", 0),
("NUMBER FOURTEEN FULL STOP", "No", 0),
("NUMBER FIFTEEN FULL STOP", "No", 0),
("NUMBER SIXTEEN FULL STOP", "No", 0),
("NUMBER SEVENTEEN FULL STOP", "No", 0),
("NUMBER EIGHTEEN FULL STOP", "No", 0),
("NUMBER NINETEEN FULL STOP", "No", 0),
("NUMBER TWENTY FULL STOP", "No", 0),
("PARENTHESIZED LATIN SMALL LETTER A", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER B", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER C", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER D", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER E", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER F", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER G", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER H", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER I", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER J", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER K", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER L", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER M", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER N", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER O", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER P", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER Q", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER R", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER S", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER T", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER U", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER V", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER W", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER X", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER Y", "So", 0),
("PARENTHESIZED LATIN SMALL LETTER Z", "So", 0),
("CIRCLED LATIN CAPITAL LETTER A", "So", 0),
("CIRCLED LATIN CAPITAL LETTER B", "So", 0),
("CIRCLED LATIN CAPITAL LETTER C", "So", 0),
("CIRCLED LATIN CAPITAL LETTER D", "So", 0),
("CIRCLED LATIN CAPITAL LETTER E", "So", 0),
("CIRCLED LATIN CAPITAL LETTER F", "So", 0),
("CIRCLED LATIN CAPITAL LETTER G", "So", 0),
("CIRCLED LATIN CAPITAL LETTER H", "So", 0),
("CIRCLED LATIN CAPITAL LETTER I", "So", 0),
("CIRCLED LATIN CAPITAL LETTER J", "So", 0),
("CIRCLED LATIN CAPITAL LETTER K", "So", 0),
("CIRCLED LATIN CAPITAL LETTER L", "So", 0),
("CIRCLED LATIN CAPITAL LETTER M", "So", 0),
("CIRCLED LATIN CAPITAL LETTER N", "So", 0),
("CIRCLED LATIN CAPITAL LETTER O", "So", 0),
("CIRCLED LATIN CAPITAL LETTER P", "So", 0),
("CIRCLED LATIN CAPITAL LETTER Q", "So", 0),
("CIRCLED LATIN CAPITAL LETTER R", "So", 0),
("CIRCLED LATIN CAPITAL LETTER S", "So", 0),
("CIRCLED LATIN CAPITAL LETTER T", "So", 0),
("CIRCLED LATIN CAPITAL LETTER U", "So", 0),
("CIRCLED LATIN CAPITAL LETTER V", "So", 0),
("CIRCLED LATIN CAPITAL LETTER W", "So", 0),
("CIRCLED LATIN CAPITAL LETTER X", "So", 0),
("CIRCLED LATIN CAPITAL LETTER Y", "So", 0),
("CIRCLED LATIN CAPITAL LETTER Z", "So", 0),
("CIRCLED LATIN SMALL LETTER A", "So", 0),
("CIRCLED LATIN SMALL LETTER B", "So", 0),
("CIRCLED LATIN SMALL LETTER C", "So", 0),
("CIRCLED LATIN SMALL LETTER D", "So", 0),
("CIRCLED LATIN SMALL LETTER E", "So", 0),
("CIRCLED LATIN SMALL LETTER F", "So", 0),
("CIRCLED LATIN SMALL LETTER G", "So", 0),
("CIRCLED LATIN SMALL LETTER H", "So", 0),
("CIRCLED LATIN SMALL LETTER I", "So", 0),
("CIRCLED LATIN SMALL LETTER J", "So", 0),
("CIRCLED LATIN SMALL LETTER K", "So", 0),
("CIRCLED LATIN SMALL LETTER L", "So", 0),
("CIRCLED LATIN SMALL LETTER M", "So", 0),
("CIRCLED LATIN SMALL LETTER N", "So", 0),
("CIRCLED LATIN SMALL LETTER O", "So", 0),
("CIRCLED LATIN SMALL LETTER P", "So", 0),
("CIRCLED LATIN SMALL LETTER Q", "So", 0),
("CIRCLED LATIN SMALL LETTER R", "So", 0),
("CIRCLED LATIN SMALL LETTER S", "So", 0),
("CIRCLED LATIN SMALL LETTER T", "So", 0),
("CIRCLED LATIN SMALL LETTER U", "So", 0),
("CIRCLED LATIN SMALL LETTER V", "So", 0),
("CIRCLED LATIN SMALL LETTER W", "So", 0),
("CIRCLED LATIN SMALL LETTER X", "So", 0),
("CIRCLED LATIN SMALL LETTER Y", "So", 0),
("CIRCLED LATIN SMALL LETTER Z", "So", 0),
("CIRCLED DIGIT ZERO", "No", 0),
("NEGATIVE CIRCLED NUMBER ELEVEN", "No", 0),
("NEGATIVE CIRCLED NUMBER TWELVE", "No", 0),
("NEGATIVE CIRCLED NUMBER THIRTEEN", "No", 0),
("NEGATIVE CIRCLED NUMBER FOURTEEN", "No", 0),
("NEGATIVE CIRCLED NUMBER FIFTEEN", "No", 0),
("NEGATIVE CIRCLED NUMBER SIXTEEN", "No", 0),
("NEGATIVE CIRCLED NUMBER SEVENTEEN", "No", 0),
("NEGATIVE CIRCLED NUMBER EIGHTEEN", "No", 0),
("NEGATIVE CIRCLED NUMBER NINETEEN", "No", 0),
("NEGATIVE CIRCLED NUMBER TWENTY", "No", 0),
("DOUBLE CIRCLED DIGIT ONE", "No", 0),
("DOUBLE CIRCLED DIGIT TWO", "No", 0),
("DOUBLE CIRCLED DIGIT THREE", "No", 0),
("DOUBLE CIRCLED DIGIT FOUR", "No", 0),
("DOUBLE CIRCLED DIGIT FIVE", "No", 0),
("DOUBLE CIRCLED DIGIT SIX", "No", 0),
("DOUBLE CIRCLED DIGIT SEVEN", "No", 0),
("DOUBLE CIRCLED DIGIT EIGHT", "No", 0),
("DOUBLE CIRCLED DIGIT NINE", "No", 0),
("DOUBLE CIRCLED NUMBER TEN", "No", 0),
("NEGATIVE CIRCLED DIGIT ZERO", "No", 0),
)
|
https://github.com/frectonz/the-pg-book | https://raw.githubusercontent.com/frectonz/the-pg-book/main/book/126.%20addiction.html.typ | typst | addiction.html
The Acceleration of Addictiveness
July 2010
What hard liquor, cigarettes, heroin, and crack have in common is
that they're all more concentrated forms of less addictive predecessors.
Most if not all the things we describe as addictive are. And the
scary thing is, the process that created them is accelerating.
We wouldn't want to stop it. It's the same process that cures
diseases: technological progress. Technological progress means
making things do more of what we want. When the thing we want is
something we want to want, we consider technological progress good.
If some new technique makes solar cells x% more efficient, that
seems strictly better. When progress concentrates something we
don't want to want — when it transforms opium into heroin — it seems
bad. But it's the same process at work.
[1]
No one doubts this process is accelerating, which means increasing
numbers of things we like will be transformed into things we like
too much.
[2]
As far as I know there's no word for something we like too much.
The closest is the colloquial sense of "addictive." That usage has
become increasingly common during my lifetime. And it's clear why:
there are an increasing number of things we need it for. At the
extreme end of the spectrum are crack and meth. Food has been
transformed by a combination of factory farming and innovations in
food processing into something with way more immediate bang for the
buck, and you can see the results in any town in America. Checkers
and solitaire have been replaced by World of Warcraft and FarmVille.
TV has become much more engaging, and even so it can't compete with Facebook.
The world is more addictive than it was 40 years ago. And unless
the forms of technological progress that produced these things are
subject to different laws than technological progress in general,
the world will get more addictive in the next 40 years than it did
in the last 40.
The next 40 years will bring us some wonderful things. I don't
mean to imply they're all to be avoided. Alcohol is a dangerous
drug, but I'd rather live in a world with wine than one without.
Most people can coexist with alcohol; but you have to be careful.
More things we like will mean more things we have to be careful
about.
Most people won't, unfortunately. Which means that as the world
becomes more addictive, the two senses in which one can live a
normal life will be driven ever further apart. One sense of "normal"
is statistically normal: what everyone else does. The other is the
sense we mean when we talk about the normal operating range of a
piece of machinery: what works best.
These two senses are already quite far apart. Already someone
trying to live well would seem eccentrically abstemious in most of
the US. That phenomenon is only going to become more pronounced.
You can probably take it as a rule of thumb from now on that if
people don't think you're weird, you're living badly.
Societies eventually develop antibodies to addictive new things.
I've seen that happen with cigarettes. When cigarettes first
appeared, they spread the way an infectious disease spreads through
a previously isolated population. Smoking rapidly became a
(statistically) normal thing. There were ashtrays everywhere. We
had ashtrays in our house when I was a kid, even though neither of
my parents smoked. You had to for guests.
As knowledge spread about the dangers of smoking, customs changed.
In the last 20 years, smoking has been transformed from something
that seemed totally normal into a rather seedy habit: from something
movie stars did in publicity shots to something small huddles of
addicts do outside the doors of office buildings. A lot of the
change was due to legislation, of course, but the legislation
couldn't have happened if customs hadn't already changed.
It took a while though—on the order of 100 years. And unless the
rate at which social antibodies evolve can increase to match the
accelerating rate at which technological progress throws off new
addictions, we'll be increasingly unable to rely on customs to
protect us.
[3]
Unless we want to be canaries in the coal mine
of each new addiction—the people whose sad example becomes a
lesson to future generations—we'll have to figure out for ourselves
what to avoid and how. It will actually become a reasonable strategy
(or a more reasonable strategy) to suspect
everything new.
In fact, even that won't be enough. We'll have to worry not just
about new things, but also about existing things becoming more
addictive. That's what bit me. I've avoided most addictions, but
the Internet got me because it became addictive while I was using
it.
[4]
Most people I know have problems with Internet addiction. We're
all trying to figure out our own customs for getting free of it.
That's why I don't have an iPhone, for example; the last thing I
want is for the Internet to follow me out into the world.
[5]
My latest trick is taking long hikes. I used to think running was a
better form of exercise than hiking because it took less time. Now
the slowness of hiking seems an advantage, because the longer I
spend on the trail, the longer I have to think without interruption.
Sounds pretty eccentric, doesn't it? It always will when you're
trying to solve problems where there are no customs yet to guide
you. Maybe I can't plead Occam's razor; maybe I'm simply eccentric.
But if I'm right about the acceleration of addictiveness, then this
kind of lonely squirming to avoid it will increasingly be the fate
of anyone who wants to get things done. We'll increasingly be
defined by what we say no to.
Notes
[1]
Could you restrict technological progress to areas where you
wanted it? Only in a limited way, without becoming a police state.
And even then your restrictions would have undesirable side effects.
"Good" and "bad" technological progress aren't sharply differentiated,
so you'd find you couldn't slow the latter without also slowing the
former. And in any case, as Prohibition and the "war on drugs"
show, bans often do more harm than good.
[2]
Technology has always been accelerating. By Paleolithic
standards, technology evolved at a blistering pace in the Neolithic
period.
[3]
Unless we mass produce social customs. I suspect the recent
resurgence of evangelical Christianity in the US is partly a reaction
to drugs. In desperation people reach for the sledgehammer; if
their kids won't listen to them, maybe they'll listen to God. But
that solution has broader consequences than just getting kids to
say no to drugs. You end up saying no to
science as well.
I worry we may be heading for a future in which only a few people
plot their own itinerary through no-land, while everyone else books
a package tour. Or worse still, has one booked for them by the
government.
[4]
People commonly use the word "procrastination" to describe
what they do on the Internet. It seems to me too mild to describe
what's happening as merely not-doing-work. We don't call it
procrastination when someone gets drunk instead of working.
[5]
Several people have told me they like the iPad because it
lets them bring the Internet into situations where a laptop would
be too conspicuous. In other words, it's a hip flask. (This is
true of the iPhone too, of course, but this advantage isn't as
obvious because it reads as a phone, and everyone's used to those.)
Thanks to <NAME>, <NAME>, <NAME>, and
<NAME> for reading drafts of this.
|
|
https://github.com/DieracDelta/presentations | https://raw.githubusercontent.com/DieracDelta/presentations/master/polylux/book/src/utils/side-by-side.md | markdown | # Side by side
To make good use of the space on a slide, you will often want to place content
next to each other.
For convenience, Polylux provides the function `#side-by-side` for this purpose.
If you used
```typ
{{#include ../IMPORT.typ}}
```
you have it directly available.
Otherwise you can get it from the `utils` module.
It is basically a thin wrapper around the Typst function
[`#grid`](https://typst.app/docs/reference/layout/grid/) but tailored
towards this specific use case.
In its simplest form, you can use it as
```typ
{{#include side-by-side.typ:6:12}}
```
resulting in

As you can see, the content arguments you provide will be placed next to each
other with equal proportions of width.
A spacing (gutter) of `1em` will also be put between them.
The widths and gutter can be configured using the `columns` and `gutter` optional
arguments, respectively.
They are propagated to `#grid` directly so you can look up possible values in
its documentation
([`gutter`](https://typst.app/docs/reference/layout/grid/#parameters-gutter)
and [`columns`](https://typst.app/docs/reference/layout/grid/#parameters-columns)
arguments).
If not specified, they fall back to these defaults:
- `gutter`: `1em`
- `columns`: `(1fr,) * n` if you provided `n` content arguments, that means an
array with the value `1fr` repeated `n` times.
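Spelled out, such a call might look roughly like this (an illustrative sketch rather than one of the bundled example files; the column ratios and dummy content are made up):
```typ
#side-by-side(columns: (2fr, 1fr), gutter: 2em)[
  The wider column, taking two thirds of the available width.
][
  A narrower note next to it.
]
```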
A more complex example would therefore be:
```typ
{{#include side-by-side-kwargs.typ:6:12}}
```
resulting in

|
|
https://github.com/DaAlbrecht/lecture-notes | https://raw.githubusercontent.com/DaAlbrecht/lecture-notes/main/computer_networks/protocols_protocol_layers.typ | typst | MIT License | #import "../template.typ": *
= Protocols and protocol layers
Communication in computer networks relies on the exchange of messages.
For effective communication, participants must understand each other, which is achieved through agreed-upon protocols.
These protocols exist at various layers, from low-level bit transmission to high-level information representation.
\
Protocols define the syntax (message format) and semantics (vocabulary and meaning) of valid messages.
Due to the complex requirements of computer networks, communication is structured into layered models.
Each layer addresses a specific aspect of communication and provides interfaces to the layers above and below it.
This modular approach, where data is encapsulated at each layer, allows for flexibility.
\
Protocols within a layer can be changed or replaced without affecting the entire communication system, as long as the interfaces remain consistent.
The three most well-known layered models are the TCP/IP reference model, the OSI reference model, and the hybrid reference model.
#pagebreak()
== TCP/IP reference model
The TCP/IP reference model is a four-layer model that describes the protocols used in the Internet. The four layers are, starting from the top:
#figure(
image("../resources/tcp_ip_model.png", width: 50%),
caption: [TCP/IP reference model],
) <tcp_ip_reference_model>
#figure(
image("../resources/tcp_ip_model_data_view.png", width: 100%),
caption: [Each layer adds additional data],
) <tcp_ip_model_data_view>
#pagebreak()
== Hybrid reference model
#figure(
image("../resources/hybrid_reference_model.png", width: 25%),
caption: [Hybrid reference model],
) <hybrid_reference_model>
=== Physical layer
Responsible for transmitting raw bits (1s and 0s) over a medium.
It defines the physical connection to the transmission medium and how data is converted into signals.
Protocols in this layer determine the data rate and whether transmission can occur simultaneously in both directions.
=== Data link layer
Detects errors that occur during bit transmission.
It controls access to the transmission medium and handles the framing of packets.
Physical (MAC) addresses are used to deliver frames within a physical network; errors are detected, but error recovery is not handled at this layer.
=== Network layer
Manages the routing and forwarding of data packets between logical networks, using IP addresses.
It handles packet encapsulation and routing across different subnets, typically using the IP protocol.
=== Transport layer
Ensures reliable data transfer between processes on different devices.
It uses end-to-end protocols, such as TCP for reliable, connection-oriented communication, and UDP for faster, connectionless communication.
It also addresses processes using port numbers and ensures correct data delivery.
=== Application Layer
Contains protocols that interface directly with applications (e.g., HTTP, FTP, SMTP).
It handles the actual exchange of messages, such as emails or web pages, according to the application protocols.
== OSI reference model
The OSI (Open Systems Interconnection) model is a seven-layer reference framework designed to standardize and facilitate communication between different systems.
#figure(
image("../resources/osi_model.png", width: 50%),
caption: [OSI reference model],
) <osi_reference_model>
The additional layers in the OSI model are:
=== Session layer
The Session Layer is responsible for establishing, managing, and terminating sessions, which are virtual connections between applications on separate physical devices.
It also manages dialogue control and introduces checkpoints in long data transmissions for synchronization.
If a connection is interrupted, the transmission can resume from the last checkpoint instead of starting over.
=== Presentation layer
The Presentation Layer defines the rules for formatting (or presenting) messages.
It enables the sender to inform the receiver about the data format (e.g., ASCII) to allow any necessary conversion.
This layer also handles the definition of data structures, such as fields for names or ID numbers, as well as tasks like data compression and encryption.
However, similar to the Session Layer, the Presentation Layer is rarely used in practice today because application protocols typically handle these functions.
#pagebreak()
= Ethernet
Ethernet is a widely used LAN technology that defines the physical and data link layers of the OSI model.
It uses a bus or star topology and supports data rates of:
- 10 Mbps (Ethernet)
- 100 Mbps (Fast Ethernet)
- 1 Gbps (Gigabit Ethernet)
- 10 Gbps (10 Gigabit Ethernet) and higher.
Ethernet data is transmitted in frames, which consist of the following fields:
#table(
columns: (auto,auto,auto),
align: (left,center + horizon,left),
fill:(x,y) => fill_alternating(x, y),
[Field], [Octets], [Description],
[Preamble], [7], [Used for synchronization],
[Start Frame Delimiter (SFD)], [1], [Indicates the start of the frame],
[Destination MAC Address], [6], [Address of the recipient],
[Source MAC Address], [6], [Address of the sender],
[Type/Length], [2], [Indicates the type of data or the length of the frame],
[Data], [46-1500], [Payload data],
[Frame Check Sequence (FCS)], [4], [Error detection],
)
#figure(
image("../resources/ethernet_frame.png", width: 120%),
caption: [Ethernet frame structure],
) <ethernet_frame>
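Adding up the fields gives the classic frame sizes: with the minimum payload of 46 octets a frame is 6 + 6 + 2 + 46 + 4 = 64 octets (72 octets on the wire, counting preamble and SFD), and with the maximum payload of 1500 octets it is 1518 octets (1526 on the wire).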
#pagebreak()
= Switches and Hubs
Switches and hubs are devices used to connect multiple devices in a network.
However, they operate differently and have distinct functions.
== Hubs
Hubs operate at the physical layer (Layer 1) of the OSI model.
They are simple devices that receive data packets from one device and broadcast them to all other connected devices.
Hubs use the bus topology, where all devices share the same communication medium, which creates a single large collision domain.
They are considered inefficient for modern networks due to their broadcast nature and lack of intelligence, and have been replaced by switches.
== Switches
Switches operate at the data link layer (Layer 2) of the OSI model.
They are more intelligent than hubs and can learn the MAC addresses of devices connected to their ports.
Switches use this information to forward data packets only to the intended recipient, reducing unnecessary traffic and collisions by creating separate collision domains for each port.
At startup, the initial MAC address table of a switch is empty.
=== Flooding
When a switch receives a frame with an unknown destination MAC address, it floods the frame to all ports except the source port.
This allows the switch to learn the MAC address of the device that responds to the frame.
=== Forwarding
Once a switch learns the MAC addresses of devices connected to its ports, it forwards frames only to the port where the destination device is located.
This reduces unnecessary traffic and improves network efficiency.
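The learning and forwarding decision described in the two sections above can be summarised in a short sketch.
It is not taken from any real switch firmware; the MAC-address strings, port numbers and the `flood()`/`send()` helpers are placeholders used only for illustration.
```cpp
#include <string>
#include <unordered_map>

// Hypothetical sketch of the learn-and-forward logic of a layer-2 switch.
struct Switch {
    std::unordered_map<std::string, int> mac_table; // learned MAC address -> port

    void on_frame(const std::string& src, const std::string& dst, int in_port) {
        mac_table[src] = in_port;            // learning: remember which port src is on
        auto it = mac_table.find(dst);
        if (it == mac_table.end()) {
            flood(in_port);                  // unknown destination: flood all other ports
        } else if (it->second != in_port) {
            send(it->second);                // known destination: forward to that port only
        }                                    // otherwise the frame stays on its own segment
    }

    void flood(int except_port) { /* transmit on every port except except_port */ }
    void send(int port)         { /* transmit on the given port */ }
};
```
At startup `mac_table` is empty, so the first frames are flooded; as replies arrive, the table fills and traffic becomes increasingly targeted.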
= Spanning Tree Protocol (STP)
Spanning Tree Protocol (STP) is a network protocol that prevents loops in Ethernet networks.
Loops can occur when there are multiple paths between switches in a network, causing broadcast storms and network congestion.
Each switch port passes through a series of states, governed by three timers.
Moving a port straight from the blocking state to the forwarding state before topology information has propagated could create temporary loops, hence the need for five distinct port states:
#table(
columns: (auto,auto),
align: (left,left),
fill:(x,y) => fill_alternating(x, y),
[State], [Description],
[Blocking], [Port does not forward frames],
[Listening], [Port prepares to forward frames],
[Learning], [Port learns MAC addresses],
[Forwarding], [Port forwards frames],
[Disabled], [Port is administratively disabled],
)<spanning_tree_protocol>
The time a port spends in each state is determined by three timers, with only the Root Bridge having the authority to adjust them:
- Hello Timer: Interval between BPDUs, typically 2 seconds.
- Forward-Delay Timer: Time spent in Listening and Learning states, usually 15 seconds each (total 30 seconds).
- Max Age Timer: Time a switchport retains configuration info, typically 20 seconds.
When STP is enabled, ports pass through these states in sequence:\
Blocking → Listening → Learning → Forwarding, which takes about 50 seconds under standard settings—a significant duration in networking.
The convergence time, or time needed to recalculate the Spanning Tree after a link failure, is substantial, and this delay is a common critique of STP.
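With the default timers this corresponds to up to 20 s (Max Age) + 15 s (Listening) + 15 s (Learning) = 50 seconds before a previously blocked port starts forwarding.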
|
https://github.com/wyatt-feng/sustech-ug-thesis-typst | https://raw.githubusercontent.com/wyatt-feng/sustech-ug-thesis-typst/main/resources/template_en.typ | typst | Other | #import "@preview/tablex:0.0.8": tablex, rowspanx, colspanx
#let 字号 = (
初号: 42pt,
小初: 36pt,
一号: 26pt,
小一: 24pt,
二号: 22pt,
小二: 18pt,
三号: 16pt,
小三: 15pt,
四号: 14pt,
中四: 13pt,
小四: 12pt,
五号: 10.5pt,
小五: 9pt,
六号: 7.5pt,
小六: 6.5pt,
七号: 5.5pt,
小七: 5pt,
)
#let 字体 = (
仿宋: ("Times New Roman", "FangSong"),
宋体: ("Times New Roman", "Source Han Serif"),
黑体: ("Times New Roman", "Source Han Sans"),
楷体: ("Times New Roman", "KaiTi"),
英文: ("Times New Roman"),
代码: ("New Computer Modern Mono", "Times New Roman", "SimSun"),
)
#let lengthceil(len, unit: 字号.小四) = calc.ceil(len / unit) * unit
#let partcounter = counter("part")
#let chaptercounter = counter("chapter")
#let appendixcounter = counter("appendix")
#let footnotecounter = counter(footnote)
#let rawcounter = counter(figure.where(kind: "code"))
#let imagecounter = counter(figure.where(kind: image))
#let tablecounter = counter(figure.where(kind: table))
#let equationcounter = counter(math.equation)
#let biblio(path) = {
align(center)[#heading(numbering: none, "References")]
bibliography(path, title: none, style: "./gb-t-7714-2015-cn.csl")
}
#let skippedstate = state("skipped", false)
#let thesisnumbering(..nums, location: none) = locate(loc => {
let actual_loc = if location == none { loc } else { location }
if appendixcounter.at(actual_loc).first() < 1 {
numbering("1.1", ..nums)
} else {
numbering("A1.1", ..nums)
}
})
#let figurenumbering(..nums, location: none) = locate(loc => {
let actual_loc = if location == none { loc } else { location }
nums.pos().last()
})
#let numberedpar(numbering_scheme: "(1)", ..content) = {
content.pos().enumerate().map(it => {
// There is a bug in the upstream code that wouldn't apply first-line-indent
// in blocks, which affects tables, etc. The bug fix seems to be included in
// the codebase, but hasn't been released yet.
// TODO: remove manual indentation after bug fix.
// Actually, it wouldn't harm anyway.
par(justify: true, first-line-indent: 0em)[#h(2em)#numbering(numbering_scheme, it.at(0)+1)#h(1em)#it.at(1)]
}).join()
}
#let myunderline(s, width: 300pt, bold: false) = {
let chars = s.clusters()
let n = chars.len()
style(styles => {
let i = 0
let now = ""
let ret = ()
while i < n {
let c = chars.at(i)
let nxt = now + c
if measure(nxt, styles).width > width or c == "\n" {
if bold {
ret.push(strong(now))
} else {
ret.push(now)
}
ret.push(v(-1em))
ret.push(line(length: 100%))
if c == "\n" {
now = ""
} else {
now = c
}
} else {
now = nxt
}
i = i + 1
}
if now.len() > 0 {
if bold {
ret.push(strong(now))
} else {
ret.push(now)
}
ret.push(v(-0.9em))
ret.push(line(length: 100%))
}
ret.join()
})
}
#let outline(title: "Table of Contents", depth: none, indent: false) = {
align(center)[#text(font: 字体.英文, size: 字号.小二, weight: "bold", title)]
locate(it => {
let elements = query(heading.where(outlined: true).after(it), it)
for el in elements {
// Skip headings that are too deep
if depth != none and el.level > depth { continue }
let maybe_number = if el.numbering != none {
numbering(el.numbering.with(location: el.location()), ..counter(heading).at(el.location()))
h(0.5em)
}
let line = {
if maybe_number != none {
style(styles => {
let width = measure(maybe_number, styles).width
box(
width: lengthceil(width),
maybe_number
)
})
}
link(el.location(), if el.level == 1 {
set text(font: 字体.宋体, size: 字号.四号, weight: "bold")
el.body
} else {
set text(font: 字体.宋体, size: 字号.四号, weight: "regular")
el.body
})
// Filler dots
box(width: 1fr, h(10pt) + box(width: 1fr, repeat[.]) + h(10pt))
// Page number
let footer = query(selector(<__footer__>).after(el.location()), el.location())
let page_number = if footer == () {
0
} else {
counter(page).at(footer.first().location()).first()
}
link(el.location(), if el.level == 1 {
set text(font: 字体.宋体, size: 字号.四号, weight: "bold")
str(page_number)
} else {
str(page_number)
})
linebreak()
v(-0.2em)
}
line
}
})
}
#let codeblock(raw, caption: none, outline: false) = {
figure(
if outline {
rect(width: 100%)[
#set align(left)
#raw
]
} else {
set align(left)
raw
},
caption: caption, kind: "code", supplement: ""
)
}
#let appendix() = {
align(center)[#heading(numbering: none, "Appendices")]
appendixcounter.update(10)
chaptercounter.update(1)
counter(heading).update(1)
}
#let tbl(tbl, caption: "", source: "") = {
set text(font: 字体.英文, size: 字号.五号)
[
#figure(
tbl,
caption: caption,
supplement: [Table],
kind: table,
)
#if source != "" {
v(-1em)
align(left)[Data source: #source]
}
]
}
#let conf(
class: "",
serialnumber: "",
udc: "",
confidence: "",
available_for_reference: true,
author: "",
studentid: "",
blindid: "",
cheader: "",
title: "",
subtitle: "",
department: "",
major: "",
supervisor: "",
date: "",
cabstract: [],
ckeywords: (),
eabstract: [],
ekeywords: (),
acknowledgements: [],
linespacing: 1.5em,
parspacing: 1.5em,
outlinedepth: 3,
blind: false,
doc,
) = {
set page("a4",
margin: (top: 2.5cm, bottom: 2cm, left: 3cm, right: 2.5cm),
footer: locate(loc => {
if skippedstate.at(loc) and calc.even(loc.page()) { return }
[
#set text(字号.五号)
#set align(center)
#let heading_count = query(selector(heading).before(loc), loc).len()
#if heading_count < 2 {
// Skip the cover, the abstract, and the toc
counter(page).update(0)
} else {
let current_page = counter(page).at(loc).first()
[
#str(current_page)
]
}
#label("__footer__")
]
}),
)
set text(字号.二号, font: 字体.黑体, lang: "en")
set align(center + horizon)
set heading(numbering: thesisnumbering)
set figure(numbering: figurenumbering)
set math.equation(numbering: thesisnumbering)
set list(indent: 2em)
set enum(indent: 2em)
set par(leading: linespacing)
show strong: it => text(font: 字体.黑体, weight: "bold", it.body)
show emph: it => text(font: 字体.楷体, style: "italic", it.body)
show par: set block(spacing: linespacing)
show raw: set text(font: 字体.代码)
show footnote.entry: it => {
let loc = it.note.location()
let superscript_numbering = c => [#super[#c] ]
numbering(
superscript_numbering,
..counter(footnote).at(loc),
)
set text(font: 字体.宋体, size: 字号.小五)
it.note.body
}
show heading: it => [
// Cancel indentation for headings
#set par(first-line-indent: 0em)
#let sizedheading(it, size) = [
#set text(size)
#v(2em)
#if it.numbering != none {
strong(counter(heading).display())
h(0.5em)
}
#strong(it.body)
#v(1em)
]
#if it.level == 1 {
chaptercounter.step()
footnotecounter.update(())
rawcounter.update(())
sizedheading(it, 字号.三号)
} else {
if it.level == 2 {
sizedheading(it, 字号.四号)
} else if it.level == 3 {
sizedheading(it, 字号.小四)
} else {
sizedheading(it, 字号.小四)
}
}
]
show figure: it => [
#set align(center)
#if not it.has("kind") {
it
} else if it.kind == image {
it.body
align(center)[
#set text(font: 字体.黑体, size: 字号.五号, weight: "bold")
#it.caption
]
} else if it.kind == table {
align(center)[
#set text(font: 字体.黑体, size: 字号.五号, weight: "bold")
#it.caption
]
it.body
} else if it.kind == "code" {
[
#set text(字号.五号)
Code #it.caption
]
it.body
}
]
show ref: it => {
if it.element == none {
// Keep citations as is
it
} else {
// Remove prefix spacing
h(0em, weak: true)
let el = it.element
let el_loc = el.location()
if el.func() == math.equation {
// Handle equations
link(el_loc, [
Equation
#figurenumbering(chaptercounter.at(el_loc).first(), equationcounter.at(el_loc).first(), location: el_loc)
])
} else if el.func() == figure {
// Handle figures
if el.kind == image {
link(el_loc, [
Figure
#figurenumbering(chaptercounter.at(el_loc).first(), imagecounter.at(el_loc).first(), location: el_loc)
])
} else if el.kind == table {
link(el_loc, [
Table
#figurenumbering(chaptercounter.at(el_loc).first(), tablecounter.at(el_loc).first(), location: el_loc)
])
} else if el.kind == "code" {
link(el_loc, [
Code
#figurenumbering(chaptercounter.at(el_loc).first(), rawcounter.at(el_loc).first(), location: el_loc)
])
}
} else if el.func() == heading {
// Handle headings
if el.level == 1 {
link(el_loc, thesisnumbering(..counter(heading).at(el_loc), location: el_loc))
} else {
link(el_loc, [
Section
#thesisnumbering(..counter(heading).at(el_loc), location: el_loc)
])
}
}
// Remove suffix spacing
h(0em, weak: true)
}
}
let fieldname(name) = [
#set align(right + top)
#text(font: 字体.宋体, weight: "bold", name)
]
let fieldvalue(value) = [
#set align(center + horizon)
#set text(font: 字体.宋体, size: 字号.三号, weight: "bold")
#grid(
rows: (auto, auto),
row-gutter: 0.2em,
value,
line(length: 100%)
)
]
let smallfieldname(name) = [
#text(font: 字体.宋体, size: 字号.小四, weight: "regular", name)
]
let smallfieldvalue(value) = [
#set align(left + horizon)
#set text(font: 字体.宋体, size: 字号.小四, weight: "regular")
#grid(
rows: (1em, auto),
row-gutter: 0.2em,
value,
line(length: 4em)
)
]
// Cover page
grid(
columns: (2em, 5fr, 2em, 1fr),
align(left)[#smallfieldname("CLC")],
align(left)[#smallfieldvalue(class)],
align(left)[#smallfieldname("Number")],
align(left)[#smallfieldvalue(serialnumber)],
)
grid(
columns: (2em, 5fr, 6em, 2fr),
align(left)[#smallfieldname("UDC")],
align(left)[#smallfieldvalue(udc)],
align(left)[#smallfieldname("Available for reference")],
align(left, smallfieldname[
Yes
#if available_for_reference {
sym.checkmark
} else {
sym.ballot
}
#h(0.5em)
No
#if available_for_reference {
sym.ballot
} else {
sym.checkmark
}]),
)
box(
grid(
columns: (auto, auto),
gutter: 0.4em,
image("images/sustech-en.png", height: 4.8em, fit: "contain"),
)
)
linebreak()
v(1em)
text(font: 字体.宋体, size: 字号.小初, weight: "regular", "Undergraduate Thesis")
set text(font: 字体.宋体, size: 字号.三号, weight: "bold")
grid(
columns: (auto, auto),
column-gutter: 0.5em,
row-gutter: 1.5em,
align(right, "Thesis Title: "),
myunderline(title + "\n" + subtitle),
align(right, "Student Name: "),
fieldvalue(author),
align(right, "Student ID: "),
fieldvalue(studentid),
align(right, "Department: "),
fieldvalue(department),
align(right, "Program: "),
fieldvalue(major),
align(right, "Thesis Advisor: "),
fieldvalue(supervisor),
)
v(1em)
text(weight: "regular", size: 字号.三号)[Date: #date]
pagebreak()
// Honor Pledge
set align(left + top)
align(center, text(font: 字体.黑体, weight: "bold", size: 字号.二号, "Commitment of Honesty"))
v(3em)
set text(font: 字体.宋体, weight: "regular", size: 字号.四号)
numberedpar(numbering_scheme: "1.", "I solemnly promise that the paper presented comes from my independent research work under my supervisor’s supervision. All statistics and images are real and reliable.",
"2. Except for the annotated reference, the paper contents no other published work or achievement by person or group. All people making important contributions to the study of the paper have been indicated clearly in the paper.",
"I promise that I did not plagiarize other people’s research achievement or forge related data in the process of designing topic and research content.",
"If there is violation of any intellectual property right, I will take legal responsibility myself.",
)
v(3em)
align(right, text("Signature: " + h(5em)))
v(1em)
align(right, date)
pagebreak()
// English abstract
par(justify: true, first-line-indent: 0em, leading: 25pt)[
#align(center)[#text(font: 字体.黑体, size: 字号.二号, title)]
#if subtitle != "" {
align(right)[#text(font: 字体.黑体, size: 字号.小二, "---" + subtitle)]
}
#text(font: 字体.英文, size: 字号.三号, "[Abstract]: ")
#set text(font: 字体.英文, size: 字号.四号, weight: "regular")
#eabstract
#v(1fr)
#text(font: 字体.英文, size: 字号.三号, "[Keywords]: ")
#ekeywords.join("; ")
#v(1fr)
]
pagebreak()
// Chinese abstract
par(justify: true, first-line-indent: 0em, leading: 25pt)[
#text(font: 字体.黑体, size: 字号.三号, "[摘要]:")
#set text(font: 字体.宋体, size: 字号.四号, weight: "regular")
#cabstract
#v(2fr)
#text(font: 字体.黑体, size: 字号.三号, "[关键词]:")
#ckeywords.join(";")
#v(1fr)
]
pagebreak()
// Table of contents
outline(
title: "Table of Contents",
depth: outlinedepth,
indent: true,
)
pagebreak()
set align(left + top)
par(justify: true, first-line-indent: 2em, leading: linespacing)[
#set text(font: 字体.宋体, size: 字号.小四)
#show par: set block(spacing: parspacing)
#doc
]
if not blind {
pagebreak()
align(center)[#heading(level: 1, numbering: none, "Acknowledgements")]
par(justify: true, first-line-indent: 2em, leading: linespacing)[
#acknowledgements
]
partcounter.update(30)
}
}
|
https://github.com/1sSay/USPTU_conspects | https://raw.githubusercontent.com/1sSay/USPTU_conspects/main/src/math/Geometry.typ | typst | // Global settings and templates
#set text(14pt)
#let def(term, color: black) = {
box(stroke: color, inset: 7pt, text()[ #term ])
}
// Header
#align(center, heading(level: 1)[Математика \ Аналитическая геометрия])
#align(center, text(weight: "thin")[21.09.2024])
#align(center, text(weight: "thin")[Конспект Сайфуллина Искандара БПО09-01-24])
// Content
== Поверхности
=== Сфера
Очев
=== Гиперболоид
#def[
*Каноническое уравнение*:
$
frac(x^2, a^2) + frac(y^2, b^2) - frac(z^2, c^2) = 1
$
]
=== Парабалоид
#def[#text(red)[*TODO*]]
=== Эллипсоид
#def[#text(red)[*TODO*]]
=== Цилиндры
#def[#text(red)[*TODO*]]
|
|
https://github.com/Mc-Zen/quill | https://raw.githubusercontent.com/Mc-Zen/quill/main/src/draw-functions.typ | typst | MIT License | // INTERNAL GATE DRAW FUNCTIONS
#import "utility.typ"
#import "arrow.typ"
#import "layout.typ"
// Default gate draw function. Draws a box with global padding
// and the gate's content. Stroke and default fill are only applied if
// gate.box is true
#let draw-boxed-gate(gate, draw-params) = align(center, box(
inset: draw-params.padding,
width: gate.width,
radius: gate.radius,
stroke: if gate.box { draw-params.wire },
fill: utility.if-auto(gate.fill, if gate.box {draw-params.background}),
gate.content,
))
// Same but without displaying a box
#let draw-unboxed-gate(gate, draw-params) = box(
inset: draw-params.padding,
fill: utility.if-auto(gate.fill, draw-params.background),
gate.content
)
// Draw a gate spanning multiple wires
#let draw-boxed-multigate(gate, draw-params) = {
let dy = draw-params.multi.wire-distance
let extent = gate.multi.extent
if extent == auto { extent = draw-params.x-gate-size.height / 2 }
let style-params = (
width: gate.width,
stroke: utility.if-auto(gate.stroke, draw-params.wire),
radius: gate.radius,
fill: utility.if-auto(gate.fill, draw-params.background),
inset: draw-params.padding,
)
align(center + horizon, box(
..style-params,
height: dy + 2 * extent,
gate.content
))
let draw-inouts(inouts, alignment) = {
if inouts != none and dy != 0pt {
let width = measure(line(length: gate.width)).width
let y0 = -(dy + extent) - draw-params.center-y-coords.at(0)
let get-wire-y(qubit) = { draw-params.center-y-coords.at(qubit) + y0 }
set text(size: .8em)
context {
for inout in inouts {
let size = measure(inout.label)
let y = get-wire-y(inout.qubit)
let label-x = draw-params.padding
if "n" in inout and inout.n > 1 {
let y2 = get-wire-y(inout.qubit + inout.n - 1)
let brace = utility.create-brace(auto, alignment, y2 - y + draw-params.padding)
let brace-x = 0pt
let size = measure(brace)
if alignment == right { brace-x += width - size.width }
place(brace, dy: y - 0.5 * draw-params.padding, dx: brace-x)
label-x = size.width
y += 0.5 * (y2 - y)
}
place(dy: y - size.height / 2, align(
alignment,
box(inout.label, width: width, inset: (x: label-x))
))
}
}
}
}
draw-inouts(gate.multi.inputs, left)
draw-inouts(gate.multi.outputs, right)
}
#let draw-targ(item, draw-params) = {
let size = item.data.size
box({
circle(
radius: size,
stroke: draw-params.wire,
fill: utility.if-auto(item.fill, draw-params.background)
)
place(line(start: (size, 0pt), length: 2*size, angle: -90deg, stroke: draw-params.wire))
place(line(start: (0pt, -size), length: 2*size, stroke: draw-params.wire))
})
}
#let draw-ctrl(gate, draw-params) = {
let color = utility.if-auto(gate.fill, draw-params.color)
if "show-dot" in gate.data and not gate.data.show-dot { return none }
if gate.data.open {
let stroke = utility.if-auto(gate.fill, draw-params.wire)
box(circle(stroke: stroke, fill: draw-params.background, radius: gate.data.size))
} else {
box(circle(fill: color, radius: gate.data.size))
}
}
#let draw-swap(gate, draw-params) = {
box({
let d = gate.data.size
let stroke = draw-params.wire
box(width: d, height: d, {
place(line(start: (-0pt, -0pt), end: (d, d), stroke: stroke))
place(line(start: (d, 0pt), end: (0pt, d), stroke: stroke))
})
})
}
#let draw-meter(gate, draw-params) = {
let content = {
set align(top)
let stroke = draw-params.wire
let padding = draw-params.padding
let fill = utility.if-none(gate.fill, draw-params.background)
let height = draw-params.x-gate-size.height
let width = 1.5 * height
height -= 2 * padding
width -= 2 * padding
box(
width: width, height: height, inset: 0pt,
{
let center-x = width / 2
place(path((0%, 110%), ((50%, 40%), (-40%, 0pt)), (100%, 110%), stroke: stroke))
set align(left)
arrow.draw-arrow((center-x, height * 1.2), (width * .9, height*.3), length: 3.8pt, width: 2.8pt, stroke: stroke, arrow-color: draw-params.color)
})
}
gate.content = rect(content, inset: 0pt, stroke: none)
if gate.multi != none and gate.multi.num-qubits > 1 {
draw-boxed-multigate(gate, draw-params)
} else {
draw-boxed-gate(gate, draw-params)
}
}
#let draw-nwire(gate, draw-params) = {
set text(size: .7em)
let size = measure(gate.content)
let extent = 2.5pt + size.height
box(height: 2 * extent, { // box is solely for height hint
place(dx: 1pt, dy: 0pt, gate.content)
place(dy: extent, line(start: (0pt,-4pt), end: (-5pt,4pt), stroke: draw-params.wire))
})
}
#let draw-permutation-gate(gate, draw-params) = {
let dy = draw-params.multi.wire-distance
let width = gate.width
if dy == 0pt { return box(width: width, height: 4pt) }
let separation = gate.data.separation
if separation == auto { separation = draw-params.background }
if type(separation) == color { separation += 3pt }
if type(separation) == length { separation += draw-params.background }
box(
height: dy + 4pt,
inset: (y: 2pt),
fill: draw-params.background,
width: width, {
let qubits = gate.data.qubits
let y0 = draw-params.center-y-coords.at(gate.qubit)
let bend = gate.data.bend * width / 2
for from in range(qubits.len()) {
let to = qubits.at(from)
let y-from = draw-params.center-y-coords.at(from + gate.qubit) - y0
let y-to = draw-params.center-y-coords.at(to + gate.qubit) - y0
if separation != none {
place(path(((0pt,y-from), (-bend, 0pt)), ((width, y-to), (-bend, 0pt)), stroke: separation))
}
place(path(((-.1pt,y-from), (-bend, 0pt)), ((width+.1pt, y-to), (-bend, 0pt)), stroke: draw-params.wire))
}
}
)
}
// Draw an lstick (align: "right") or rstick (align: "left")
#let draw-lrstick(gate, draw-params) = {
assert(gate.data.align in (left, right), message: "`lstick`/`rstick`: Only left and right are allowed for parameter align")
let content = box(inset: draw-params.padding, gate.content)
let size = measure(content)
let brace = none
if gate.data.brace != none {
let brace-height
if gate.multi == none {
brace-height = 1em + 2 * draw-params.padding
} else {
brace-height = draw-params.multi.wire-distance + .5em
}
let brace-symbol = gate.data.brace
if brace-symbol == auto and gate.multi == none {
brace-symbol = none
}
brace = utility.create-brace(brace-symbol, gate.data.align, brace-height)
}
let brace-size = measure(brace)
let width = size.width + brace-size.width + gate.data.pad
let height = size.height
let brace-offset-y
let content-offset-y = 0pt
if gate.multi == none {
brace-offset-y = size.height / 2 - brace-size.height / 2
} else {
let dy = draw-params.multi.wire-distance
// at first (layout) stage:
if dy == 0pt { return box(width: 2 * width, height: 0pt, content) }
height = dy
content-offset-y = -size.height / 2 + height / 2
brace-offset-y = -.25em
}
let brace-pos-x = if gate.data.align == right { size.width } else { gate.data.pad }
let content-pos-x = if gate.data.align == right { 0pt } else { width - size.width }
box(
width: width,
height: height,
{
place(brace, dy: brace-offset-y, dx: brace-pos-x)
place(content, dy: content-offset-y, dx: content-pos-x)
}
)
}
#let draw-gategroup(x1, x2, y1, y2, item, draw-params) = {
let p = item.padding
let (x1, x2, y1, y2) = (x1 - p.left, x2 + p.right, y1 - p.top, y2 + p.bottom)
let size = (width: x2 - x1, height: y2 - y1)
layout.place-with-labels(
dx: x1, dy: y1,
labels: item.labels,
size: size,
draw-params: draw-params, rect(
width: size.width, height: size.height,
stroke: item.style.stroke,
fill: item.style.fill,
radius: item.style.radius
)
)
}
#let draw-slice(x, y1, y2, item, draw-params) = layout.place-with-labels(
dx: x, dy: y1,
size: (width: 0pt, height: y2 - y1),
labels: item.labels,
draw-params: draw-params,
line(angle: 90deg, length: y2 - y1, stroke: item.style.stroke)
)
#let draw-horizontal-wire(x1, x2, y, stroke, wire-count, wire-distance: 1pt) = {
if x1 == x2 { return }
let wire = line(start: (x1, y), end: (x2, y), stroke: stroke)
range(wire-count)
.map(i => (2*i - (wire-count - 1)) * wire-distance)
.map(dy => place(wire, dy: dy))
.join()
}
#let draw-vertical-wire(
y1,
y2,
x,
stroke,
wire-count: 1,
wire-distance: 1pt,
) = {
let height = y2 - y1
let wire = line(start: (0pt, 0pt), end: (0pt, height), stroke: stroke)
let wires = range(wire-count)
.map(i => 2 * i * wire-distance)
.map(dx => place(wire, dx: dx))
place(
dx: x - (wire-count - 1) * wire-distance,
dy: y1,
wires.join()
)
}
#let draw-vertical-wire-with-labels(
y1,
y2,
x,
stroke,
wire-count: 1,
wire-distance: 1pt,
wire-labels: (),
draw-params: none,
) = {
let height = y2 - y1
let wire = line(start: (0pt, 0pt), end: (0pt, height), stroke: stroke)
let wires = range(wire-count)
.map(i => 2 * i * wire-distance)
.map(dx => place(wire, dx: dx))
layout.place-with-labels(
dx: x - (wire-count - 1) * wire-distance,
dy: y1,
labels: wire-labels,
draw-params: draw-params,
size: (width: 2 * (wire-count - 1) * wire-distance, height: height),
wires.join()
)
}
|
https://github.com/EricWay1024/Homological-Algebra-Notes | https://raw.githubusercontent.com/EricWay1024/Homological-Algebra-Notes/master/ha/c-gc.typ | typst | #import "../libs/template.typ": *
= Group (Co)homology
<group-cohomology>
== Definitions
#definition[
Let $G$ be a group. A *(left) $G$-module* is an abelian group $A$ together with a left group action $rho: G times A -> A$, with $rho(g, a)$ denoted as $g dot a$, such that
$
g dot (a_1 + a_2) = g dot a_1 + g dot a_2
$
for all $g in G$ and $a_1, a_2 in A$.
A *morphism* $A -> B$ of $G$-modules (or a *$G$-map*) is an abelian group homomorphism (i.e., $ZZ$-linear map) $phi: A -> B$ such that
$
phi(g dot a) = g dot phi (a)
$
for all $g in G$ and $a in A$.
The category of $G$-modules is denoted as $GMod$, where we write $hom_GMod$ as $homg$.
]
#note[
Recall that for any group $G$, the *integral group ring* $ZZ G$ consists of formal sums of elements of $G$ with integer coefficients:
$
sum_(g in G) f_g g,
$
where $f_g in ZZ$ is non-zero for only fintely many $g in G$. $ZZ G$ is a ring because the product of two elements of $ZZ G$ is well-defined.
]
#lemma[
There is an equivalence of categories $GMod iso ZGMod$.
]
This implies that $G$-modules can be seen as a special case of $R$-modules, so all the homological algebra we have developed applies.
// This indicates that $GMod$ is also an abelian category (which we love).
#definition[
A $G$-module is *trivial* if $g dot a = a$ for all $g in G$ and $a in A$.
We define a functor $triv: Ab -> GMod$ by sending an abelian group $A$ to a trivial $G$-module $A$.
]
#definition[
Let $A in GMod$. Then the submodule of *invariants* of $A$ is
$
A^G = {a in A : g dot a = a "for all" g in G}
$
and the module of *coinvariants* of $A$ is
$
A_G = A over angle.l g dot a - a : g in G, a in A angle.r.
$
]
#lemma[
$-^G$ and $-_G$ are functors $GMod -> Ab$.
]
#lemma[
We have adjunctions
$
-_G tack.l triv tack.l -^G.
$
]
#proof[
We first show
$
hom_G (triv(A), B) iso hom_Ab (A, B^G)
$
Take any $f : triv(A) -> B$, then $f$ is a group homomorphism $A -> B$ such that $f(g dot a) = g dot f(a)$ for all $g in G$ and $a in A$. But $g dot a = a$ due to triviality and hence $f(a) = g dot f(a)$, i.e. $f(a) in B^G$ for all $a$. Then $f$ is equivalent to a group homomorphism $A -> B^G$.
Now we prove
$
hom_Ab (A_G, B) iso homg (A, triv(B))
$
Take any $h : A -> triv(B)$, then $h$ is a group homomorphism $A -> B$ such that for all $g in G$ and $a in A$, $ h (g dot a) = g dot h(a) = h(a) <=> h(g dot a - a) = 0 <=> g dot a - a in Ker(h) $
which means $h$ is equivalent to a group homomorphism $A_G -> B$.
]
#corollary[
The functor $-_G : GMod -> Ab$ is right exact and the functor $-^G : GMod -> Ab$ is left exact.
]
#lemma[
Let $A$ be any $G$-module and let $ZZ$ be the trivial $G$-module. Then
$
A_G iso ZZ tpzg A
$
and
$
A^G iso hom_(ZZ G) (ZZ , A).
$
]
#remark[
In other words, $(-_G) = (ZZ tpzg -) = (- tpzg ZZ)$ (because the ring $ZZ G$ is commutative) and $(-^G )= hom_(ZZ G) (ZZ, -)$.
]
#proof[
We observe that the trivial module functor $triv: ZMod -> ZGMod$ can be seen as the functor
$"Hom"_(bb(Z)) lr((bb(Z) comma minus))$, where we consider $bb(Z)$ as a $bb(Z)$-$ bb(Z) G$ bimodule. By @tensor-right-exact-2,
$(bb(Z) times.circle_(bb(Z) G) -)$ is its left adjoint, which must
agree with its other left adjoint $minus_G$. (See an alternative proof in @notes[Lemma 13.9].)
For the second claim:
$A^G tilde.equiv "Hom"_(Ab) lr((bb(Z) comma A^G)) tilde.equiv$
$"Hom"_G lr((bb(Z) comma A))$.
]
#definition[
Let $A$ be a $G$-module. We define the *homology groups of
  $G$ with coefficients in $A$* as the left derived functors of $(-_G)$:
  $ H_ast lr((G , A)) = L_ast lr((minus_G)) lr((A)) tilde.equiv "Tor"_ast^(bb(Z) G) lr((bb(Z) comma A)). $
  By definition, $H_0 lr((G , A)) eq A_G$.
  Similarly, we define the *cohomology groups of
  $G$ with coefficients in $A$* as the right derived functors of $(-^G)$:
  $ H^ast lr((G , A)) = R^ast lr((minus^G)) lr((A)) tilde.equiv "Ext"_(bb(Z) G)^ast lr((bb(Z) comma A)). $
By definition, $H^0 (G, A) = A^G$.
]
#notation[
@weibel uses the notations $H_ast (G; A)$ and $H^ast (G; A)$ (with a semicolon instead of a comma).
]
// #example[
// #TODO
// ]
== First Homology
#note[
Recall the *commutator subgroup* of a group $G$ is defined as
$
[G, G] := angle.l g^(-1) h^(-1) g h : g , h in G angle.r
$
and the *abelianisation* of $G$ is $G over [G, G]$.
]
The aim of this section is to show that $H_1 (G, ZZ) iso G over [G, G]$ for any group $G$.
#definition[
The *augmentation ideal* $frak(J)$ of $ZZ G$ is defined as the kernel of the ring map
$
epsilon: ZZ G &-> ZZ \
sum_(g in G) f_g g &|-> sum_(g in G) f_g.
$
]
#remark[
$epsilon$ is obviously a surjection, so $ZZ iso ZZ G over fJ$. Moreover, $epsilon$ can be considered as the start of a resolution of $ZZ$.
]
#lemma[
$frak(J)$ is a free $ZZ$-module with basis ${g - 1 : g in G without {1} }$ (where $1$ is the group identity of $G$).
]
<j-basis>
#proof[
Simply notice that $ZZ G$ as a free $ZZ$-module has a basis ${1} union {g - 1: g in G without {1}}$ and that $epsilon(g - 1) = 0$ for any $g in G$.
]
#lemma[
For any $G$-module $A$,
$H_0(G, A) = A_G iso A over fJ A.
$
]
#proof[
$A_G iso ZZ tpzg A iso (ZZ G over fJ) tpzg A iso A over fJ A$, where we use the definition of $fJ$ and @tensor-ideal.
]
#example[
Regarding $ZZ$, $ZZ G$ and $fJ$ as $G$-modules, we have $H_0 (G, ZZ) = ZZ over fJ ZZ = ZZ$, $H_0 (G, ZZ G) = ZZ G over fJ iso ZZ$, and $H_0 (G, fJ) = fJ over fJ^2$.
]
<group-h0>
#lemma[
$fJ over fJ^2 iso G over [G, G]$.
]
#note[
  $fJ^2$ is spanned as a $ZZ$-module by ${(g -1) (h - 1) : g, h in G without {1}}$.
]
#proof[
Define map $
theta : G &-> fJ over fJ^2 \
g &|-> g - 1 + fJ^2.
$
Take any $a, b in G$, then we have
$
theta(a b) = a b - 1 + fJ^2 = a b - 1 - (a - 1) (b - 1) + fJ^2 = (a - 1) + (b-1) +fJ^2 = theta(a) +theta(b),
$
so $theta$ is a group homomorphism. Since $fJ over fJ^2$ is abelian, we have
$
theta(a b a^(-1) b^(-1)) = theta(a) + theta(b) - theta(a) - theta(b) = 0,
$
so $[G, G] subset.eq Ker theta$, and $theta$ descends to a homomorphism
$macron(theta) : G over [G, G] -> fJ over fJ^2$.
Define group homomorphism $sigma : fJ &-> G over [G, G]$ linearly expanded by
$
n(g - 1) &|-> g^n [G, G ].
$
Then for $a, b in G$, we have
$
sigma((a - 1) (b - 1)) = sigma(a b - 1 - (a-1) - (b-1)) = a b a^(-1) b^(-1) [G, G] = [G, G].
$
So $sigma$ descends to a homomorphism $macron(sigma) : fJ over fJ^2 -> G over [G, G]$.
The result thus follows from the obvious fact that $macron(sigma)$ and $macron(theta)$ are mutual inverses.
]
#theorem[$H_1 (G , ZZ) iso G over [G, G].$
]
<homology-1>
// #remark[
// We can define topological space $B G$, the classifying space of $G$ (there is a simplicial object $B . G$ ...?), $H^ast (G, A) = H^ast_"sing" (B G , A)$, because $pi_1(B G, ast) = G$ and $H_1 = pi_1^Ab = G over [G, G] = G^Ab$. This is the meaning of the theorem.
// ]
#proof[
We have a short exact sequence
$ 0 arrow.r fJ arrow.r bb(Z) G arrow.r^epsilon bb(Z) arrow.r 0 $
of $G$-modules, where $ZZ$ is viewed as a trivial $G$-module. The #lest of $"Tor"_ast^(bb(Z) G)$ gives
$ H_1 lr((G , bb(Z) G)) arrow.r H_1 lr((G , bb(Z))) arrow.r fJ_G arrow.r lr((bb(Z) G))_G arrow.r^(epsilon_ast) bb(Z)_G arrow.r 0. $
Since $bb(Z) G$ is a projective and thus flat $bb(Z) G$-module, we have
$H_1 lr((G , bb(Z) G)) eq 0$ by @flat-tor. Notice that $ZZ_G = ZZ$ and
$ (bb(Z) G)_G iso ZZ$ by @group-h0.
Since $epsilon_ast$ is a surjection, we see $epsilon_ast$ must be an isomorphism $ZZ -> ZZ$. So we have
$H_1 lr((G , bb(Z))) tilde.equiv fJ_G eq fJ slash fJ^2 tilde.equiv G slash lr([G comma G]).$
]
// $ A_G = ZZ tpzg A = Coker(fJ tpzg A -> ZZ G tpzg A) = Coker(fJ tpzg A -> A) $ so $(ZZ G)_G = ZZ G over fJ iso ZZ$.
== Norm Element
#definition[
Let $G$ be a finite group. The *norm element* of $bb(Z) G$ is
$ N eq sum_(g in G) g in bb(Z) G. $
]
#notation[
Somehow the convention here is to use a capital letter $N$ for a _group element_, not a group.
]
#lemma[$N$ is a central element of $ZZ G$ and $N in lr((bb(Z) G))^G$.]
#proof[
For every $h in G$, we have $h N = sum_g h g$, but left multiplication by $h$ is nothing but a permutation of $G$ (recall Cayley's Theorem), so $h N = sum_(g') g' = N$ by reindexing. Similarly, $N h = N$.
]
#lemma[
The subgroup
$H^0 lr((G comma bb(Z) G)) eq lr((bb(Z) G))^G $
is the two-sided ideal $bb(Z) dot N$ of $bb(Z) G$ generated by $N$, and is thus isomorphic to $ZZ$.
]
<cohomology-zero>
#proof[
Take $a = sum_(g in G) n_g g in (ZZ G)^G$. Then for any $h in G$, $a = h a = sum_(g in G) n_g (h g)$. The coefficient for $g$ in $h a$ is $n_(h^(-1) g)$. Thus, for any $g, h in G$, we have $n_g = n_(h^(-1) g)$, which shows that all $n_g$ are the same. Hence $a = n N$ for some $n in ZZ$.
]
#lemma[
When group $G$ is finite,
$
fJ = Ker(ZZ G ->^N ZZ G) = {a in ZZ G : N a = 0} \
ZZ dot N = IM(ZZ G ->^N ZZ G).
$
]
<j-ker>
#proof[
Take $a = sum_(g in G) n_g g in ZZ G$ and write $N = sum_(h in G) h$. We have
$
N a = (sum_(h in G) h) (sum_(g in G) n_g g) = sum_(h in G) sum_(g in G) n_g (h g) = sum_(g' in G) sum_(g in G) n_g g' \ = sum_(g' in G) (sum_(g in G) n_g) g' = (sum_(g in G) n_g) (sum_(g' in G) g') = (sum_(g in G) n_g) N
$
Therefore $N a = 0$ if and only if $sum_(g in G) n_g = 0$, #iff $a in fJ$.
The image of $ZZ G ->^N ZZ G$ is also clear from above, since $(sum_(g in G) n_g)$ can take all values in $ZZ$.
]
#corollary[
For every finite group $G$, there is a #sest
$
ses(fJ, ZZ G, ZZ dot N, g: N).
$
]
<ses-norm>
== Finite Cyclic Groups
<finite-cyclic-groups>
Let $C_m eq ⟨sigma colon sigma^m eq 1⟩$ be the cyclic group of order
$m$ generated by $sigma$. Then the norm element of $C_m$ is
$ N = sum_(i = 0)^(m-1) sigma^i = 1 plus sigma plus dots.h plus sigma^(m minus 1) $
We observe
$ 0 eq sigma^m minus 1 eq lr((sigma minus 1)) N $
#remark[
The group ring of $C_m$ can be also viewed as $ZZ [sigma] over (sigma ^ m - 1)$.
]
// #lemma[
// ]
// #proof[
// In view of @j-ker, we only have to verify that $IM(ZZ C_m ->^N ZZ C_m) = ZZ dot N$. Take $a = sum_(j = 0)^(m-1) n_j sigma^j in ZZ C_m$, we compute
// $
// N a = (sum_(i = 0)^(m-1) sigma^i) (sum_(j = 0)^(m-1) n_j sigma^j) = sum_(i = 0)^(m-1) sum_(j = 0)^(m-1) n_j sigma^(i + j) = sum_(k = 0)^(m-1)
// $
// ]
#lemma[
There is a #sest
$
0 arrow.r bb(Z) dot N arrow.r bb(Z) C_m arrow.r^(sigma minus 1) fJ arrow.r 0.
$
]
<ses-cyclic>
#proof[
We calculate the image and kernel of the map $bb(Z) C_m arrow.r^(sigma minus 1) ZZ C_m$.
Take $a = sum_(j = 0)^(m-1) n_j sigma^j in ZZ C_m$. Then setting $n_(-1) = n_(m-1)$, we have
$
(sigma - 1) a = sum_(j = 0)^(m-1) n_j sigma^(j + 1) - sum_(j = 0)^(m-1) n_j sigma^j = sum_(j = 0)^(m-1) (n_(j-1) - n_j) sigma^(j).
$
Since
$
epsilon((sigma- 1)a) = sum_(j=0)^(m-1) (n_(j -1) - n_j) = 0,
$
we see that $(sigma - 1) a in fJ$. On the other hand, for any $b = sum_(k=0)^(m-1) f_k sigma^k in fJ$ such that $sum_(k=0)^(m-1) f_k = 0$, we can find $a$ such that $n_j = - sum_(k=0)^j f_k$ for $j = 0, ..., m-1$ (notice that $n_(m-1) = n_(-1) = 0$) so that $n_(j-1) - n_j = f_j$, or $(sigma - 1)a = b$. This shows that $IM(bb(Z) C_m arrow.r^(sigma minus 1) ZZ C_m) = fJ$.
For the kernel, $(sigma - 1) a = 0$ if and only if $n_(j-1) = n_j$ for all $j$, if and only if all $n_j$ are equal, #iff $a in ZZ dot N$.
]
#lemma[
The chain complex
$ dots.h arrow.r bb(Z) C_m arrow.r^(sigma minus 1) bb(Z) C_m arrow.r^N bb(Z) C_m arrow.r^(sigma minus 1) bb(Z) C_m arrow.r^epsilon bb(Z) arrow.r 0 $
is a projective resolution for $bb(Z)$ as a $bb(Z) C_m$-module.
]
#proof[
This is obtained by splicing the sequences from @ses-norm and @ses-cyclic together.
]
#theorem[
Let $A$ be a $G$-module, where $G = C_m$. Then
$ H_n lr((C_m , A)) eq cases(
A slash lr((sigma minus 1)) A quad &"if " n eq 0 comma,
A^(G ) slash N A quad &"if " n eq 1 comma 3 comma 5 comma dots.h comma,
brace.l a in A colon N a eq 0 brace.r slash lr((sigma minus 1)) A quad &"if " n eq 2 comma 4 comma 6 comma dots.h
)
$ $ H^n lr((C_m , A)) eq cases(
A^G quad &"if " n eq 0 comma,
brace.l a in A colon N a eq 0 brace.r slash lr((sigma minus 1)) A quad &"if " n eq 1 comma 3 comma 5 comma dots.h comma,
A^G slash N A quad &"if " n eq 2 comma 4 comma 6 comma dots.h
)
$
]
// #proof[
// Take the resolution obtained from the previous lemma, delete $ZZ$, apply $- tpzg A$ and $homg (-, A)$, and take homology.
// ]
#corollary[
We have
$ & H_n lr((C_m , bb(Z))) eq cases(bb(Z) quad & "if " n eq 0, bb(Z) slash m quad & "if " n gt.eq 1 "is odd " comma, 0 quad & "else. ")\
& H^n lr((C_m , bb(Z))) eq cases(bb(Z) quad& "if " n eq 0, bb(Z) slash m quad & "if " n gt.eq 2 "is even, ", 0 quad & "else. ") $]
// #remark[
// Galois homology? Tate homology?
// ]
== Free Groups
@weibel[p. 169].
#proposition[
Let $G$ be the free group on the set $X$, and consider
the augmentation ideal $frak(J)$ of $bb(Z) G$. Then $frak(J)$ is a free
$bb(Z) G$-module with basis the set
$X minus 1 eq brace.l x minus 1 colon x in X brace.r$.
]
#proof[
Every
$g in$ $G$ may be written uniquely as a reduced word in the symbols
$lr({x comma x^(minus 1) colon x in X})$; write $G lr((x))$ \(resp.
$G lr((x^(minus 1)))$ ) for the subset of all $g in G$ ending in the
symbol $x$ \(resp. in $x^(minus 1)$ ) so that
$
G minus brace.l 1 brace.r = {G(x)}_(x in X) union.sq {G(x^(-1))}_(x in X).
$
By @j-basis, $frak(J)$ is a free abelian group with
$bb(Z)$-basis $brace.l g minus 1 colon g in G$, $g eq.not 1 brace.r$.
Now we claim that the $ZZ$-basis $brace.l g minus 1 : g in G, g eq.not 1 brace.r$ can be uniquely rewritten in terms of the set $brace.l g lr((x minus 1)) : g in G, x in X brace.r$. We prove this by induction on word length of $g$. When the word length is $1$, either $g = x$ or $g = x^(-1)$ for some $x in X$, so the claim is trivial. When the word length is $n$, then we can write either $g = g' x$ (if $g in G(x)$) or $g = g' x^(-1)$ (if $g in G(x^(-1))$), where the word length of $g'$ is $(n-1)$. In the first case,
$ g -1 = g 'x minus 1 & eq g' lr((x minus 1)) plus lr((g' minus 1)), $
and in the second case,
$
g - 1 = g' x^(minus 1) minus 1 & eq minus lr((g' x^(minus 1))) lr((x minus 1)) plus lr((g' minus 1)). $
Hence in both cases, the claim follows from the induction hypothesis. We can similarly prove that we can uniquely rewrite ${g(x-1)}$ in terms of ${g-1: g!=1}$.
Therefore
  $brace.l g lr((x minus 1)) : g in G comma x in X brace.r$ is another
$bb(Z)$-basis of $frak(J)$, and
$X minus 1 eq brace.l x minus 1 colon x in X brace.r$ is a
$bb(Z) G$-basis.
]
#corollary[
If $G$ is a free group on $X$, then $bb(Z)$ has the free
resolution
$ 0 arrow.r frak(J) arrow.r bb(Z) G arrow.r bb(Z) arrow.r 0 dot.basic $
Consequently,
$H_n lr((G , A)) eq H^n lr((G , A)) eq 0$ for
$n eq.not 0 comma 1$.
Moreover, when $A = ZZ$,
$ H_0 lr((G , bb(Z))) tilde.equiv H^0 lr((G , bb(Z))) tilde.equiv bb(Z) $
$ H_1 lr((G , bb(Z))) tilde.equiv xor.big_(x in X) bb(Z) \ H^1 lr((G , bb(Z))) tilde.equiv product_(x in X) bb(Z) $
]
#proof[
$H_ast lr((G , A))$ is the homology of
$0 arrow.r frak(J) times.circle_(bb(Z) G) A arrow.r A arrow.r 0$, and
$H^ast lr((G , A))$ is the cohomology of
$0 arrow.r A arrow.r "Hom"_G lr((frak(J) comma A)) arrow.r 0$. For
$A eq bb(Z)$, $H_0 (G, ZZ)$ and $H^0 (G, ZZ)$ come from @group-h0 and @cohomology-zero, respectively. $H_1 (G, ZZ) iso G over [G, G]$ by @homology-1, where $G over [G, G]$ is the free abelian group over $X$. We finally see that the differential $ZZ arrow.r "Hom"_G lr((frak(J) comma ZZ))$ must be zero, hence $H^1 (G, ZZ) = homg (fJ, ZZ) tilde.equiv product_(x in X) bb(Z)$.
]
// #TODO change the notation $H_ast (G, A)$ (comma not semicolon.)
== Derivations
<crossed-homomorphisms>
// Historically, the maps we are interested in are called \"crossed
// homomorphisms\". In these notes, however, we will adopt the more modern
// term \"derivations\". Many of the proofs from now on are omitted; we
// have opted just to define the objects and sketch the theory. The proofs
// can all be found in Weibel.
#definition[
Let $G$ be a group and $A$ be a left $G$-module. A
*derivation* of $G$ in $A$ is a set map $D colon G arrow.r A$ with
$ D lr((g h)) eq g D lr((h)) plus D lr((g)) $ for all $g comma h in G $.
Write $"Der"lr((G comma A))$ for the set of derivations of $G$ in $A$.
]
#remark[
In general, if $R$ is a ring and $A$ is an $R$-$R$-bimodule, a derivation of $R$ in $A$ is an abelian group homomorphism $D : R -> A$ such that
$ D lr((r s)) eq r D lr((s)) plus D lr((r)) s. $
Here for $R = ZZ G$, we have
$D lr((g)) h eq D lr((g))$ because we are viewing $A$ as a $ZZ G$-$ZZ G$-bimodule with
trivial $G$-action on the right.
]
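#remark[
  Taking $g = h = 1$ in the definition gives $D(1) = 1 dot D(1) + D(1)$, hence $D(1) = 0$ for every derivation $D$.
]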
#lemma[
$"Der"lr((G comma A))$ is an abelian group under pointwise addition.
]
#definition[
For $a in A$, let $D_a colon G arrow.r A$ be the map
$D_a lr((g)) eq g a minus a$. A derivation of the form $D_a$ is a *principal
derivation*. Write $"PDer"lr((G comma A))$ for the set of principal derivations of $G$ in $A$.
]
#lemma[
$D_a plus D_b eq$ $D_(a plus b)$ and $"PDer"lr((G comma A))$ is a subgroup of $"Der"lr((G comma A))$.
]
#lemma[
$PDer(G, A) iso A over A^G$.
]
<pder-ag>
#definition[
Let
$phi colon fJ arrow.r A$ be a $G$-map. Define
$D_phi colon G arrow.r A$ by
$D_phi lr((g)) eq phi lr((g minus 1)). $
]
#lemma[
The map $phi arrow.r.bar D_phi : "Hom"_G lr((fJ comma A)) arrow.r "Der"lr((G comma A))$ is a natural isomorphism of abelian groups.
]
#proof[@weibel[Lemma 6.4.4].
First we show that $D_phi : G-> A$ is indeed a derivation:
$
D_phi (g h) = phi (g h - 1) = phi (g h - g) + phi (g - 1) = g D_phi (h) + D_phi (g)
$
The map $phi arrow.r.bar D_phi$ is obviously a natural group homomorphism, so it remains to verify that it is an isomorphism.
Suppose $D_phi = 0$, i.e., $D_phi (g) = phi (g - 1) = 0$ for all $g in G$. Since ${g-1 : g!=1}$ forms a basis for $fJ$, we see that $phi = 0$. Hence the map $phi arrow.r.bar D_phi$ is an injection.
Take any $D in Der(G, A)$. Define $phi : fJ -> A$ by $phi(g - 1) = D(g)$ for all $g != 1$. This extends to an abelian group homomorphism since ${g-1 : g!=1}$ forms a basis of $fJ$. It is easy to show that $phi$ is a $G$-map and $D_phi = D$, so the map $phi arrow.r.bar D_phi$ is also a surjection.
]
#theorem[
$ H^1 lr((G comma A)) iso "Der"lr((G comma A)) over "PDer"lr((G comma A))$.
]
<h1ga>
#proof[@weibel[Theorem 6.4.5].
The short exact sequence
$ 0 arrow.r fJ arrow.r bb(Z) G arrow.r bb(Z) arrow.r 0 $
of $bb(Z) G$-modules gives a long exact sequence beginning with
$
0 arrow.r homg lr((bb(Z) comma A)) arrow.r homg lr((bb(Z) G comma A)) arrow.r homg lr((fJ comma A)) arrow.r Ext_(ZZ G)^1 (ZZ, A) arrow.r Ext_(ZZ G)^1 (ZZ G, A)
$
which reduces to
$ 0 arrow.r A^G arrow.r A arrow.r Der(G, A) arrow.r H^1 lr((G , A)) arrow.r 0 $
The result then follows from @pder-ag.
]
#corollary[
Let $A$ be a trivial $G$-module. Then
$ H^1 lr((G comma A)) iso "Der"lr((G comma A)) tilde.equiv "Hom"_Grp lr((G comma A)). $
]
// #theorem([Hilbert Theorem 90])[
// Let $L slash K$ be a finite Galois
// extension with Galois group $G$. Let $L^ast.basic$ be the unit group of
// $L$. Then $L^ast.basic$ is naturally a $G$-module, and
// $ H^1 lr((G comma L^ast.basic)) eq 0 $
// ]
== Bar Complexes
<bar-complex>
Throughout this section, $bb(Z)$ is a trivial $G$-module.
#definition[
The *unnormalised bar complex* is the chain complex
$ dots.h arrow.r B_2^u arrow.r B_1^u arrow.r B_0^u arrow.r^epsilon bb(Z) arrow.r 0 $
with $B_0^u eq bb(Z) G$ and $B_n^u$ is the free $ZZ G$-module on the set of all symbols $[g_1 tp ... tp g_n]$ with $g_i in G$ for $n >=1$.
The differential $d colon B_n^u arrow.r B_(n minus 1)^u$ is given by
$ d eq sum_(i eq 0)^n lr((minus 1))^i d_i, $
where
$ d_0 lr(([g_1 times.circle dots.h times.circle g_n])) &eq g_1 [g_2 times.circle dots.h times.circle g_n] \
d_i lr(([g_1 times.circle dots.h times.circle g_n])) &eq [g_1 times.circle dots.h times.circle g_i g_(i plus 1) times.circle dots.h times.circle g_n] quad upright(" for ") 1 lt.eq i lt.eq n minus 1 \
d_n lr(([g_1 times.circle dots.h times.circle g_n])) &eq [g_1 times.circle dots.h times.circle g_(n minus 1)]. $
]
#definition[
The *normalised bar complex* is
$ dots.h arrow.r B_2 arrow.r B_1 arrow.r B_0 arrow.r^epsilon.alt bb(Z) arrow.r 0, $
where $B_0 eq bb(Z) G$, and for $n gt.eq 1$, the group $B_n$ is the free $ZZ G$-module on the set of all symbols
$[g_1 lr(|dots.h|) g_n]$ with $g_i in G without brace.l 1 brace.r $.
The differential $d colon B_n arrow.r B_(n minus 1)$ is
$d eq sum_(i eq 0)^n lr((minus 1))^i d_i$, where
$ d_0 ([g_1 lr(|dots.h|) g_n]) & eq g_1 [g_2 lr(|dots.h|) g_n] \
d_i lr(([g_1 lr(|dots.h|) g_n])) & eq [g_1 lr(|dots.h|) g_i g_(i plus 1) lr(|dots.h|) g_n] quad upright(" for ") 1 lt.eq i lt.eq n minus 1\
d_n lr(([g_1 lr(|dots.h|) g_n])) & eq [g_1 lr(|dots.h|) g_(n minus 1)] $
We write $[]$ for $1 in B_0 eq bb(Z) G$. If any of the $g_i$ is 1 , we
write $lr([dots.h lr(|g_i|) dots.h])$ for $0 in B_n$.
]
#example[
We have
$ d lr((lr([g|h]))) &eq g lr([h]) minus lr([g h]) plus lr([g]), \
d lr((lr([f|g|h]))) &eq f lr([g|h]) minus lr([f g|h]) plus lr([f|g h]) minus lr([f|g]). $]
#theorem[
The normalised and unnormalised bar complexes are free
resolutions of $bb(Z)$ as a $bb(Z) G$-module.
]
#proof[
@weibel[Theorem 6.5.3]. We only give the proof for the normalised bar complexes, as the unnormalised case is the same. By @null-homotopic-acyclic, we only need to show that there exist abelian group homomorphisms $s_(-1): ZZ -> B_0$ and $s_n : B_n -> B_(n+1)$ for $n >= 0$ such that $d s + s d = 1$. The desired construction is given as
$s_(-1) (1) = [ ]$
and
$
s_n (g_0[g_1|...|g_n]) = [g_0|g_1|...|g_n]
$
for $n >=0$.
]
#corollary[
$H^ast lr((G comma A))$ is the
cohomology of either the chain complex $"Hom"_G lr((B_ast^u comma A))$
or $"Hom"_G lr((B_ast comma A))$.
]
This allows us to give an explicit description of group cohomology.
#definition[
Define an *$n$-cochain* as a function $f colon G^n arrow.r A$. An
$n$-cochain $phi$ is *normalised* if
$phi lr((g_1 comma dots.h comma g_n)) eq 0$ whenever there exists some $g_i = 1$.
Define the differential $d$ of an $n$-cochain $phi$ as an $(n+1)$-cochain $d phi$ given by
$ (d phi) lr((g_1 comma dots.h comma g_(n+1))) eq g_1 phi lr((g_2 comma dots.h comma g_(n+1))) plus sum_(i eq 1)^(n) lr((minus 1))^(i) phi lr((dots.h comma g_i g_(i plus 1) comma dots.h)) plus lr((minus 1))^(n+1) phi lr((g_1 comma dots.h comma g_(n))). $
If $phi$ is an $n$-cochain such that $d phi eq 0$, then $phi$ is an *$n$-cocycle*. If $phi'$ is an $(n-1)$-cochain, then the
$n$-cochain $d phi'$ is an *$n$-coboundary*. Write $Z^n lr((G comma A))$ and
$B^n lr((G comma A))$ for the abelian groups of $n$-cocycles and
$n$-coboundaries respectively.
]
From the definition, we see that
$"Hom"_G lr((B_n^u comma A))$ consists of all $n$-cochains,
while $"Hom"_G lr((B_n comma A))$ consists of all normalised $n$-cochains.
#corollary[
$ H^n lr((G comma A)) eq Z^n lr((G comma A)) slash B^n lr((G comma A))$.
]
#example[
$ H^1 lr((G comma A)) eq "Der"lr((G comma A)) slash "PDer"lr((G comma A))$.
]
#proof[
@weibel[Example 6.5.6]. This is a direct proof of @h1ga using bar resolutions. A $0 $-cochain is a map $1 arrow.r A$, that is, an element
of $A$. If $a in A$, then $d a$ is the map $G arrow.r A$ sending $g$ to
$g a - a$, which is a principal derivation by definition. Therefore, $phi in B^1 (G, A)$ #iff there exists $a in A$ such that $phi = d a$, #iff $phi in PDer(G, A)$. So $B^1 (G, A) = PDer(G, A)$.
On the other hand, $phi in Z^1 (G, A)$ #iff $d phi = 0$, #iff for all $g, h in G$,
$
0 = (d phi) (g, h) = g phi(h) - phi (g h) + phi(g)
$
#iff $phi in Der(G, A)$. Thus $Z^1 (G, A) = Der(G, A)$.
]
== Group Extensions
<group-extensions>
#definition[
Let $A$ be an abelian group and let $G$ be a group. An *extension* of $G$
by $A$ is a short exact sequence
$ 0 arrow.r A arrow.r E arrow.r^pi G arrow.r 1. $
The extension *splits* if $pi$ has a section, i.e., if there is a group
homomorphism $s colon G arrow.r E$ such that $pi compose s eq id_G$.
Extensions
$ 0 arrow.r A arrow.r E_i arrow.r^pi G arrow.r 1, $
for $i eq 1 comma 2$ are *equivalent* if there is a group isomorphism
$E_1 arrow.r E_2$ such that the obvious diagram commutes.
]
#theorem[ There is a natural bijection between $H^2 lr((G comma A))$ and the equivalence classes of extensions of $G$ by $A$.]
#proof[@weibel[Classification Theorem 6.6.3].]
// == The Bar Resolution
// We have adjunction
// $
// ZZ G tp_ZZ - : Ab arrows.lr ZGMod : "Forget"
// $
// $
// ... -> B^n_2 -> B^n_1 -> B^n_0 -> ZZ -> 0
// $
// $B^n_n$ is the free $ZZ G$-module on basis $[g_1 tp ... tp g_n]$ for $g_i in G$.
// $d: B^n_n -> B^n_(n-1)$
// #theorem[
// The bar complexes form a free resolution of the $ZZ G$-module $ZZ$.
// ]
|
|
https://github.com/sora0116/unix_seminar | https://raw.githubusercontent.com/sora0116/unix_seminar/master/presentation/template.typ | typst | #import "@preview/polylux:0.3.1"
#let slide-title = state("slide-title", [])
#let slide-author = state("slide-author", [])
#let slide-date = state("slide-date", datetime.today())
#let lr_margin = 40pt
#let colors = (rgb("#f6f6f6"), rgb("#d6e4f0"), rgb("#1e56a0"), rgb("#163172"))
#let full_block(bg: colors.at(0), body) = block(width: 100%, height: 100%, fill: bg)[#body]
#let debug_box(c) = block(width: 100%, height: 100%, fill: colors.at(c))
#let my-theme(
title: [title],
author: [author],
date: datetime.today(),
aspect-ratio: "4-3",
body
) = [
#set document(
title: title,
author: author,
keywords: (""),
date: date,
)
#set page(
paper: "presentation-" + aspect-ratio,
margin: 0pt,
)
#set text(size: 25pt, font: "<NAME>")
#show heading: it => {}
#slide-title.update(title)
#slide-author.update(author)
#slide-date.update(date)
#counter(page).update(0)
#body
]
#let title-slide(
title: none,
author: none,
upper-content: (title: none) => [
#set align(center + bottom)
#text(size: 1.5em)[
#if (title != none) {title} else {context slide-title.get()}
]
#v(.5em)
],
lower-content: (author: none) => [
#set align(right + top)
#v(.5em)
#text(size: 1.2em)[
#if (author != none) {author} else {context slide-author.get()}
]#linebreak()
#context slide-date.get().display("[year]年[month]月[day]日")#linebreak()
],
body
) = [
#polylux.polylux-slide()[
#grid(
columns: (lr_margin, 1fr, lr_margin),
rows: (1fr, 2%, 1fr),
[],[
#upper-content(title: title)
],[],grid.cell(colspan: 3)[#debug_box(2)],[],[
#lower-content(author: author)
],[]
)
]
#body
]
#let slide(
title: [title],
body
) = [
#polylux.polylux-slide()[
#grid(
rows: (10%, 1%, 1fr, 3%),
[
#full_block(bg: colors.at(0))[
#grid(
columns: (2em, 1fr, 2em),
[],[
#block(height: 100%)[
#align(horizon)[#text(size: 1.5em)[#title]]
]
],[]
)
]
],
[#full_block(bg: colors.at(2))[]],
[#block(width: 100%, height: 100%, inset: 1em, fill: colors.at(0))[#body]],
[
#set text(size: 0.5em)
#grid(
columns: (25%, 50%, 25%),
[
#full_block(bg: colors.at(2))[
#align(center+horizon)[
#text(fill: colors.at(0))[#context slide-author.get()]
]
]
],[
#full_block(bg: colors.at(1))[
#align(center+horizon)[#context slide-title.get()]
]
],[
#full_block(bg: colors.at(2))[
#align(center+horizon)[
#text(fill: colors.at(0))[#counter(page).display("1 / 1", both: true)]
]
]
]
)
],
)
]
]
|
|
https://github.com/sabitov-kirill/comp-arch-conspect | https://raw.githubusercontent.com/sabitov-kirill/comp-arch-conspect/master/questions/12_parallel.typ | typst | #heading[Параллелизм.]
#emph[Параллелизм (MIMD, что такое SMT, SIMD, почему GPU это SIMD).]
#import "/commons.typ":imagebox
== Вводная терминология
#emph[Процесс] --- часть программы, запущенная на выполнение. Процессы в общем случае не имеют общего кода и общей памяти. Они достаточно независимы друг от друга.
#emph[Поток] --- наименьшая последовательность инструкций внутри процесса, которой может независимо управлять планировщик.
#emph[Многоядерный вычислитель] --- несколько вычислительных ядер на одном кристалле процессора. При это L1 кэш уникальный для каждого ядра, L2 уникальный или общий, а L3 - общий для всех.
#emph[Многопроцессорный вычислитель] --- физически несколько процессоров (многоядерных или одноядерных).
== Виды параллельных архитектур (параллелизм по инструкциям и по данным)
+ SISD (Single Instruction stream Single Data stream) - простой одноядерный процессор.
+ SIMD (Single Instruction Multiple Data) - архитектура, характерная для видеокарт и векторных процессоров.
+ MISD (Multiple Instructions Single Data) - не имеет практического применения.
+ MIMD (Miltiple Instructions Multiple-Data) - многоядерный процессор.
=== MIMD
#imagebox("MIMD.png", height: 200pt)
Архитектура MIMD используется для многопроцессорных архитектур общего назначения. При небольшом количестве ядер можно поддерживать работу с памятью в форме модели UMA.
#imagebox("MIMDsec.png", height: 200pt)
При большом количестве вычислителей общую память поддерживать сложно, поэтому используется модель разделения памяти NUMA.
=== SMT и HyperThreading
#emph[SMT (Simultaneous Multithreading / Одновременная многопоточность)] - подход при котором один процессор выполняет несколько потоков операций.
#emph [Hyperthreading] - конкретная реализация SMT от Intel.
#emph[Алгоритм работы]:
+ Каждое ядро может хранить состояние двух потоков, используя два набора регистров и два контроллера прерываний.
+ Количество реальных вычислителей не меняется (один), однако вычислитель постоянно переключается между двумя потоками, которые обрабатывает текущее ядро
+ Данная модель возможна и эффективна при кэш промахах одного из процеесов. Поскольку в данном случае оптимизируется время, потраченное на поиск данных в оперативной памяти (пока один процесс делает запрос, над данными второго начинается работа)
=== SIMD
Векторные процессоры - первые, кто реализовывал архитектуру SIMD. Основное отличие в том, что операндами могут выступать целые массивы данных. Однако векторные процессоры не увенчались успехом, более того, почти все современные микропроцессоры могут производить векторные вычисления (семейство расширений SSE).
=== Почему GPU это SIMD?
#imagebox("fermi.png", height: 200pt)
#grid(columns: (auto, 170pt), column-gutter: 15pt, [
GPGPU — техника использования графического процессора видеокарты, предназначенного для компьютерной графики, в целях производства математических вычислений, которые обычно проводит центральный процессор. Например, достаточно удобно рассчитывать задачи связанные с машинным обучением, к примеру, перемножение матриц, поскольку в данном случае можно хорошо использовать параллельные вычисления.
На примере архитектуры, представленной выше, у нас есть множество мультипроцессоров, у них у всех есть кэш второго уровня. Видеокарта может одновременно использовать все имеющиеся мультироцессоры, выполняя на каждом разные инструкции. Непосредственно подход SIMD реализуется внутри данных потоковых мультипроцессоров. Смотря на картинку выше, нетрудно описать реализацию, которая состоит в том, что на одном столбце можно выполнить какую-то одну операцию над различными данными.
], imagebox("cuda.png"))
|
|
https://github.com/darkMatter781x/OverUnderNotebook | https://raw.githubusercontent.com/darkMatter781x/OverUnderNotebook/main/entries/intro/intro.typ | typst | #import "/packages.typ": notebookinator, gentle-clues
#import notebookinator: *
#import themes.radial.components: *
#import gentle-clues: *
#import themes.radial.components.icons
#import "/util.typ": qrlink
#show: create-body-entry.with(
title: "Introduction",
type: "notebook",
date: datetime(year: 1, month: 1, day: 1),
author: "<NAME>",
)
= What is this?
This notebook contains:
#grid(
columns: 2,
rows: 4,
gutter: 2mm,
image.decode(
utils.change-icon-color(raw-icon: icons.target, fill: blue),
height: 6em,
),
[
== Decide
What decisions we made regarding the management of the code typically and why we
made them.
],
image.decode(
utils.change-icon-color(raw-icon: icons.terminal, fill: purple),
height: 6em,
),
[
== Program
The commented code that we run on our robot along with visuals to aid in
understanding.
],
image.decode(utils.change-icon-color(raw-icon: icons.page, fill: pink), height: 6em),
[
== Concept
Explanation of algorithms and concepts that we use in our code along with
resources for further learning.
],
image.decode(utils.change-icon-color(raw-icon: icons.page, fill: gray), height: 6em),
[
== Notebook
Talks about the notebook itself like this entry.
],
)
= Formatting
#grid(
columns: 2,
rows: 3,
[
You'll also notice that the formatting of this notebook is in stark contrast to
    the formatting of the engineering notebook. This is due to the software
    differences between the two. The engineering notebook is written in Google
    Slides, which gives total control over its formatting while remaining easily
    and graphically editable, whereas for the code notebook we opted for Typst.
    Typst lets us write the entire notebook as Typst code, which gives us far
    more control over the formatting and also allows us to easily include and
    format code.
],
figure(
qrlink("hhttps://github.com/darkMatter781x/OverUnderNotebookr", size: 0.2em),
caption: [
source code for notebook
],
),
[
= _The Notebookinator_
Another benefit of using Typst is that we can use libraries like _The Notebookinator_ which
provides tools and examples for creating visually appealing notebooks with
typst.
],
figure(
qrlink("https://github.com/BattleCh1cken/notebookinator", size: 0.2em),
caption: [_The Notebookinator_ Github repository],
),
[
= Inspiration
A lot of this notebook's formatting was inspired by <NAME> of 53E's
notebook, which served as a great example of how to use _The Notebookinator_. He
was also very willing to answer any questions we had about the notebook, and we
are very grateful for his help.
],
figure(
qrlink("https://github.com/Area-53-Robotics/53E-Notebook", size: 0.2em),
caption: [53E's notebook],
),
)
|
|
https://github.com/Enter-tainer/typstyle | https://raw.githubusercontent.com/Enter-tainer/typstyle/master/tests/assets/typstfmt/160-fletcher.typ | typst | Apache License 2.0 | #figure(
fletcher.diagram(
node-outset: .5em,
node-stroke: .075em,
node((+1, 0), [variable], radius: 3em), // test
node((+1.25, .75), [const], radius: 3em), // this is the second test
edge((0, 0), (+1, 0), "=>"),
edge((0.25, .75), (+1, 0), "->"),
edge((0.25, .75), (+1.25, .75), "=>"),
node((+0, 0), [pointer to \ variable], radius: 3em),
node((-1, 0), [pointer to \ pointer to \ variable], radius: 3em),
node((-.75, .75), [pointer to \ pointer to \ const], radius: 3em),
node((+0.25, .75), [pointer to \ const], radius: 3em),
edge((-1, 0), (+0, 0), "=>"),
edge((-.75, .75), (+0, 0), "-X->"),
edge((-.75, .75), (+0.25, .75), "=>"),
)
)
|
https://github.com/Skimmeroni/Appunti | https://raw.githubusercontent.com/Skimmeroni/Appunti/main/C++/Introduzione/Casting.typ | typst | Creative Commons Zero v1.0 Universal | #import "@preview/showybox:2.0.1": showybox
As in (almost) all typed programming languages, in C++ it is possible to
perform *casting*, that is, to convert the data type of a variable into a
different, compatible type. Some casts are *implicit*: the compiler performs
the type change "behind the scenes" whenever it can infer it on its own. This
is convenient, because no extra instructions are needed, but it can be risky,
because it may become hard to trace back that such a cast ever happened. The
explicit C-style cast, instead, has the following form:
```
var_type1 = (Type1)var_type2
```
Although C++ can still use the two casts mentioned above, it also provides the
following special casts:
```
var_type1 = static_cast<Type1>(var_type2)
var_type1 = const_cast<Type1>(var_type2)
var_type1 = reinterpret_cast<Type1>(var_type2)
var_type1 = dynamic_cast<Type1>(var_type2)
```
- `static_cast` is essentially equivalent to the explicit C-style cast;
- `const_cast` is a special cast useful for "unprotecting" data, allowing a
  constant value to be accessed (and modified) through a pointer;
- `reinterpret_cast` is a special cast that "forces" a conversion even when it
  leads to ambiguous results, effectively "reinterpreting" the meaning of the
  individual bytes;
- `dynamic_cast` is a special cast that allows downcasting within a class
  hierarchy.
#showybox[
```
int i;
double d;
i = static_cast<int>(d); // Similar to i = (int)d in C fashion
int* pi;
const int* cpi = &i;
pi = static_cast<int*>(cpi); // NOT allowed, can't edit i through cpi
pi = const_cast<int*>(cpi); // Allowed
char* c;
c = reinterpret_cast<char*>(&i); // Allowed, integer now a char sequence
*(c + 2) = 0; // Editing i byte by byte
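// The lines below are an illustrative sketch, not part of the original
// example: dynamic_cast needs a polymorphic hierarchy, assumed here as a
// minimal Base/Derived pair.
struct Base { virtual ~Base() {} };
struct Derived : Base {};
Base* pb = new Derived();
Derived* pd = dynamic_cast<Derived*>(pb); // Allowed, pb really points to a Derived
pd = dynamic_cast<Derived*>(new Base()); // Fails at runtime, pd becomes nullptr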
```
]
|
https://github.com/dark-flames/resume | https://raw.githubusercontent.com/dark-flames/resume/main/libs/trans.typ | typst | MIT License | #import "@preview/shiroa:0.1.0": is-web-target
#let multiLang(env, ..c) = {
let lang = env.at("x-lang", default: "en")
c.named().at(lang)
} |
https://github.com/Myriad-Dreamin/apollo-typst | https://raw.githubusercontent.com/Myriad-Dreamin/apollo-typst/main/content/posts/simple-se.md | markdown | +++
title = "笨蛋如何进行架构设计"
date = "2024-07-16"
[taxonomies]
tags=["software-engineering"]
[extra]
typst = "simple-se"
+++
|
|
https://github.com/dainbow/MatGos | https://raw.githubusercontent.com/dainbow/MatGos/master/themes/definition.typ | typst | #import "../conf.typ": *
= Definitions and statements
This section collects general definitions that you may be asked about, together with rough ideas of their proofs.
== $NN$
We introduce it axiomatically. That is, we say:
#definition[
If a set $NN$ and a function $"Sc": NN -> NN$ satisfy the following axioms, then this set is called the set of natural numbers. (Or something along those lines; you are unlikely to be asked this, of course.)
+ $0 in NN$
+ $forall n in NN space exists ! "Sc"(n) in NN$. ($"Sc"$ - the "successor")
+ $forall n in NN space "Sc"(n) != 0$
+ $"Sc"(n) = "Sc"(m) => n = m$ (равенство в теоретико-множественном смысле)
+ (индукция) $forall Mu subset.eq NN: space 0 in Mu and (n in Mu => "Sc"(n) in Mu) => Mu = NN$
]
The operation $+$ is introduced as a recursive application of $"Sc"$.
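Concretely, the recursion reads $n + 0 = n$ and $n + "Sc"(m) = "Sc"(n + m)$.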
The operation $<=$ is introduced as $n <= m <=>^"def" exists k in NN: n+k = m $
== $RR$
We assume that you will define $ZZ$ and $QQ$ somehow or other.
Axiomatically:
#definition[ $(RR, +, times, <=, 0, 1) $ satisfies the following properties
#set enum(numbering: "I.1")
+ The operation $+: space RR^2 -> RR$ - in the sense of a field
+ The operation $times: space RR^2 -> RR$ - in the sense of a field
+ The operation $<=$ is a linear order that respects addition and multiplication by positive elements
+ $forall x in RR space forall y in RR without {0} space exists p in ZZ: space p y > x $ ($ZZ$ is obtained as $1 + 1 + 1 + 1 dots$)
+ Completeness
  + variant: in the sense of functional analysis - every Cauchy sequence converges.
  + variant: as stated by the lecturer, $forall A, B subset.eq RR: space A union B = RR and A sect B = emptyset and (forall a in A, b in B space a <= b) => exists c: space forall a in A space forall b in B space a <= c <= b$
]
#note[
Construction of $RR$. It is built as the set of equivalence classes of Cauchy sequences (in the lecturer's version: of nested shrinking rational intervals).
All properties except completeness carry over from $QQ$ in a semi-automatic way.
Completeness (in the lecturer's sense):
we build an approximating sequence of decimal approximations.
That is, first take the largest integer in $A$ and the smallest integer in $B$.
Then do the same among the numbers with one digit after the decimal point, and so on.
We obtain a sequence of nested shrinking intervals. (Or, if we take the endpoints alternately, simply a Cauchy sequence.)
It is a representative of the desired $c$.
]
== $CC$
It is constructed as $RR^2$ with basis $(1, i)$; addition is componentwise.
Multiplication: $a, b, c, d in RR space (a 1 + b i) * (c 1 + d i) = (a c-b d) 1 + (b c + a d)i$
|
https://github.com/AnsgarLichter/hka-thesis-template | https://raw.githubusercontent.com/AnsgarLichter/hka-thesis-template/main/common/todo.typ | typst | #let todo(body) = [
#let rblock = block.with(stroke: red, radius: 0.5em, fill: red.lighten(80%))
#let top-left = place.with(top + left, dx: 1em, dy: -0.35em)
#block(inset: (top: 0.35em), {
rblock(width: 100%, inset: 1em, body)
top-left(rblock(fill: white, outset: 0.25em, text(fill: red)[*TODO*]))
})
<todo>
]
#let outline-todos(title: [TODOS]) = {
heading(numbering: none, outlined: false, title)
locate(loc => {
let queried-todos = query(<todo>, loc)
let headings = ()
let last-heading
for todo in queried-todos {
let new-last-heading = query(selector(heading).before(todo.location()), loc).last()
if last-heading != new-last-heading {
headings.push((heading: new-last-heading, todos: (todo,)))
last-heading = new-last-heading
} else {
headings.last().todos.push(todo)
}
}
for head in headings {
link(head.heading.location())[
#numbering(head.heading.numbering, ..counter(heading).at(head.heading.location()))
#head.heading.body
]
[ ]
box(width: 1fr, repeat[.])
[ ]
[#head.heading.location().page()]
linebreak()
pad(left: 1em, head.todos.map((todo) => {
list.item(link(todo.location(), todo.body.children.at(0).body))
}).join())
}
})
} |
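// Example usage (sketch; the import path below assumes this file lives at
// common/todo.typ relative to your main document -- adjust it if needed):
//
//   #import "common/todo.typ": todo, outline-todos
//
//   #todo[Rewrite this paragraph once the results are final.]
//   #outline-todos(title: [Open TODOs])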
|
https://github.com/SWATEngineering/Docs | https://raw.githubusercontent.com/SWATEngineering/Docs/main/src/2_RTB/VerbaliInterni/VerbaleInterno_231201/meta.typ | typst | MIT License | #let data_incontro = "01-12-2023"
#let inizio_incontro = "08:30"
#let fine_incontro = "10:00"
#let luogo_incontro = "Discord" |
https://github.com/hchap1/typst-spell-check | https://raw.githubusercontent.com/hchap1/typst-spell-check/main/test.typ | typst | Here is some text with an equation $x^2 + 2x$, not to mention a spleling mitsake.
Here's a table:
#table(columns: 3,
[a],[b],[c],
[1],[2],[3]
)
|
|
https://github.com/avtarted/notes | https://raw.githubusercontent.com/avtarted/notes/main/math/sumofpowersoftwo.typ | typst | #set math.equation(numbering: "(1)")
= Sums of Powers Of Two
== Introduction
My goal is to explore the following relation:
$ 1 + 2 + 4 + 8 + .... + 2^n = 2^(n+1) - 1 | n in NN $ <goal>
I'll prove this using 4 different approaches.
Merely proving this equation to be true is not enough for me.
I want to receive and provide insight as to why it's plausible and makes sense, at least to me.
This article will
focus on the recursive nature of this sum,
include tree visualizations,
discuss how I use approximations to simplify my reasoning,
include a proof using binary,
and finally, throughout this article, compare and contrast the various approaches, distilling what I believe are key insights.
== Prerequisites
Knowledge of proof by induction, which I believe people learn in high school
when they take Precalculus or a second Algebra course.
For the bonus CS section at the end, the main prerequisite is knowing binary arithmetic and generally having some introductory-level computer-science experience.
== Proof By Induction
I'll focus on the left-hand side (LHS) and right-hand side (RHS) of @goal separately and express them as functions.
For the LHS, I'll define the function:
$ S(n) = 2^0 + 2^1 + ... + 2^n $ <sn>
This function has $n+1$ terms. And here it is expressed as a sum:
$ S(n) = sum_(i=0)^n 2^i $
For the RHS, I'll define the function:
$ F(n) = 2^(n+1) - 1 $ <fn>
Let $P_n$ be the claim that $S(n) = F(n)$.
I want to prove $P_n$ is true for all $n = 0, 1, 2, ...$, as this proves @goal.
To this end, I'll use induction.
The first step is to check the base case, $P_0$.
$S(0)=2^0=1$ and $F(0)=2^(0+1)-1=1$. $S(0) = F(0)$. Done.
Next, if I can show $P_k => P_(k+1)$, I'll be done.
By induction, I would have proved @goal.
Here is the claim $P_k$:
$ 2^0 + 2^1 + ... + 2^k = 2^(k+1) - 1 $ <pk>
And here is the claim $P_(k+1)$:
$ 2^0 + 2^1 + ... + 2^k + 2^(k+1) = 2^(k+2) - 1 $ <pkplus1>
Let me prove that $P_k => P_(k+1)$ in 3 ways:
+ the textbook way
+ essentially the textbook way, but with a focus on the recursive natures of $S(n)$ and $F(n)$.
+ a visual approach, using tree diagrams
=== Approach 1: Textbook Induction
Assuming @pk is true, I want to show that @pkplus1 is true as well.
Observe that I can transform the LHS of @pkplus1 by plugging in the RHS of @pk.
After the substitution:
$ (2^(k+1) - 1) + 2^(k+1) = 2^(k+2) - 1 $
Write both sides in terms of 2^k using exponent rules:
$ 2^k*2- 1 + 2^k*2 = 2^k*4- 1 $
Factor 2^k in the LHS:
$ 2^k*4 - 1 = 2^k*4 - 1 $
And I am done. I have successfully proved that $P_k => P_(k+1)$.
And since I already verified the base case, my proof of @goal is complete.
=== Approach 2: Induction emphasizing recursive definitions
So the previous proof felt slightly unsatisfactory.
What I really want to know, besides simply proving the correctness of @goal is more insight as to _why_ it's true.
If someone looks at $S(n)$ with fresh eyes, defined in @sn, would they be able to come up with $F(n)$, defined in @fn, if they have never seen $F(n)$ before?
Why is $F(n)$ plausible?
Well, the prior proof, at least to me, did not seem to help me too much answer these questions.
So the proofs in this subsection and the next attempt to answer my questions.
Ok, so actually I slightly lied. The last proof actually did help me, namely one key step in it.
And that key step was the substitution of the RHS of @pk into @pkplus1.
This exploited, and more importantly, displayed, the recursive structure of $S(n)$.
That is, $S(k+1)$ expanded out contains $S(k)$.
I'll explicitly write this out:
$ S(k+1) = S(k) + 2^(k+1) $ <skrecursive>
Now I'll do the same for $F(n)$ and try to write $F(k+1)$ in terms of $F(k)$.
$ F(k+1) = 2^(k+2) - 1 $
$ = 2^(k+1)*2 - 1 $
$ = (2^(k+1) - 1) + 2^(k+1) $
And done because note that I've spotted, and wrapped in brackets, $F(k)$. Again, I'll explicitly write this out:
$ F(k+1) = F(k) + 2^(k+1) $ <fkrecursive>
Note that $S(k+1)$ and $F(k+1)$ as defined at @skrecursive and @fkrecursive share the exact same recursive structure!
In fact, now the proof of the inductive step, $P_k => P_(k+1)$, writes itself.
$ S(k+1) eq.quest F(k+1) $
Use the recursive definitions from @skrecursive and @fkrecursive:
$ S(k) + 2^(k+1) eq.quest F(k) + 2^(k+1) $
Subtract 2^(k+1) from both sides:
$ S(k) eq.quest F(k) $
And done, because we assume $P_k$ to be true.
I feel that viewing $S(n)$ and $F(n)$ as recurrent relations, yet again, defined at @skrecursive and @fkrecursive is really helpful.
$S(n)$ grows exponentially at each step: every time we extend the sum by one term, we add double the last term.
This is evident simply by looking at the expanded definition of $S(n)$ at @sn.
$F(n)$ also clearly grows exponentially as it contains $2^n$.
So both these functions grow exponentially the same way at each step.
And both share the same base case, $S(0) = F(0) = 1$.
So these functions grow in lockstep with each other and will always remain equivalent.
Now I feel I have more insight in terms of considering growth.
I feel there are definitely parallels to calculus that, while at the moment I'm unequipped to treat, may be worth exploring.
=== Approach 3: "Approximate" Visual Induction
(This section is meant to be read alongside a tree diagram and a table of the first few levels, ideally drawn side by side.)
When figuring out the pattern behind sums of powers of 2, an observer would likely notice the pattern simply by looking at such a table,
perhaps expanding a few more levels to convince themselves that the increasingly promising pattern they have formulated is indeed $F(n)$.
In this section, I'd like to introduce the way I reasoned about $F(n)$ being plausible.
Again, as in the previous section, the key idea is the exponential growth of $S(n)$:
visually, at each level $d$ of the tree, $2^d$ more nodes are introduced.
So my candidate function to match or approximate $S(n)$ should grow exponentially, with base 2. So why not simply try the function $2^(n+1)$?
Indeed $S(n) tilde.equiv 2^(n+1)$, and I'll present a visual "proof" of this.
$2^n$ is very convenient with this visual tree approach because it corresponds to the number of leaves at a given level.
=== Approach 4: Binary, rectifying the approximation
Preface: knowing binary will help here, but it is not a hard prerequisite.
Consider the (unsigned) binary representations of 7 (0b0111) and 8 (0b1000) using 4 bits.
For readers who are not acquainted with binary, this is nothing to be scared of:
it simply means that $7 = 1 dot 2^0 + 1 dot 2^1 + 1 dot 2^2 + 0 dot 2^3 = 1 + 2 + 4$
and $8 = 0 dot 2^0 + 0 dot 2^1 + 0 dot 2^2 + 1 dot 2^3$.
Quick introduction to binary:
the 1's and 0's record the presence or absence of a particular power of two,
and the powers of 2 increase from rightmost to leftmost, just like our usual decimal notation, where the one's place is rightmost, followed by the ten's, hundred's, and so forth.
In general, 0bABCD $= D dot 2^0 + C dot 2^1 + B dot 2^2 + A dot 2^3$, where $A, B, C$, and $D$ are binary digits, meaning they take on the values 0 or 1.
So the sum $1 + 2 + 4$, which is $S(2)$, equals 7: just 1 short of 8, the next power of 2.
If we add 1 to 0b0111, there is a domino effect of carrying 1s, and we get 0b1000.
Even if you don't know binary, consider what happens when I evaluate $1 + (1 + 2 + 4)$ as follows.
First, let's rewrite all terms as powers of 2, as that's the heart of this document:
$ 2^0 + (2^0 + 2^1 + 2^2) $
Group the first 2 terms:
$ (2^0 + 2^0) + (2^1 + 2^2) $
Simplify the grouping:
$ 2^1 + (2^1 + 2^2) $
Group the first 2 terms:
$ (2^1 + 2^1) + (2^2) $
Simplify the grouping:
$ 2^2 + (2^2) $
Group the remaining 2 terms:
$ (2^2 + 2^2) $
Simplify the grouping:
$ 2^3 $
Note the recursive nature of this process, the domino effect!
Given
$ 2^k + (2^k + 2^(k+1) + ...) $
we perform the two steps of grouping the first 2 terms and simplifying, and obtain
$ 2^(k+1) + (2^(k+1) + 2^(k+2) + ...), $
an expression of the same structure, except with one higher power of 2.
So there is repeated doubling.
To recap: in the last, visual approach, we saw repeated halving.
Each level $S(k)$ we represented as $2^k + S(k-1)$, for example $S(2) = 4 + S(1)$ and $S(1) = 2 + S(0)$; in general,
$ S(k) = 2^k + S(k-1) \
  S(k-1) = 2^(k-1) + S(k-2) \
  S(k-2) = 2^(k-2) + S(k-3). $
To solve problem $k$, we need problem $k-1$; to solve problem $k-1$, we need problem $k-2$; and so forth, down to the 0th problem.
From the large problem we work backwards to the small problem.
This approach is more direct: from the small subproblem we work towards the larger subproblems directly,
$ 1 + 1 = 2 = S(0) + 1 \
  2 + 2 = 4 = S(1) + 1 \
  4 + 4 = 8 = S(2) + 1. $
We solve "1 + problem $k-1$" along the route to solving "1 + problem $k$".
In fact, when solving 1 + problem $k$, we solve all the intermediate 1 + problem $k - j$, as those are the various powers of 2 being carried.
(The same domino effect exists in decimal, e.g. $999 + 1 = 1000$; more generally, take the digits $1 1 1$ in some base $b$ and scale by $b - 1$, so that adding 1 causes the cascade.)
A CS aside: we see this idea all the time in bit manipulation that takes advantage of two's complement. An example that comes to mind is the BIT (Fenwick tree) trick for extracting the least significant 1 bit.
Back to the domino effect: in the previous approach we started from 8, subtracted 4, then subtracted 2, then subtracted 1, and were left with 1, the small excess that the approximation treated as roughly 0. So binary has the same domino effect.
|
https://github.com/akshat2602/resume | https://raw.githubusercontent.com/akshat2602/resume/master/Akshat_Sharma_Resume.typ | typst | #import "template.typ": *
#let cvdata = yaml("data.yml")
#let uservars = (
headingfont: "New Computer Modern",
bodyfont: "New Computer Modern",
fontsize: 12pt, // 10pt, 11pt, 12pt
linespacing: 7pt,
sectionspacing: 1pt,
showAddress: true, // true/false show address in contact info
showNumber: true, // true/false show phone number in contact info
showTitle: false, // true/false show title in heading
headingsmallcaps: false, // true/false use small caps for headings
sendnote: false, // set to false to have sideways endnote
)
// setrules and showrules can be overridden by re-declaring it here
// #let setrules(doc) = {
// // add custom document style rules here
//
// doc
// }
#let customrules(doc) = {
// add custom document style rules here
set page(
paper: "us-letter", // a4, us-letter
number-align: center, // left, center, right
margin: 0.75cm, // 1.25cm, 1.87cm, 2.5cm
)
show link: set text(rgb(25%, 13%, 65%))
// show link: underline
doc
}
#let cvinit(doc) = {
doc = setrules(uservars, doc)
doc = showrules(uservars, doc)
doc = customrules(doc)
doc
}
// each section body can be overridden by re-declaring it here
// #let cveducation = []
// ========================================================================== //
#show: doc => cvinit(doc)
#cvheading(cvdata, uservars)
#cveducation(cvdata)
#cvskills(cvdata)
#cvwork(cvdata)
#cvprojects(cvdata)
// #cvaffiliations(cvdata)
// #cvawards(cvdata)
// #cvcertificates(cvdata)
// #cvpublications(cvdata)
// #cvreferences(cvdata)
#endnote(uservars)
|
|
https://github.com/Myriad-Dreamin/tinymist | https://raw.githubusercontent.com/Myriad-Dreamin/tinymist/main/crates/tinymist-query/src/fixtures/type_check/infer2.typ | typst | Apache License 2.0 | #text(size: 1pt, font: (), stroke: 1pt, fill: red)[]
#path(fill: red, stroke: red)
#line(angle: 1deg, length: 1pt, stroke: red)
#rect(width: 1pt, height: 1pt, fill: red, stroke: red, radius: 1pt, inset: 1pt, outset: 1pt)
#ellipse(fill: red, stroke: red)
#circle(fill: red, stroke: red)
#box(fill: red, stroke: red)
#block(fill: red, stroke: red)
#table(
fill: red,
stroke: red,
table.hline(stroke: red),
table.vline(stroke: red),
)
#text(stroke: ())
|
https://github.com/typst-community/setup-typst | https://raw.githubusercontent.com/typst-community/setup-typst/main/test/requirements.typ | typst | MIT License | #import "@preview/example:0.1.0": *
|
https://github.com/maxds-lyon/lokiprint | https://raw.githubusercontent.com/maxds-lyon/lokiprint/main/templates/typst/.template/experiences.typ | typst |
#import "@preview/splash:0.3.0": tailwind
#import "@preview/cmarker:0.1.0"
#import "./shared/flex.typ": *
#import "./shared/sizes.typ": scale
#let card-padding = 24pt
#let experience-block(
content,
) = block(
inset: card-padding,
radius: 16pt,
fill: tailwind.slate-100,
)[
#flex(
gap: 24pt,
[
#flex(gap: 18pt)[
#let offset = (card-padding + 24pt + 20pt)
#let gap = 12pt
#block(
inset: (left: -(gap + offset)),
grid(
column-gutter: gap,
align: bottom,
columns: (offset, 1fr, auto),
pad(
right: (card-padding - gap),
align(
bottom + center,
text(fill: tailwind.slate-400, tracking: 1.4pt, weight: "semibold", baseline: -1pt)[#if (
"badge" in content
) [#content.badge] else []],
),
),
text(fill: tailwind.orange-500)[=== #content.name],
align(bottom, text(baseline: -1pt, if ("dates" in content) [#content.dates] else [])),
),
)
#if ("title" in content) [
#block(
heading(level: 4, content.title),
)
]
#if ("description" in content) [
#set par(justify: true)
#cmarker.render(content.description)
]
]
#if ("work" in content) [
#flex(gap: 18pt)[
==== Réalisations
#for el in content.work [
#flex(gap: 12pt)[
#if ("title" in el) [
====== #el.title
]
#cmarker.render(el.content)
]
]
]
]
#if ("skills" in content) [
#flex(
direction: row,
gap: 6pt,
for el in content.skills {
box(
inset: 6pt,
fill: white,
radius: 2pt,
el,
)
},
)
]
],
)
]
|
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/bone-resume/0.1.0/lib.typ | typst | Apache License 2.0 | #let resume-init(title: none, author: "六个骨头", footer: none, body) = {
set document(author: author, title: title)
set page(margin: (x: 4em, y: 5em), footer: footer)
set box(fill: color.hsv(240deg, 10%, 100%), inset: 5pt, radius: 3pt)
set text(
font: ("Hack Nerd Font", "Source Han Sans"),
lang: "zh",
)
show emph: it => {
text(it.body, style: "italic")
}
show link: it => {
set text(blue, font: (
"Source Han Serif SC",
), weight: "bold")
it
}
show heading.where(level: 1): it =>{
set text(rgb("#448"), size: 16pt, font: "Source Han Sans CN")
stack(dir: ttb, spacing: 12pt, {
it.body
}, line(length: 100%))
v(8pt, weak: true)
}
align(center)[
#block(text(weight: 700, 1.75em, title))
]
set par(justify: true)
body
}
#let resume-section(name, decs, contrib) = {
box()[
#name #h(1fr) #decs
#v(1em, weak: true)
#contrib
]
}
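// Example usage (sketch):
//
//   #import "@preview/bone-resume:0.1.0": resume-init, resume-section
//
//   #show: resume-init.with(title: "Resume", author: "Jane Doe")
//   #resume-section("Experience", "2020 - 2024", [Maintained internal tooling.])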
|
https://github.com/cloudsftp/blimm.typ | https://raw.githubusercontent.com/cloudsftp/blimm.typ/latest/blimm.typ | typst | #import "@preview/letter-pro:2.1.0": letter-simple
#let blimm(
annotations: "",
recipient: (
company: "energiesandsuch",
name: "<NAME>",
street: "Kirchstraße 1",
city: "70563 Stuttgart"
),
reference-signs: (),
date: datetime.today(),
date-format: "[year]-[month]-[day]",
subject: "Empty Subject",
both-sign: false,
body
) = {
show: letter-simple.with(
sender: (
name: "<NAME>",
address: "Ostlandstraße 25, 32339 Espelkamp",
extra: [
Tel: #link("tel:+4917634969240")[+49 176 34969240]\
E-Mail: #link("mailto:<EMAIL>")[<EMAIL>]\
],
),
annotations: annotations,
recipient: {
if "company" in recipient {
recipient.company
}
linebreak()
recipient.name
linebreak()
recipient.street
linebreak()
recipient.city
if "country" in recipient {
linebreak()
recipient.country
}
},
reference-signs: reference-signs,
date: date.display(date-format),
subject: subject,
)
body
[ // empty line for spacing
]
context {
if text.lang == "de" {
[
Mit freundlichen Grüßen
]
} else {
[
Yours sincerely,
]
}
}
v(1.5cm)
[<NAME>]
if both-sign {
h(3fr)
recipient.name
h(2fr)
}
}
|
|
https://github.com/vEnhance/1802 | https://raw.githubusercontent.com/vEnhance/1802/main/src/partial.typ | typst | MIT License | #import "@local/evan:1.0.0":*
= Partial derivatives <sec-partial-derivative>
== [TEXT] The point of differentiation is linear approximation
In 18.01, when $f : RR -> RR$, you defined a *derivative* $f'(p)$ at each input $p in RR$,
which you thought of as the *slope* of the *tangent line* at $p$.
Think $f(5.01) approx f(5) + f'(5) dot 0.01$.
This slope roughly tells you, if you move a slight distance away from the input $p$,
this is how fast you expect $f$ to change.
To drill the point home again, in 18.01, we had
$ f(p + epsilon) approx f(p) + f'(p) dot epsilon. $
See figure below.
#figure(
image("figures/grad-slope.png", width: auto),
caption: [In 18.01, the slope $f'(p)$ tells you how quickly $f$ changes near $p$.],
)
The 18.01 derivative had type "scalar".
But for a _two-variable_ function, that's not enough.
For concreteness, let's take
$ f(x,y) = x^2+y^2 $
as our example function (for which we have drawn level curves before),
and consider some point $P = (3,4)$, so that $f(3,4) = 25$.
Then, what would a point "close" to $(3,4)$ mean?
The point $(3.01,4)$ is close, but so is $(3,4.01)$.
For that matter, so is $(3.006, 4.008)$ --- that's also a point at distance $0.01$ away!
So having a single number isn't enough to describe the rate of change anymore.
For a two-variable function, we would really want _two_ numbers, in the sense that
we want to fill in the blanks in the equation
$ f(3 + epsilon_x, 4 + epsilon_y) approx 25
+ ("slope in" x"-direction") dot epsilon_x
+ ("slope in" y"-direction") dot epsilon_y. $
#idea[
For an $n$-variable functions, we have a rate of change in _each_ of the $n$ directions.
Therefore, *we need $n$ numbers and not just one*.
]
The first blank corresponds to what happens if you imagine $y$ is held in place at $4$,
and we're just changing the $x$-value to $3.01$.
The second blank is similar.
So we need a way to calculate these; the answer to our wish is
what's called a _partial derivative_.
== [TEXT] Computing partial derivatives is actually just 18.01 <text-compute-partial>
The good news about partial derivatives is that *they're actually really easy to calculate*.
You pretty much just need to do what you were taught in 18.01
with one variable changing while pretending the others are constants.
Here's the definition:
#definition[
Suppose $f(x,y)$ is a two-variable function.
Then the _partial derivative with respect to $x$_, which we denote either $f_x$
or $(partial f) / (partial x)$,
is the result if we differentiate $f$ while treating $x$ as a variable and $y$ as a constant.
The partial derivative $f_y = (partial f) / (partial y)$ is defined the same way.
Similarly, if $f(x,y,z)$ is a three-variable function,
we write $f_x = (partial f) / (partial x)$ for the derivative when $y$ and $z$ are fixed.
]
#typesig[
Each partial derivative has the same type signature as $f$. That is:
- Given $f : RR^n -> RR$ which accepts *points* in $RR^n$ and outputs *scalars*.
- Then the partial derivative $(partial f) / (partial x) = f_x$
also accepts *points* in $RR^n$ and outputs *scalars*.
]
But that's a lot of words.
I think this is actually better explained by example.
In fact you could probably just read the examples and ignore the definition above.
#example(title: [Example: partial derivatives of $f(x,y) = x^3 y^2 + cos(y)$])[
Let $f(x,y) = x^3 y^2 + cos(y)$.
Let's compute $f_x$.
Again, pretend $y$ is a constant, so look at the function
$ x |-> y^2 dot x^3 + cos(y). $
If we differentiate with respect to $x$, then $x^3$ becomes $3x^2$,
and $cos(y)$ goes to $0$ (it doesn't have any $x$ stuff in it).
So
$ f_x = y^2 dot 3x^2. $
Similarly, let's compute $f_y$.
This time we pretend $x$ is a constant, and look at
$ y |-> x^3 dot y^2 + cos(y). $
This time $y^2$ becomes $2y$, and $cos(y)$ has derivative $-sin(y)$. So
$ f_y = x^3 dot 2y - sin(y). $
]
#example(title: [Example: partial derivatives of $f(x,y,z) = e^(x y z)$])[
Let $f(x,y,z) = e^(x y z)$ for a three-variable example.
To compute $f_x$, think of the function
$ x |-> e^(y z dot x) $
where we pretend $y$ and $z$ are constants.
Then the derivative with respect to $x$ is just $y z e^(y z dot x)$
(just like how the derivative of $e^(3 x)$ is $3 e^(3 x)$). In other words,
$ f_x (x,y,z) = y z dot e^(x y z). $
For analogous reasons:
$
f_y (x,y,z) &= x z dot e^(x y z) \
f_z (x,y,z) &= x y dot e^(x y z).
$
]
#example(title: [Example: partial derivatives of $f(x,y) = x^2 + y^2$ and linear approximation])[
Let's go back to
$ f(x,y) = x^2+y^2 $
which we used in our earlier example as motivation, at the point $P = (3,4)$.
Let's fill in the numbers for the example $f(x,y) = x^2+y^2$ we chose.
By now, you should be able to compute that
$
f_x (x,y) &= 2x \
f_y (x,y) &= 2y \
$
Now, let's zoom in on just the point $P = (3,4)$.
We know that
$
f(P) = 3^2 + 4^2 = 25 \
f_x (P) = 2 dot 3 = 6 \
f_y (P) = 2 dot 4 = 8.
$
So our approximation equation can be written as
#eqn[
$ (3 + epsilon_x)^2 + (4 + epsilon_y)^2 approx 25 + 6 epsilon_x + 8 epsilon_y. $
<example-circle-3-4>
]
If you manually expand both sides, you can see this looks true.
The two sides differ only by $epsilon_x^2$ and $epsilon_y^2$,
and the intuition is that if $epsilon_x$ and $epsilon_y$ were small numbers,
then their squares will be negligibly small.
We'll return to @example-circle-3-4 later when we introduce the gradient.
]
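For a quick numerical sanity check of @example-circle-3-4, take $epsilon_x = 0.01$ and $epsilon_y = 0.02$:
$ 3.01^2 + 4.02^2 = 25.2205, quad "while" quad 25 + 6 dot 0.01 + 8 dot 0.02 = 25.22, $
so the approximation is off by only $0.0005 = epsilon_x^2 + epsilon_y^2$, exactly the squared terms we dropped.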
== [RECIPE] Computing partial derivatives <recipe-compute-partial>
You probably can already figure out the recipe from the sections above,
but let's write it here just for completeness.
#recipe(title: [Recipe for calculating partial derivatives])[
To compute the partial derivative of a function $f(x,y)$ or $f(x,y,z)$
or $f(x_1, ..., x_n)$ with respect to one of its input variables,
1. Pretend all the other variables are constants,
and focus on just the variable you're taking the partial derivative to.
2. Calculate the derivative of $f$ with respect to just that variable like in 18.01.
3. Output the derivative you got.
This is easy, and only requires 18.01 material.
]
We just saw three examples where we computed the partials for $f(x,y) = x^3 y^2 + cos (y)$,
$f(x,y,z) = e^(x y z)$, and $f(x,y) = x^2+y^2$.
Here are a bunch more examples that you can try to follow along:
#sample[
Calculate the partial derivatives of $f(x,y,z) = x+y+z$.
]
#soln[
The partial derivative with respect to $x$ is obtained by differentiating
$ x |-> x + y + z. $
Since we pretend $y$ and $z$ are constants, we just differentiate $x$ to get $1$.
The same thing happens with $y$ and $z$.
Hence
$
f_x (x,y,z) &= 1 \
f_y (x,y,z) &= 1 \
f_z (x,y,z) &= 1. #qedhere
$
]
#sample[
Calculate the partial derivatives of $f(x,y,z) = x y + y z + z x$.
]
#soln[
We differentiate with respect to $x$ first, where we view as the function
$ x |-> (y + z) x + y z $
pretending that $y$ and $z$ are constants.
This gives derivative $f_x (x,y,z) = y + z$.
Similarly, $f_y (x,y,z) = x + z$ and $f_z (x,y,z) = x + y$.
So
$
f_x (x, y, z) &= y + z \
f_y (x, y, z) &= z + x \
f_z (x, y, z) &= x + y. #qedhere
$
]
#sample[
Calculate the partial derivatives of $f(x,y) = x^y$, where we assume $x,y > 0$.
]
#soln[
If we view $y$ as a constant and $x$ as a variable, then
$ x |-> x^y $
is differentiated by the "power rule" to get $y x^(y-1)$.
However, if we view $x$ as constant and $y$ as a variable, then
$ y |-> x^y = e^(ln x dot y) $
ends up with derivative $ln x dot e^(ln x dot y) = ln x dot x^y$.
Hence
$
f_x (x,y) &= y x^(y-1) \
f_y (x,y) &= ln x dot x^y. #qedhere
$
]
== [EXER] Exercises
#exer[
Find all the partial derivatives of the following functions, defined for $x,y,z > 0$:
- $f(x,y,z) = x / y + y / z + z / x$
- $f(x,y,z) = sin(x y z)$
- $f(x,y,z) = x^y + y^z + z^x$.
]
|
https://github.com/npikall/vienna-tech | https://raw.githubusercontent.com/npikall/vienna-tech/main/README.md | markdown | The Unlicense | # The `vienna-tech` Template
<div align="center">Version 0.1.1</div>
This is a template, modeled after the LaTeX template provided by the Vienna University of Technology for Engineering Students. It is intended to be used as a starting point for writing Bachelor's or Master's theses, but can be adapted for other purposes as well. It shall be noted that this template is not an official template provided by the Vienna University of Technology, but rather a personal effort to provide a similar template in a new typesetting system. If you want to checkout the original templates visit the website of [TU Wien](https://www.tuwien.at/cee/edvlabor/lehre/vorlagen)
## Getting Started
These instructions will help you set up the template on the typst web app.
```typ
#import "@preview/vienna-tech:0.1.1": *
// Using the configuration
#show: tuw-thesis.with(
title: [Hier wird der Titel der Arbeit eingefügt.],
thesis-type: [Bachelorarbeit],
lang: "de",
authors: (
(
name: "<NAME>",
email: "<EMAIL>",
matrnr: "12345678",
date: datetime.today().display("[day] [month repr:long] [year]"),
),
),
abstract: [Hier wird die Kurzfassung der Arbeit eingefügt.],
bibliography: bibliography("bibliography.bib"),
appendix: [Hier wird der Anhang eingefügt.],
)
```
## Usage
These instructions will get you a copy of the template up and running locally using the Typst CLI.
```bash
typst init @preview/vienna-tech:0.1.1
```
### Template overview
After setting up the template, you will have the following files:
- `main.typ`: the file which is used to compile the document
- `abstract.typ`: a file where you can put your abstract text
- `appendix.typ`: a file where you can put your appendix text
- `sections.typ`: a file which can include all your contents
- `refs.bib`: references
## Contribute to the template
Feel free to contribute to the template by opening a pull request. If you have any questions, feel free to open an issue.
Keep in mind that the release workflow is automated, and will be triggered by a push to the `main` branch. |
https://github.com/typst-community/setup-hayagriva | https://raw.githubusercontent.com/typst-community/setup-hayagriva/main/README.md | markdown | MIT License | # Setup Hayagriva
🏷️ Installs [the Hayagriva CLI](https://github.com/typst/hayagriva#installation) for GitHub Actions
<table align=center><td>
```yaml
- uses: jcbhmr/setup-hayagriva@v1
- run: hayagriva literature.yml reference --style apa
```
</table>
✅ Installs the `hayagriva` CLI globally \
📌 Supports version pinning \
⚡ Caches the compiled binary using [@actions/cache](https://www.npmjs.com/package/@actions/cache) \
📥 Downloads from [the Hayagriva GitHub releases](https://github.com/typst/hayagriva/releases)
## Usage


**🚀 Here's what you're after:**
```yml
on: push
jobs:
job:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: jcbhmr/setup-hayagriva@v1
- run: hayagriva literature.yml reference
```
### Inputs
- **`hayagriva-version`:** Which version of Hayagriva to install. This can be an exact version specifier such as `0.5.1` or a semver range like `~0.5.0` or `0.x`. Use `latest` to always install the latest release. Defaults to `latest`.
- **`hayagriva-token`:** The GitHub token to use when fetching the version list from [typst/hayagriva](https://github.com/typst/hayagriva/releases). You shouldn't have to touch this. The default is the `github.token` if you're on github.com or unauthenticated (rate limited) if you're not on github.com.
- **`cache`:** Whether or not to use the workflow cache to cache the compiled `hayagriva` binary for future runs.
### Outputs
- **`hayagriva-version`:** The version of Hayagriva that was installed. This will be something like `0.5.1` or similar.
- **`cache-hit`:** Whether or not Hayagriva was restored from the runner's cache or downloaded anew.
## Development


This GitHub Action uses Bun to bundle the main entry point plus all the imported dependencies into a single `.js` file ready to be run by `main: dist/main.js` in the `action.yml`. To test the action just open a PR (even a draft one) and some magic GitHub Actions will test your changes. 🧙♂️
ℹ Once [Bun gets Windows support](https://github.com/oven-sh/bun/issues/43) make sure you add back the `runs-on: windows-latest` test to `test-action.yml`.
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/hydra/0.1.0/src/lib.typ | typst | Apache License 2.0 | #import "/src/default.typ"
#let hydra(
sel: heading,
getter: default.get-adjacent,
prev-filter: default.prev-filter,
next-filter: default.next-filter,
display: default.display,
resolve: default.resolve,
is-footer: false,
) = locate(loc => resolve(
sel: sel,
getter: getter,
prev-filter: prev-filter,
next-filter: next-filter,
display: display,
is-footer: is-footer,
loc
))
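// Example usage (sketch): show the heading relevant to the current page in the
// running header, using the defaults defined above.
//
//   #import "@preview/hydra:0.1.0": hydra
//   #set page(header: hydra())
//
// When placing it in a page footer instead, pass `is-footer: true`.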
|
https://github.com/Jollywatt/typst-fletcher | https://raw.githubusercontent.com/Jollywatt/typst-fletcher/master/tests/hiding/test.typ | typst | MIT License | #set page(width: auto, height: auto, margin: 1em)
#import "/src/exports.typ" as fletcher: diagram, node, edge
= Hiding
#rect(inset: 0pt, diagram({
node((0,0), [Can't see me])
edge("->", bend: 20deg)
node((1,1), [Can see me])
}))
#rect(inset: 0pt, diagram({
fletcher.hide({
node((0,0), [Can't see me])
edge("->", bend: 20deg)
})
node((1,1), [Can see me])
}))
|
https://github.com/Shuenhoy/modern-zju-thesis | https://raw.githubusercontent.com/Shuenhoy/modern-zju-thesis/master/examples/graduate-general.typ | typst | MIT License | #import "../lib.typ": graduate-general
#import graduate-general: *
#let info = (
title: ("毕业论文/设计题目", ""),
title-en: ("Graduation Thesis Title", ""),
grade: "2014级",
student-id: "学号",
author: "姓名",
department: "学院",
major: "专业",
degree: "博士",
supervisor: "指导教师",
submit-date: "递交日期",
)
#let doc = graduate-general(info: info, twoside: true)
#show: doc.style
#doc.pages.cover
#doc.pages.title-zh
#doc.pages.title-en
#doc.pages.decl
#show: frontmatter
#let individual = doc.pages.individual
#individual("致 谢")[]
#individual("摘 要")[]
#individual("Abstract")[]
#doc.pages.outline
#doc.pages.figure-outline
#doc.pages.table-outline
#show: mainmatter
#include "common-body.typ"
#individual("参考文献", outlined: true)[
#bibliography("ref.bib", style: "gb-7714-2015-numeric", title: none)
]
#individual("附录", outlined: true)[
#appendix(level: 1)[
== 一个附录 <app1>
@app1
== 另一个附录
]
]
#individual("作者简历", outlined: true)[
*基本信息:*
- 姓名:
- 性别:
- 民族:
- 出生年月:
- 籍贯:
*教育经历:*
- 2199.09 - 2203.06:浙江大学攻读学士学位
*攻读#(info.degree)学位期间主要的研究成果:*
]
|
https://github.com/Fr4nk1inCs/typreset | https://raw.githubusercontent.com/Fr4nk1inCs/typreset/master/readme.md | markdown | MIT License | # Typreset (W.I.P.)
A collection of [Typst](https://typst.app) presets to provide a starting point for your writing.
Available presets:
- `font.set-font()`: Chinese & English Font support.
- `homework`: Homework template with `simple_question` and `complex_question` frame to write your homework.
- `report`: Report template to write your report.
At this time there is no manual or handbook for this project. But our `tests/` directory contains some examples of how to use this project.
## Usage
Typreset supports Typst 0.6.0 and newer.
At this time there is no release of this project. You need to manually configure your local Typst packages.
- Clone this project to `{data-dir}/typst/packages/local/typreset/0.1.0`, where `data-dir` [depends on your systems](https://github.com/typst/packages#local-packages). For example, in Linux, it is `~/.local/share/typst/packages/local/typreset/0.1.0`:
```bash
mkdir -p ~/.local/share/typst/packages/local/typreset/
git clone https://github.com/Fr4nk1inCs/typreset.git ~/.local/share/typst/packages/local/typreset/0.1.0
```
Then you can use the presets in your Typst project:
```typ
#import "@local/typreset:0.1.0": *
```
## Fonts
The default font family for English is [Linux Libertine](https://linuxlibertine.sourceforge.net/Libertine-EN.html). It's bundled with Typst, so you don't need to install it.
The default math font is [Libertinus](https://github.com/alerque/libertinus) Math, which you need to install manually.
For Arch-based Linux distributions, you can install it from `libertinus-font`
package in `extra` repository. You can also install it from
[its GitHub release page](https://github.com/alerque/libertinus/releases).
### Language-specific Fonts
#### Simplified Chinese
Typreset uses the [fandol](https://ctan.org/pkg/fandol) fonts as the default simplified Chinese fonts. If you want to use these fonts, you need to [install them](https://mirrors.ctan.org/fonts/fandol.zip).
|
https://github.com/QuadnucYard/cpp-coursework-template | https://raw.githubusercontent.com/QuadnucYard/cpp-coursework-template/main/template.typ | typst | #import "cody.typ": *
#import "font.typ": fonts
#let indent = h(2em)
#let unindent = h(-2em)
#let fake_par = {
v(-1em)
box()
}
#let indent-state = state("indent", false)
#let make-style(
leading: 0.8em,
justify: false,
first-line-indent: true,
) = (
leading: leading,
justify: justify,
first-line-indent: if first-line-indent { 2em } else { 0pt },
)
#let project(
title: "",
body,
) = {
// Set the document's basic properties.
set document(author: (), title: title)
set page(numbering: "1", number-align: center)
set text(font: fonts.primary, lang: "en")
set block(spacing: 0.7em)
set math.equation(numbering: "(1)")
show link: set text(fill: blue)
show heading: set text(font: fonts.strong)
show strong: text.with(font: fonts.strong)
show emph: text.with(font: fonts.emph, fill: red.darken(10%))
show math.lt.eq: math.lt.eq.slant
show math.gt.eq: math.gt.eq.slant
// Title row.
block(width: 100%, stroke: 1pt + purple.lighten(20%), fill: purple.lighten(90%), radius: 4pt, outset: 8pt, below: 2.0em)[
#align(center, text(weight: 700, 1.75em, font: fonts.strong, fill: purple.darken(20%), title))
]
// Main body.
set table(stroke: 0.5pt)
import "cody.typ": raw-style
show: raw-style
body
place(bottom + right, text(size: 8pt, "Proudly powered by Typst 11.0", fill: gray))
}
#let check-indent(doc, par-style) = {
set par(..par-style)
if par-style.at("first-line-indent", default: 0pt) > 0pt {
indent-state.update(true)
show heading: it => {
it
fake_par
}
doc
} else {
doc
}
}
#let assignment-style(doc, par-style: make-style()) = {
set enum(numbering: "1.a)")
show heading.where(level: 1): set text(size: 14pt, fill: blue)
show heading.where(level: 1): set block(above: 1.0em, below: 1.0em)
show heading.where(level: 2): set text(size: 13pt, fill: orange.darken(10%))
show heading.where(level: 2): set block(above: 1.0em, below: 1.0em)
show heading.where(level: 3): set text(size: 12pt, fill: eastern.darken(10%))
show heading.where(level: 3): set block(above: 1.0em, below: 1.0em)
show heading.where(level: 4): set text(fill: olive.darken(10%))
show heading.where(level: 4): set block(above: 1.0em, below: 1.0em)
check-indent(doc, par-style)
}
#let solution-style(doc, par-style: make-style()) = {
let h1-box = block.with(width: 100%, fill: blue.lighten(90%), stroke: (left: 3pt + blue.lighten(30%)), inset: 4pt, outset: 4pt, radius: (left: 4pt), below: 12pt)
let h2-box(inset: 6pt, outset: 10pt, bar-space: 2pt, bar-width: 3pt, body) = {
let ist = if type(inset) == dictionary { inset.top } else { inset }
let isb = if type(inset) == dictionary { inset.bottom } else { inset }
let isl = if type(inset) == dictionary { inset.left } else { inset }
let isr = if type(inset) == dictionary { inset.right } else { inset }
context {
let size = measure(body)
block(above: 8pt, below: 8pt)[
#stack(
dir: ltr,
spacing: bar-space,
polygon(fill: eastern.lighten(80%), (-2pt, 0pt), (-2pt, size.height + 2 * inset), (size.width + 2 * inset + 10pt, size.height + 2 * inset), (size.width + 2 * inset, 0pt)),
polygon(fill: eastern.lighten(60%), (-10pt, 0pt), (0pt, size.height + 2 * inset), (0pt + bar-width, size.height + 2 * inset), (-10pt + bar-width, 0pt)),
)
#place(left + top, dy: inset, dx: inset, body)
]
}
}
show heading.where(level: 1): it => {
set text(fill: blue, font: fonts.strong)
h1-box(it.body)
}
show heading.where(level: 2): it => {
set text(fill: eastern, font: fonts.strong)
h2-box(it.body)
}
show heading.where(level: 3): it => {
set block(above: 1.0em)
it
}
set enum(numbering: "1.a.i.")
check-indent(doc, par-style)
}
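// Example usage (sketch, assuming this file sits next to your document):
//
//   #import "template.typ": project, assignment-style
//
//   #show: project.with(title: "Assignment 1")
//   #show: assignment-style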
|
|
https://github.com/tuto193/typst-uos-thesis | https://raw.githubusercontent.com/tuto193/typst-uos-thesis/main/README.md | markdown | MIT License | # typst-uos-thesis
This is a [typst](https://typst.app/) that follows a general set of formatting requirements. It is partly based on [typst-uwthesis](https://github.com/yangwenbo99/typst-uwthesis/tree/master)
(which itself bases on [simple-typst-thesis](https://github.com/zagoli/simple-typst-thesis).
I personally used this template for my bachelor's thesis.
## Usage
Most of the work needed for you is in `main.typ`, so just type the work there.
The overall template is outlined in `template.typ`, which includes some other quality of life fuctions as well near the top.
Abbreviations you want included in the thesis can be edited in `glossaries.typ`. Here should be everything you _want_ to show in the final document.
Languages supported are under `languages.typ`. Just copy the main `english` dict, and fill the needed stuff with your wanted language.
## Main features
- **Multi-lingual!**:
As of now you can set the language of your work in the `main.typ`. Currently English (`en`), German (`de`) and Spanish (`es`) are supported. I am not fluent in other languages, so add whatever you want for your individual needs.
- **Fancy tables**: Use the _cool_ talbe filling and formatting already provided, modify them or use your own. It's all `typst`, so enjoy programming it yourself easily!
- **QOL Functions**: functions for quoting using `...et. al.` for many authors (`#cite-et-al`) or citing using strings instead of labels (`#cite-string`) in case you have citation keys with characters that `typst` doesn't like!.
- **Smart numbering**: Depending on whether you want to print `onesided` or `twosided`! `onesided` printing, means that numbers are always on the right side, and `twosided` printing ensures that numbers are on the _outer_ side
of the binding. That way, it doesn't matter what you choose, as long as the binding is on the (physical) left side of the document, numbers are always visible when scrolling through!.
## Limitations
Some special functions (in the `main.typ` file) for figures and tables need to be used, so the respective supplements (`Figure X`/`Ilustración X`, etc..) is shown properly. Feel free to modify them to your liking or create your own.
## License
Parts of this thesis belonging to `simple-typst-thesis` are under the Apache license. Other general parts belonging to `typst-uwthesis` and used here are not (at the time of this writing) under any license,
but it might be good to cover them (at least) under the license for this project.
`typst-uos-thesis` is under the MIT license, so feel free to share and re-mixit.
The `logo.png` belongs to the [University Osnabrück](https://www.uni-osnabrueck.de/startseite/).
|
https://github.com/jgm/typst-hs | https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/string-04.typ | typst | Other | // Test the `at` method.
#test("Hello".at(1), "e")
#test("Hello".at(4), "o")
#test("Hello".at(-1), "o")
#test("Hello".at(-2), "l")
#test("Hey: 🏳️🌈 there!".at(5), "🏳️🌈")
|
https://github.com/Mc-Zen/pillar | https://raw.githubusercontent.com/Mc-Zen/pillar/main/tests/examples/piano-keys/test.typ | typst | MIT License | #set page(width: auto, height: auto, margin: 1pt)
#import "/src/pillar.typ"
#let clr = if "dark" in sys.inputs { white } else { black }
#set page(fill: white) if clr == black
#set text(fill: clr)
#set table.hline(stroke: clr)
#set table.vline(stroke: clr)
#table(
..pillar.cols("c|ccc|r"),
[Piano Key], [MIDI Number], [Note Name], [Pitch Name], [$f$ in Hz],
table.hline(),
[73], [93], [a'''], [A6], [1760.000],
sym.dots.v, [], [], [], [],
[49], [69], [a'], [A4], [440.000],
[48], [68], [g♯'], [G♯4], [415.305],
[47], [67], [g'], [G4], [391.995],
)
|
https://github.com/sses7757/sustech-graduated-thesis | https://raw.githubusercontent.com/sses7757/sustech-graduated-thesis/main/sustech-graduated-thesis/utils/math-utils.typ | typst | Apache License 2.0 | #let svec(cont) = {
$accent(#cont, ->, size: #75%)$
}
#let _empty-l = math.class("opening", [])
#let _empty-r = math.class("closing", [])
#let sfrac(num, denom) = {
$lr(#_empty-l #num mid(\/) #denom #_empty-r)$
}
#let hide(value) = context h(measure(value).width) |