repo | file | language | license | content
---|---|---|---|---|
https://github.com/piepert/philodidaktik-hro-phf-ifp | https://raw.githubusercontent.com/piepert/philodidaktik-hro-phf-ifp/main/src/template.typ | typst | Other | #let color-orange = rgb("#B85A22")
#let color-orange-light = rgb("#DD8047")
#let color-blue = rgb("#94B6D2")
#let color-brown = rgb("#775F55")
#let refpage(label) = link(label, context counter(page).at(label).first())
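// Index machinery: `index` records an entry under `name` in the "indices"
// state and places a unique label at the call site; `make-index` (below)
// renders the alphabetized index with links to the pages of all occurrences.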
#let index(name, content: none) = {
let s = state("indices", (:))
if name == none {
name = repr(lower(content))
}
if content == none {
content = name
}
context {
let num = s.at(here()).at(name, default: (
origins: (),
content: none)).origins.len()
let origin = label("index-"+name+"-" + str(num))
[#box[]#origin]
}
s.update(k => {
let num = k.at(name, default: (
origins: (), content: none)).origins.len()
let origin = label("index-" +
name + "-" +
str(num))
if name in k {
k.at(name).content.push(content)
k.at(name).origins.push(origin)
} else {
k.insert(name, (
content: (content,),
origins: (origin,)
))
}
k
})
}
#let make-index(title: none) = context {
let s = state("indices", (:))
let last-first = none
for item in s.final()
.keys()
.map(e => (e, lower(e.replace("Ü", "u")
.replace("Ä", "a")
.replace("Ö", "o")
.replace("ü", "u")
.replace("ö", "o")
.replace("ä", "a"))))
.sorted(key: e => e.at(1)) {
let original = item.at(0)
let small = item.at(1)
if last-first != small.first() {
last-first = small.first()
heading(level: 2, upper(last-first))
}
let i = 1
let e = s.final().at(original)
let label-page-list = e.origins.map(e => (e, query(e).first().location().page()))
let pages = label-page-list
.map(o => o.last())
.dedup()
.map(p => label-page-list.filter(i => i.last() == p).first())
let page_numbers = []
for p in pages {
page_numbers += [#link(p.first(), str(p.last()))]
if i < pages.len() {
page_numbers += [, ]
}
if calc.rem(i, 3) == 0 {
page_numbers += [\ ]
}
i += 1
}
block(stroke: (bottom: 1pt + color-blue),
inset: (bottom: 0.5em),
grid(columns: (1fr, auto),
column-gutter: 0.5em,
)[
#set text(hyphenate: true)
#e.content.dedup().first()
][
#show: align.with(right)
#set text(size: 1em)
#page_numbers
]
)
}
}
// todo: only first reference works
#let ix(b, ..args) = b + if args.pos().len() == 0 {
if type(b) != "string" {
panic("expected string, found "+type(b)+".")
}
index(b)
} else {
args.pos().map(e => if type(e) != "string" {
panic("expected string, found "+type(e)+".")
} else {
index(e)
}).join([])
}
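// Generic note machinery: `note-note` places a superscript marker carrying a
// unique origin label, `note-content` stores the note body in state, and
// `add-note` combines both. `make-notes` later renders the collected notes
// with back-links to their origins; the endnotes, todos and tasks defined
// below all build on this mechanism.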
#let note-note(state-key,
key,
number-format: numbering.with("1"),
wrap-note: k => super(text(fill: color-brown, k))) = context {
let counter-val = state(state-key, ()).at(here()).len() + 1
let origin = label(if key != none {
key+"-ORIGIN"
} else {
state-key + "-ORIGIN-" + str(counter-val)
})
let target = label(if key != none {
key+"-TARGET"
} else {
state-key + "-TARGET-" + str(counter-val)
})
[#link(target, wrap-note(number-format(counter(state-key).at(here()).first() + 1)))#origin]
}
#let note-content(state-key, body, key: none) = {
state(state-key, ()).update(k => {
let counter-val = k.len() + 1
k.push((
content: body,
counter: counter-val,
target: if key != none {
label(key+"-TARGET")
} else {
label(state-key + "-TARGET-" + str(counter-val))
},
origin: if key != none {
label(key+"-ORIGIN")
} else {
label(state-key + "-ORIGIN-" + str(counter-val))
},
))
k
})
}
#let add-note(key: none,
number-format: numbering.with("1"),
wrap-note: k => super(text(fill: color-brown, k)),
state-key,
body) = {
note-note(state-key, key, wrap-note: wrap-note, number-format: number-format)
note-content(state-key, body, key: key)
counter(state-key).step()
}
#let make-notes(state-key,
title: [Anmerkungen],
pretext: none,
number-format: numbering.with("1"),
wrap-all: k => k,
wrap-note: k => super(text(fill: color-brown, k)),
wrap-content: k => k) = context {
// generation in two steps:
// 1. first everything with an origin, so that the new origins of endnotes nested inside endnotes are generated
// 2. then everything without an origin
if title != none {
heading(title)
}
pretext
set par(justify: true)
let i = 1
for item in state(state-key, ()).final() {
counter(state-key).update(i)
wrap-all(block({
link(item.origin, wrap-note(number-format(i)))
wrap-content[#item.content#item.target]
}))
i += 1
}
}
#let en-note(key) = note-note("endnotes", key) + counter("endnotes").step()
#let en-content(key, body) = note-content("endnotes", body, key: key) + counter("endnotes").step()
#let en(key: none, body) = add-note(key: key, "endnotes", body)
#let ens(..args) = args.pos().map(e => en(e)).join(super[,])
#let enref(key) = context {
let index = 1
for item in state("endnotes", ()).final() {
if item.key == key {
return en(item.content)
// return [#link(key, super[#key])]
}
index += 1
}
panic("key '"+key+"' not found!")
}
#let make-endnotes() = make-notes("endnotes", title: [Anmerkungen])
#let todo(key: none, body) = add-note(
key: key,
number-format: numbering.with("(i)"),
wrap-note: k => strong(text(fill: rgb("#ff0000"), super(k))),
"todos",
body
)
#let make-todos() = {
make-notes("todos",
title: [To Do],
number-format: numbering.with("(i)"),
wrap-note: k => strong(text(fill: rgb("#ff0000"), k+[ ])),
pretext: [
Einige Inhalte dieses Dokuments sind noch nicht gesichert mit dem Dozenten oder Quellen geklärt. Diese werden hier vermerkt und Folgefragen festgehalten.
]
)
}
#let task(key: none, title, question, answer) = {
show: block.with(inset: 1em, width: 100%, stroke: color-blue, breakable: false)
set par(justify: false)
add-note(
key: key,
wrap-note: k => strong[Aufgabe #k -- #title],
number-format: numbering.with("1"),
"tasks",
[: ] + title + pad(left: 1.5em, answer))
set par(justify: true)
par(question)
}
#let taskref(key) = context {
let index = 1
for item in state("tasks", ()).final() {
if item.target == label(key+"-TARGET") {
return link(item.origin, [Aufgabe #numbering("1", item.counter)])
}
index += 1
}
panic("key '"+key+"' not found!")
}
#let make-tasks() = {
make-notes("tasks",
title: [Lösungsvorschläge],
wrap-note: k => strong[Lösung für #k],
number-format: numbering.with("1")
)
}
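// Definition register: `definition` is intended to record each occurrence of
// a term in state (note the early `return none`, which currently disables
// everything after the counter step), `def` typesets a definition paragraph,
// and `make-definitions` renders an alphabetized list of terms with page links.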
#let definition(name, content: none) = {
let s = state("definitions")
counter("definitions").step()
return none
if name == none {
name = repr(lower(content))
}
if content == none {
content = name
}
context {
let num = counter("definitions").at(here()).first()
let lbl = name+"-"+str(num)
[#s.update(k => {
if type(k) != "dictionary" {
k = (:)
}
if name in k {
k.at(name).content.push(content)
k.at(name).location.push((num, here()))
} else {
k.insert(name, ("content": (content,), location: ((num, here()),)))
}
k
})#label(lbl)]
}
}
#let def(key, body, ..ixs) = par(hanging-indent: 1.5em, [
#if (ixs.pos().len() == 0) {
definition(key)
index(key)
} else {
definition(key)
ixs.pos().map(e => index(e)).join([])
}
#strong[D#text(size: 0.8em)[EF].#box(place(left, dx: -3.5em, dy: -1em, text(size: 2em, fill: red, strong[!]))) #key]#definition(key) -- #body
])
#let make-definitions(title: none) = context {
let s = state("definitions")
let last-first = none
if s.final() == none {
return none
}
for item in s.final()
.keys()
.map(e => (e, lower(e)))
.sorted(key: e => e.at(1)) {
let original = item.at(0)
let small = item.at(1)
if last-first != small.first() {
// if last-first != none {
// v(1em)
// }
last-first = small.first()
// set text(fill: white)
// block(width: 100%, fill: color-blue, inset: 0.5em, upper(upper(last-first)))
heading(level: 2, upper(last-first))
}
let e = s.final().at(original)
grid(columns: (1fr, auto),
column-gutter: 0.5em,
)[
#e.content.dedup().first()
][
#let i = 1
#let pages = e.location.map(e => {
// let lbl = original+"-"+repr(e.position().x)+"-"+repr(e.position().y)
let lbl = original+"-"+str(e.first())
(e.last().page(), link(label(lbl), str(e.last().page())))
}).dedup(key: e => e.at(0)).map(e => e.at(1))
#for p in pages {
p
if i < pages.len() {
[, ]
}
if calc.rem(i, 3) == 0 {
[\ ]
}
i += 1
}
]
}
}
#let make-title-page() = {
grid(columns: (25%, 75%), column-gutter: 0.25cm, row-gutter: 0.5cm, [
], [
#set text(size: 4.5em, fill: color-brown)
#show: upper
// #show: strong
Aller\
Anfang ist... \
Klagerisch \
#box(image(width: 100%, "titlepage_image.png"))
], block(fill: color-orange-light,
width: 100%,
height: 2.5cm,
inset: 0.5cm)[
#set text(fill: white, size: 2.75em)
// #show: strong
#align(center + horizon)[KÜK]
], block(fill: color-blue,
width: 100%,
height: 2.5cm,
inset: 0.5cm)[
#set par(justify: false, leading: 0.4em)
#set text(fill: white, size: 1.75em)
#show: align.with(center + horizon)
[Klager-Überlebens-Koffer für das Lehramtsstudium der Philosophie an der Universität Rostock]
], [
], block(inset: 5pt)[
Der KÜK umfasst grundlegende, methodische und inhaltliche Überlebenstipps für das Bestehen der Klausur "Einführung in die Philosophiedidaktik", die Seminarvor- und Nachbereitung und die Prüfungsvorbereitung am Institut für Philosophie der Universität Rostock. Die #context {
let headings = query(selector(heading.where(
outlined: true,
level: 1)))
let headings-after-nosubs = state("headings", ()).final()
.map(e => int(e.parent.split("-").last()))
.filter(e => e > int(state("no-subs", (parent: "0-0")).final().parent.split("-").last()))
// calculate number of headings without those, who are in the last part (no-subs-part, which is tagged)
headings.len() - headings-after-nosubs.len()
}-teilige Grundausrüstung richtet sich an alle Lehramtsstudent*innen der Philosophie und soll den Start in das Studium erleichtern. Der KÜK versteht sich prozessorientiert und erlaubt individuelle Ergänzungen. Die zusammengefassten Hinweise sind jedoch für das Lehramtsstudium der Philosophie als wichtig anzusehen. Diese Sammlung ersetzt nicht das Nachfragen und Nachdenken; wenn Inhalte, Formalia oder Methoden unklar erscheinen, sollten Dozierende und Mitstudierende um Rat gefragt werden.
#set text(size: 0.8em)
Verantwortung und Betreuung: Gruppe "#box(move(dy: 0.25em, circle(stroke: 0.5pt, radius: 0.6em, move(dx: -0.42em, dy: -1.15em, $phi$)))) Philo lernen" © 2023-2024
Autor: <NAME>, Version: #datetime.today().display("[year]-[month]")
])
}
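// Custom outline: `add-outline` pushes parts, headings and subheadings into
// the "outline" state together with labels placed at their locations;
// `make-part` typesets a part divider page, and `make-outline` renders a
// three-column table of contents with dotted page links.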
#let add-outline(it-element, it-body, type) = {
state("outline", ()).update(k => {
k.push((
origin: label("ref-outline-"+str(k.len())),
type: type,
content: it-body
))
k
})
context {
let origin = label("ref-outline-"+str(state("outline", ()).at(here()).len() - 1))
block[#it-body#origin]
}
}
#let add-part(it) = add-outline(it, it, "part")
#let add-heading(it) = add-outline(it, it.body, "heading")
#let add-subheading(it) = add-outline(it, it.body, "subheading")
#let make-part(p, subtitle: none) = {
pagebreak(to: "odd")
align(horizon, {
set par(spacing: 0.5em, justify: false)
show: align.with(horizon)
{
set text(size: 4.5em, fill: color-brown)
add-part(p)
}
counter("parts").step()
context {
set text(size: 2em, fill: color-brown)
set text(fill: color-orange)
[Abschnitt #numbering("I", counter("parts").at(here()).first())]
if subtitle != none {
[ -- ]
subtitle
}
}
})
pagebreak()
}
#let make-outline() = context {
v(1em)
heading(outlined: false)[Aller Anfang ist... klagerisch]
v(-1em)
text(fill: color-orange, tracking: 0.25em, strong(upper[Inhaltsverzeichnis]))
show: pad.with(x: 1cm, y: 1.6cm)
let part-counter = 1
let heading-counter = 1
let subheading-counter = 1
let noheads = false
let arr = ()
let dotted-underline(item) = box(inset: (bottom: 0.25em),
stroke: (bottom: (dash: "dotted")),
link(item.origin, item.content +
h(1fr) +
str(query(item.origin).first().location().page())))
for item in state("outline", ()).final() {
if item == "noheads" {
noheads = true
continue
}
if item.type == "part" {
if arr.len() > 0 {
grid(columns: 3,
column-gutter: 7pt,
row-gutter: 4pt,
..arr)
arr = ()
}
arr.push(grid.cell(colspan: 3, v(1em) + link(item.origin, heading(outlined: false, level: 2)[#numbering("I.", part-counter) #item.content])))
part-counter += 1
} else if item.type == "heading" {
arr.push(grid.cell(colspan: 1, [#(heading-counter).]))
arr.push(grid.cell(colspan: 2, dotted-underline(item)))
heading-counter += 1
subheading-counter = 1
} else if item.type == "subheading" and not noheads {
arr.push(grid.cell[])
arr.push(grid.cell([#(heading-counter - 1).#subheading-counter.]))
arr.push(grid.cell(dotted-underline(item)))
subheading-counter += 1
}
}
if arr.len() > 0 {
grid(columns: 3,
column-gutter: 7pt,
row-gutter: 4pt,
..arr)
}
}
#let author(name) = {
show: none
counter("kuek-part").step()
set text(fill: color-blue)
show: strong
// [KÜK -- Hilfe Nr. #counter("kuek-part").display() -- Autor: ]
[KÜK -- Hilfe Nr. #context {
str(state("headings").at(here()).len())
if counter("subheadings").at(here()).first() > 0 {
[.] + counter("subheadings").display()
}
} -- Autor: ]
name
}
#let make-orange = text.with(fill: color-orange)
#let orange-list(..items) = {
set text(fill: color-orange)
list(tight: false, indent: 1.5em, marker: [■], ..items)
}
#let orange-list-with-body(parred: true, ..items) = {
let index = 1
let body = []
let elements = ()
for e in items.pos() {
if calc.even(index) {
let obody = text(fill: color-orange, body)
elements.push({
if parred {
block(obody)
} else {
obody
}
text(fill: black, e)
})
} else {
body = e
}
index += 1
}
orange-list(..elements)
}
#let project(body) = {
set page(margin: 2cm)
set text(font: "Tw Cen MT", size: 12pt, lang: "de")
set par(justify: true, linebreaks: "optimized", leading: 0.5em)
show heading.where(level: 1, outlined: true): it => add-heading(it)
show heading.where(level: 1): set text(size: 1.75em)
// show heading.where(level: 2): set block(above: 1.5em)
show heading.where(outlined: true, level: 2): it => add-subheading(it)
show heading.where(level: 2): upper
show heading: set par(justify: false)
show heading: set text(fill: color-brown, size: 1.25em, weight: "regular")
set enum(numbering: "1.", indent: 1.5em)
make-title-page()
set page(margin: (bottom: 3cm, rest: 2cm), footer: {
set text(size: 10pt, fill: color-brown)
line(length: 100%, stroke: 0.5pt + color-blue)
v(-0.75em)
[KÜK -- Institut für Philosophie -- © 2023-2024
#h(1fr)
Seite \
#h(1fr)
#set text(size: 1.25em)
#context counter(page).display()
]
}, header: {
set text(size: 10pt, fill: color-brown)
h(1fr)
strong[Aller Anfang ist... klagerisch]
v(-0.75em)
line(length: 100%, stroke: 0.5pt + color-blue)
})
include "/src/nutzung.typ"
pagebreak(weak: true)
make-outline()
show heading.where(level: 1): it => pagebreak(weak: true) + it
// make headings referable
show ref: it => {
if it != none and it.element != none and it.element.numbering == none and it.element.supplement == [Abschnitt] {
link(it.target, strong[#it.element.body])
} else {
it
}
}
let abbreviations = ("z. B.", "Z. B.", "D. h.", "d. h.", "d. i.", "o. ä.", "o. J.", "o. A.")
show regex(abbreviations.join("|")
.replace(" ", "")
.replace(".", "\.")): it => {
let res = abbreviations.filter(e => [#e.replace(" ", "")] == it)
if res.len() > 0 {
res.first().split(" ").join(" ")
}
}
body
state("outline", ()).update(k => {
k.push("noheads")
k
})
make-part[Anhang]
make-tasks()
make-endnotes()
make-todos()
bibliography("bibliography.bib", title: [Literaturverzeichnis], style: "kuek-zitierstil.csl")
heading[Index]
{
set par(justify: false)
// set text(hyphenate: false)
columns(3, make-index(title: none))
}
pagebreak(weak: true)
include "changelog.typ"
} |
https://github.com/hitszosa/universal-hit-thesis | https://raw.githubusercontent.com/hitszosa/universal-hit-thesis/main/harbin/bachelor/conf.typ | typst | MIT License | #import "../../common/theme/type.typ": 字体, 字号
#import "components/typography.typ": main-format-heading, special-chapter-format-heading
#import "utils/numbering.typ": heading-numbering
#import "config/constants.typ": special-chapter-titles
#import "config/constants.typ": current-date
#import "utils/states.typ": thesis-info-state, bibliography-state
#import "@preview/cuti:0.2.1": show-cn-fakebold
#import "@preview/i-figured:0.2.4": show-figure, reset-counters, show-equation
#import "@preview/lovelace:0.2.0": setup-lovelace
#import "pages/cover.typ": cover
#import "pages/abstract.typ": abstract-cn as abstract-cn-page, abstract-en as abstract-en-page
#import "pages/outline.typ": outline-page
#import "pages/conclusion.typ": conclusion as conclusion-page
#import "pages/bibliography.typ": bibliography-page
#import "pages/acknowledgement.typ": acknowledgement as acknowledgement-page
#import "pages/achievement.typ": achievement as achievement-page
#import "pages/declaration-of-originality.typ": declaration-of-originality
#let preface(content) = {
set page(
header: {
[
#set align(center)
#set par(leading: 0em)
#text(font: 字体.宋体, size: 字号.小五, baseline: 8.5pt)[
哈尔滨工业大学本科毕业论文(设计)
]
#line(length: 100%, stroke: 2.2pt)
#v(2.2pt, weak: true)
#line(length: 100%, stroke: 0.6pt)
]
},
header-ascent: 15%,
)
set page(numbering: "I")
set page(
footer: context [
#align(center)[
#set text(size: 字号.小五, font: 字体.宋体)
#counter(page).display("- I -")
]
],
footer-descent: 15%,
)
counter(page).update(1)
show heading: it => {
set par(first-line-indent: 0em)
if it.level == 1 {
align(center)[
#v(1em)
#special-chapter-format-heading(it: it, font: 字体.黑体, size: 字号.小二)
#v(.3em)
]
} else {
it
}
}
set par(first-line-indent: 2em, leading: 1em, justify: true)
set text(font: 字体.宋体, size: 字号.小四)
content
}
#let main(
content,
figure-options: (:),
) = {
figure-options = figure-options + (
extra-kinds: (),
extra-prefixes: (:),
)
set page(numbering: "1")
set page(footer: context [
#align(center)[
#set text(size: 字号.小五, font: 字体.宋体)
#counter(page).display("- 1 -")
]
])
counter(page).update(1)
set heading(numbering: heading-numbering)
show heading: it => {
set par(first-line-indent: 0em)
if it.level == 1 {
align(center)[
#v(1em)
#main-format-heading(it: it, font: 字体.黑体, size: 字号.小二)
#v(.3em)
]
} else if it.level == 2 {
main-format-heading(it: it, font: 字体.黑体, size: 字号.小三)
} else if it.level >= 3 {
main-format-heading(it: it, font: 字体.黑体, size: 字号.小四)
}
}
show heading: reset-counters.with(extra-kinds: ("algorithm",) + figure-options.extra-kinds)
show figure: show-figure.with(
numbering: "1-1",
extra-prefixes: ("algorithm": "algo:") + figure-options.extra-prefixes,
)
show figure.where(kind: table): set figure.caption(position: top)
show figure.where(kind: "algorithm"): set figure.caption(position: top)
show figure: set text(size: 字号.五号)
show raw.where(block: false): box.with(
fill: rgb("#fafafa"),
inset: (x: 3pt, y: 0pt),
outset: (y: 3pt),
radius: 2pt,
)
show raw.where(block: false): text.with(
font: 字体.代码,
size: 10.5pt,
)
show raw.where(block: true): block.with(
fill: rgb("#fafafa"),
inset: 8pt,
radius: 4pt,
width: 100%,
)
show raw.where(block: true): text.with(
font: 字体.代码,
size: 10.5pt,
)
show math.equation: show-equation.with(numbering: "(1-1)")
show: setup-lovelace
show ref: it => {
let eq = math.equation
let el = it.element
if el != none and el.func() == eq {
// Override equation references.
numbering(
el.numbering,
..counter(eq).at(el.location()),
)
} else {
// Other references as usual.
it
}
}
content
}
#let doc(
content,
thesis-info: (:),
abstract-cn: none,
keywords-cn: (),
abstract-en: none,
keywords-en: (),
figure-options: (:),
bibliography: none,
) = {
set document(
title: thesis-info.at("title-cn"),
author: thesis-info.author,
)
thesis-info-state.update(current => {
current + thesis-info
})
bibliography-state.update(current => bibliography)
set page(
paper: "a4",
margin: (top: 3.8cm, left: 3cm, right: 3cm, bottom: 3cm),
)
show: show-cn-fakebold
set text(lang: "zh", region: "cn")
cover()
show: preface
if abstract-cn != none {
abstract-cn-page(keywords: keywords-cn)[
#abstract-cn
]
pagebreak()
}
if abstract-en != none {
abstract-en-page(keywords: keywords-en)[
#abstract-en
]
pagebreak()
}
outline-page()
figure-options = figure-options + (
extra-kinds: (),
extra-prefixes: (:),
)
show: main.with(figure-options: figure-options)
content
}
#let ending(content, conclusion: none, achievement: none, acknowledgement: none) = {
show heading: it => {
set par(first-line-indent: 0em)
if it.level == 1 {
align(center)[
#v(1em)
#special-chapter-format-heading(it: it, font: 字体.黑体, size: 字号.小二)
#v(.3em)
]
} else {
it
}
}
set heading(numbering: none)
if conclusion != none {
conclusion-page[
#conclusion
]
pagebreak()
}
bibliography-page()
pagebreak()
if achievement != none {
achievement-page[
#achievement
]
pagebreak()
}
declaration-of-originality()
pagebreak()
if acknowledgement != none {
acknowledgement-page[
#acknowledgement
]
}
content
} |
https://github.com/ad-si/invoice-maker | https://raw.githubusercontent.com/ad-si/invoice-maker/master/examples/with-body.typ | typst | ISC License | #import "../invoice-maker.typ": *
#import "../fixtures/example-data.typ": *
#show: invoice.with(
language: "en",
banner-image: image("../fixtures/banner.png"),
invoice-id: "2024-03-10t172205",
issuing-date: "2024-03-10",
delivery-date: "2024-02-29",
due-date: "2024-03-20",
biller: biller,
hourly-rate: 100,
recipient: recipient,
items: table-data,
styling: (
font: none, // Explicitly use Typst's default font
font-size: 8pt,
margin: (
top: 20mm,
right: 40mm,
bottom: 10mm,
left: 40mm
),
)
)
#horizontalrule
= Additional Information
#v(1em)
The body of the invoice can contain additional information,
such as a note to the recipient
or an extended description of the provided services.
This information is displayed at the bottom of the invoice.
|
https://github.com/kilpkonn/typst-thesis | https://raw.githubusercontent.com/kilpkonn/typst-thesis/main/template.typ | typst | MIT License | #import "@preview/codelst:2.0.1": sourcecode
#import "@preview/drafting:0.2.0": * // For notes in margins
// The project function defines how your document looks.
// It takes your content and some metadata and formats it.
// Go ahead and customize it to your liking!
#let project(
title: "",
title_estonian: "",
thesis_type: "",
thesis_type_estonian: "",
authors: (),
supervisors: (),
date: none,
location: "",
dev: false,
logo: none,
body,
) = {
// Set the document's basic properties.
let doc_authors = authors.map(author => author.name)
set document(author: doc_authors, title: title)
set text(font: "New Computer Modern", lang: "en")
show math.equation: set text(weight: 400)
show par: set block(spacing: 1.5em)
show link: it => [
#text(rgb("0000FF"))[#it]
]
// Set up notes in margin
// https://github.com/ntjess/typst-drafting
set page(
// Extra wide A4 to give extra room for notes
margin: (left: 2.5cm, right: if dev { 6.5cm } else { 2.5cm}), paper: "a4", width: if dev {25cm} else {21cm}
)
set-page-properties()
// Title page.
align(center, text(1.2em, weight: 50, "TALLINN UNIVERSITY OF TECHNOLOGY"))
align(center, text(1.2em, weight: 50, "School of Information Technologies"))
// The page can contain a logo if you pass one with `logo: "logo.png"`.
v(0.6fr)
if logo != none {
align(right, image(logo, width: 26%))
}
v(1fr)
// Author information.
pad(
top: 0.7em,
bottom: 2em,
align(center,
grid(
rows: authors.len(),
gutter: 3em,
..authors.map(author => align(right,
strong(author.name) + " " +
author.student_code
)),
),
)
)
v(1.2em, weak: true)
align(center, text(2em, weight: 700, title))
v(2.4em, weak: true)
align(center, text(1.8em, weight: 200, smallcaps(thesis_type)))
// Supervisors
pad(
top: 0.7em,
right: 10%,
align(right,
strong("Supervisor") +
grid(
rows: supervisors.len(),
gutter: 1em,
..supervisors.map(supervisor => align(right, supervisor.name + linebreak() + supervisor.degree)),
),
)
)
v(2.4fr)
place(bottom+center)[
#location #datetime.today().year()
]
pagebreak()
// Title page.
align(center, text(1.2em, weight: 50, "TALLINNA TEHNIKAÜLIKOOL"))
align(center, text(1.2em, weight: 50, "Infotehnoloogia teaduskond"))
// The page can contain a logo if you pass one with `logo: "logo.png"`.
v(0.6fr)
if logo != none {
align(right, image(logo, width: 26%))
}
v(1fr)
// Author information.
pad(
top: 0.7em,
bottom: 2em,
align(center,
grid(
rows: authors.len(),
gutter: 3em,
..authors.map(author => align(right,
strong(author.name) + " " +
author.student_code
)),
),
)
)
v(1.2em, weak: true)
align(center, text(2em, weight: 700, title_estonian))
v(2.4em, weak: true)
align(center, text(1.8em, weight: 200, smallcaps(thesis_type_estonian)))
// Supervisors
pad(
top: 0.7em,
right: 10%,
align(right,
strong("Juhendaja") +
grid(
rows: supervisors.len(),
gutter: 1em,
..supervisors.map(supervisor => align(right, supervisor.name + linebreak() + supervisor.degree)),
),
)
)
v(2.4fr)
place(bottom+center)[
#location #datetime.today().year()
]
pagebreak()
set page(numbering: "I", number-align: center)
counter(page).update(1)
// Authors declaration
include "authors_declaration.typ"
v(2em)
text("Author: ")
authors.map(author => author.name).join(", ")
v(1em)
text("Date: ") + datetime.today().display("[day].[month].[year]")
pagebreak()
// Abstract page.
v(1fr)
align(center)[
#heading(
outlined: false,
numbering: none,
text(1em, smallcaps[Abstract]),
)
]
par(justify: true, include("abstract.typ"))
v(1.618fr)
pagebreak()
// Annotation page.
v(1fr)
align(center)[
#heading(
outlined: false,
numbering: none,
text(1em, smallcaps[Annotatsioon]),
)
]
par(justify: true, include("annotation.typ"))
v(1.618fr)
pagebreak()
// Table of contents.
outline(depth: 3, indent: true)
// Main body.
set par(justify: true)
set page(numbering: "1", number-align: center, header: counter(footnote).update(0))
counter(page).update(1)
// Abbreviations
// include("abbreviations.typ")
// Heading numbering
set heading(numbering: "1.1")
show heading: it => {
if (it.level == 1) {
pagebreak()
}
if (it.level > 3) {
block(it.body)
} else {
block(counter(heading).display() + " " + it.body)
}
}
body
bibliography("references.bib");
// Hack to insert end label for page count
text[#text(" ")<end>]
show heading: none
heading[Appendixes]
set heading(numbering: "1.1", outlined: false)
counter(heading).update(0)
show heading: it => {
if (it.level == 1) {
pagebreak()
}
block("Appendix " + counter(heading).display() + ": " + it.body)
}
include("appendixes.typ")
}
#let todo(txt) = inline-note(
stroke: blue,
rect: rect.with(inset: 1em, radius: 0.5em, fill: blue.lighten(90%)),
text[_Author_: #txt]
)
#let todo-supervisor(txt) = inline-note(
stroke: orange,
rect: rect.with(inset: 1em, radius: 0.5em, fill: orange.lighten(90%)),
text[_Supervisor_: #txt]
)
#let note(note, txt) = margin-note(stroke: aqua, text(size: 0.7em, note)) + highlight(txt)
#let suggestion(old, new) = highlight(fill: red, old) + highlight(fill: green, new)
#let metric(name) = emph(name)
#let cite-footnote(title, accessed, url, archivedurl) = footnote[#title, Accessed: #accessed, #smallcaps("url:") #link(url)] |
https://github.com/rabotaem-incorporated/probability-theory-notes | https://raw.githubusercontent.com/rabotaem-incorporated/probability-theory-notes/master/utils/shortcuts.typ | typst | // shortcuts
#show "iff": "тогда и только тогда, когда"
#let iff = math.limits(sym.arrow.l.r.double.long)
#let imply = math.limits(sym.arrow.r.double.long)
#let since = math.limits(sym.arrow.l.double.long)
#let empty = sym.nothing.rev
#let maps = sym.arrow.long.bar
#let sim = sym.zws + sym.tilde + sym.zws
#let pc = sym.plus.circle
#let nsubg = sym.triangle.l.small
#let iso = sym.tilde.equiv
#let fg = sym.slash
#let usb = sym.union.sq.big
#let eps = sym.epsilon
#let Union = sym.union.big
#let UnionSq = sym.union.sq.big
#let Sect = sym.sect.big
#let cj = math.overline
#let limsup = $limits(overline(lim))$
#let liminf = $limits(underline(lim))$
#let INT(f, E: $E$, mu: $mu$) = $ integral_#E #f dif #mu $
#let newline(cont) = $#cont \ #cont$
#let neg(x) = math.overline(x)
#let Exp = math.op("Exp")
#let Bern = math.op("Bern")
#let Geom = math.op("Geom")
// set fix
#let nothing = math.diameter
#let emptyset = math.diameter
// operators
#let sgn = math.op("sgn")
#let Inv = math.op("Inv")
#let Det = math.op("det")
#let lcm = math.op("lcm")
#let diag = math.op("diag")
#let sgn = math.op("sgn")
#let Lin = math.op("Lin")
#let Dim = math.op("dim")
#let id = math.op("id")
#let GL = math.op("GL")
#let Hom = math.op("Hom")
#let Ker = math.op("Ker")
#let Re = math.op("Re")
#let Im = math.op("Im")
#let Id = math.op("Id")
#let End = math.op("End")
#let rk = math.op("rk")
#let Tr = math.op("Tr")
#let ord = math.op("ord")
#let St = math.op("St")
#let char = math.op("char")
#let Irr = math.op("Irr")
#let arg = math.op("arg")
#let Int = math.op("Int")
#let Cl = math.op("Cl")
#let const = math.op("const")
#let Ind = math.op("Ind")
#let Ln = math.op("Ln")
#let Arg = math.op("Arg")
#let cov = math.op("cov")
// letters
#let Aa = math.cal($A$)
#let Bb = math.cal($B$)
#let Cc = math.cal($C$)
#let Dd = math.cal($D$)
#let Ee = math.cal($E$)
#let Ff = math.cal($F$)
#let Gg = math.cal($G$)
#let Hh = math.cal($H$)
#let Ii = math.cal($I$)
#let Jj = math.cal($J$)
#let Kk = math.cal($K$)
#let Ll = math.cal($L$)
#let Mm = math.cal($M$)
#let Nn = math.cal($N$)
#let Oo = math.cal($O$)
#let Pp = math.cal($P$)
#let Qq = math.cal($Q$)
#let Rr = math.cal($R$)
#let Ss = math.cal($S$)
#let Tt = math.cal($T$)
#let Uu = math.cal($U$)
#let Vv = math.cal($V$)
#let Ww = math.cal($W$)
#let Xx = math.cal($X$)
#let Yy = math.cal($Y$)
#let Zz = math.cal($Z$)
// functions
#let dotp(x, y) = $lr(angle.l #x, #y angle.r)$
#let hat(x) = math.accent(x, "^")
// amogus
#let amogus = symbol(
"ඞ",
("adult", "ඞ"),
("tween", "ඩ"),
("toddler", "ධ"),
("baby", "ව"),
)
|
|
https://github.com/r8vnhill/keen-manual | https://raw.githubusercontent.com/r8vnhill/keen-manual/main/manual.typ | typst | BSD 2-Clause "Simplified" License | #import "template.typ": *
// Take a look at the file `template.typ` in the file panel
// to customize this template and discover how it works.
#show: project.with(
title: "Evolutionary Algorithms with Keen",
authors: (
(name: "<NAME>", email: "<EMAIL>"),
),
logo: "TransparentBg.png",
)
#show raw.where(lang: "kt"): it => [
#show regex("\b(genotypeOf)\b") : keyword => text(fill: blue, keyword)
#show regex("\b(chromosomeOf)\b") : keyword => text(fill: purple, keyword)
#show regex("\b(booleans)\b") : keyword => text(fill: olive, keyword)
#show regex("\b(BooleanGene|Genotype)\b"): keyword => text(fill: red, keyword)
#it
]
#set quote(block: true)
#set heading(depth: 1)
#outline(depth: 2, indent: true)
#include "installation.typ"
#include "omp/omp_main.typ"
|
https://github.com/jakobjpeters/Typstry.jl | https://raw.githubusercontent.com/jakobjpeters/Typstry.jl/main/docs/source/guides/the_julia_to_typst_interface.md | markdown | MIT License |
# The Julia to Typst Interface
This guide illustrates how to implement Typst formatting for custom types.
## Setup
```jldoctest 1
julia> import Base: show
julia> import Typstry: context, show_typst
julia> using Typstry
```
## Implementation
Consider this custom type.
```jldoctest 1
julia> struct Reciprocal{N <: Number}
n::N
end
```
Implement a [`show_typst`](@ref) method to specify its Typst formatting. Remember to
[Annotate values taken from untyped locations](https://docs.julialang.org/en/v1/manual/performance-tips/#Annotate-values-taken-from-untyped-locations).
```jldoctest 1
julia> show_typst(io, r::Reciprocal) =
if io[:mode]::Mode == markup
print(io, "#let reciprocal(n) = \$1 / #n\$")
else
print(io, "reciprocal(")
show(io, MIME"text/typst"(), Typst(round(r.n; digits = io[:digits]::Int)))
print(io, ")")
end;
```
Although custom formatting may be handled in `show_typst` with `get(io, key, default)`,
this may be repetitive when specifying defaults for multiple methods.
There is also no way to tell if the value has been
specified by the user or if it is a default.
Instead, implement a custom [`context`](@ref) which overrides default, but not user, specifications.
```jldoctest 1
julia> context(::Reciprocal) = Dict(:digits => 2);
```
Those two methods are a complete implementation of the Julia to Typst interface.
The following method is optional:
```jldoctest 1
julia> show(io::IO, m::MIME"text/typst", r::Reciprocal) = show(io, m, Typst(r));
```
Now, a `Reciprocal` is fully supported by Typstry.jl.
```jldoctest 1
julia> r = Reciprocal(π);
julia> println(TypstString(r))
#let reciprocal(n) = $1 / #n$
julia> println(TypstString(r; mode = math))
reciprocal(3.14)
julia> println(TypstString(r; mode = math, digits = 4))
reciprocal(3.1416)
```
## Guidelines
While implementing the interface only requires two methods,
it may be more challenging to determine how a Julia value should be
represented in a Typst source file and its corresponding compiled document.
Julia and Typst are distinct languages that differ in both syntax and semantics,
so there may be multiple meaningful formats to choose from.
### Make the obvious choice, if available
- There is a clear correspondence between these Julia and Typst values
```jldoctest 1
julia> println(TypstString(1))
$1$
julia> println(TypstString(nothing))
#none
julia> println(TypstString(r"[a-z]"))
#regex("[a-z]")
```
### Choose the most semantically rich representation
- This may vary across `Mode`s and domains
- Some modes may not have a meaningful representation, and should be formatted into a mode that does
- Both Julia and Typst support Unicode characters, except unknown variables in Typst's `code` mode
```jldoctest 1
julia> println(TypstString(π; mode = code))
3.141592653589793
julia> println(TypstString(π; mode = math))
π
julia> println(TypstString(π; mode = markup))
$π$
```
### Consider both the Typst source text and compiled document formatting
- A `Docs.Text` is documented to "render [its value] as plain text", and therefore corresponds to text in a rendered Typst document
- A `TypstString` represents Typst source text, and is printed directly
```jldoctest 1
julia> println(TypstString(text"[\"a\"]"))
#"[\"a\"]"
julia> println(TypstString(typst"[\"a\"]"))
["a"]
```
### Try to generate valid Typst source text
- A `TypstString` represents Typst source text, which may be invalid
- A `UnitRange{Int}` is formatted differently for each `Mode`, but is always valid
```jldoctest 1
julia> println(TypstString(1:4; mode = code))
range(1, 5)
julia> println(TypstString(1:4; mode = math))
vec(
1, 2, 3, 4
)
julia> println(TypstString(1:4; mode = markup))
$vec(
1, 2, 3, 4
)$
```
### Test for edge cases
- `1 / 2` may be ambiguous in `code` and `math` mode expressions
- `$1 / 2$` is not ambiguous in `markup` mode
```jldoctest 1
julia> println(TypstString(1 // 2; mode = code))
(1 / 2)
julia> println(TypstString(1 // 2; mode = math))
(1 / 2)
julia> println(TypstString(1 // 2; mode = markup))
$1 / 2$
```
### Format values in containers using `show` with the `text/typst` MIME type
- Values may require their `context`
- The `AbstractVector` method
- Encloses source text in dollar signs, so it changes its `Mode` to `math`
- Formats its elements with an indent, so it increments its `depth`
```jldoctest 1
julia> println(TypstString([true, Any[1, 1.2]]))
$vec(
"true", vec(
1, 1.2
)
)$
```
### Check parametric and abstract types
- Related Julia types may not be representable in the same Typst format
```jldoctest 1
julia> println(TypstString(0:2:6; mode = code))
range(0, 7, step: 2)
julia> println(TypstString(0:2.0:6; mode = code))
(0.0, 2.0, 4.0, 6.0)
```
|
https://github.com/phinixplus/docs | https://raw.githubusercontent.com/phinixplus/docs/master/source/cpu/regfile.typ | typst | Other | #let regfile = [
#import "/source/utilities.typ": note, comment
= Register Files
A register file is a grouping of individually addressable memory cells, also
known as registers, that are closely coupled with the operation and structure
of a processor architecture. PHINIX+ defines three of these register files,
each with a slightly different purpose. The first two of the novel features
outlined in @table-novelfeatures are thus explained in detail in this chapter.
#note[
Registers are the most common subjects (also referred to as operands) of
the instructions a CPU executes, especially if it follows the load-store
paradigm wherein, as a consequence, no arithmetic/logic instruction can
directly operate on memory.
]
== General Purpose Registers
The general purpose registers are a pair of register files, each file aimed
towards a narrower set of use cases. This is the first novel feature of
PHINIX+, initially noted in the first entry of @table-novelfeatures. Each file
consists of 16 registers for a total of 32 registers. The two register files
are to be detailed in the following two subsections and an extra subsection
listing out the registers in table form.
#comment[
The quantity of general purpose registers PHINIX+ provides, 32, is a
commonality amongst the majority of RISC processor architectures,
consistently more than the usual CISC architecture. The reason for this
is that having instructions that do less work would necessitate more
space to store intermediate values in.
]
=== Data Registers
The data registers are the most versatile set of registers available. 16
are provided, each one being 32 bits in width, denoted #emph[\$xN] (where
N is a single hexadecimal digit ranging from 0 to 9 and then from A to F).
They are intended to hold values loaded from or to be stored to memory and
to be the subject (operand) of most arithmetic and logic operations the CPU
performs.
#note[
It is important to note that the first data register, #emph[\$x0] is not
actually a register one can write to. Register #emph[\$x0] is a constant
holding the value zero. This is a nigh universal design pattern across
the RISC processor family and has been included in PHINIX+ for the same
reasons.
]
#comment[
Data register #emph[\$x0] being a constant zero aids in better usage of
existing instructions (or conversely allows for the reduction of the needed
instructions) in at least the following two ways:
- #[By allowing them to discard their result by storing to this
register when only the condition code generated from that instruction
is required. For example, a comparison instruction can be achieved by
doing a subtraction and storing the result to register zero.]
- #[By allowing instructions that address memory to coalesce under a
single addressing mode, "reg + imm" (see the sketch after this comment). This works because when
supplying #emph[\$x0] as the register, the effective address becomes
just the immediate, and by supplying a zero immediate, the effective
address becomes the supplied register. #footnote[Addressing modes as
they relate to PHINIX+ are discussed in TODO.]]
]
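To make the coalesced addressing mode above concrete, the following sketch models the "reg + imm" computation in plain Python. It is purely illustrative: it is not PHINIX+ code or syntax, and the register contents and the `effective_address` helper are invented for this example.

```python
# Illustrative model only -- not PHINIX+ code or syntax.
# A register file in which $x0 always reads as zero.
regs = {"x0": 0, "x5": 0x2000}

def effective_address(reg: str, imm: int) -> int:
    """The single 'reg + imm' addressing mode described above."""
    return regs[reg] + imm

# Absolute addressing: supply $x0, so the address is just the immediate.
assert effective_address("x0", 0x1234) == 0x1234
# Register-indirect addressing: supply a zero immediate.
assert effective_address("x5", 0) == 0x2000
# Base + offset: the general case.
assert effective_address("x5", 8) == 0x2008
```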
=== Address Registers
The address registers are a secondary set of registers that are less versatile
computation-wise than the data registers. Just like the data registers, 16 are
provided, again, each one being 32 bits in width, denoted instead #emph[\$yN]
(where N is a single hexadecimal digit, ranging from 0 to 9 and then
from A to F).
While they offer more functionality than just storing and manipulating pointers, the address registers' primary purpose is nevertheless that task, or serving as a bank of secondary storage when the data registers are not enough to hold all the values of a computation.
#pagebreak()
=== Data and Address Register Tables
The previously discussed general purpose registers are hereby illustrated in
table form.
#figure(grid(columns: 2, gutter: 1em,
table(columns: (40mm),
table.header([Data Registers]),
[\$x0], [\$x1], [\$x2], [\$x3], [\$x4], [\$x5], [\$x6], [\$x7],
[\$x8], [\$x9], [\$xA], [\$xB], [\$xC], [\$xD], [\$xE], [\$xF],
),
table(columns: (40mm),
table.header([Address Registers]),
[\$y0], [\$y1], [\$y2], [\$y3], [\$y4], [\$y5], [\$y6], [\$y7],
[\$y8], [\$y9], [\$yA], [\$yB], [\$yC], [\$yD], [\$yE], [\$yF]
)
), caption: [PHINIX+'s general purpose registers]) <table-generalregs>
== Condition Code Registers
The condition code registers, or "flag" registers, are a collection of 8
registers each one being only 1 bit in width. Their purpose is, as the name
suggests, to hold intermediate condition codes used for program control flow. Branch instructions, which are later discussed, are intimately
tied with this set of registers. #footnote[Branch instructions and how they
relate to the condition code registers are discussed in TODO.] This is the
second novel feature of PHINIX+, initially noted in the second entry of
@table-novelfeatures. They are denoted #emph[\$cN] (where N is a single octal
digit, ranging from 0 to 7).
#figure(table(columns: 1,
table.header([Condition Code Registers]),
[\$c0], [\$c1], [\$c2], [\$c3], [\$c4], [\$c5], [\$c6], [\$c7],
), caption: [PHINIX+'s condition code registers]) <table-condregs>
#comment[
Condition code register #emph[\$c0] is a register that constantly and
forever holds a zero bit. This simplifies the program logic needed to handle
a cascading set of conditions and reduces the amount of instructions
required to handle all the needed cases.
]
== Register Calling Convention
The tables of registers previously showcased contained just one column, which
lists the "architectural names" of the registers. They denote the systematic
name given to each register from a hardware point of view. An implementer does not care how the registers are used, because they are all generic, so they get generic architectural names.
In this section the registers are re-examined, this time from a software point of view instead. In contrast with the implementer, a programmer needs to organize the registers given to them in a consistent manner in order to ensure proper behavior when calling into subroutines, and must thus follow a #emph[convention] for #emph[calling]: a pre-agreed set of rules that ensures compatibility between interacting subroutines.
To assist in this endeavour, this document provides a reference, standard
calling convention that any software written for the processor is advised to
use, such that software written by different developers can interoperate. A
calling convention's whole purpose is to provide a common ground for software
development, so that someone's code is able to use someone else's.
#comment[
Developers are free to come up with an alternative convention to better
suit their needs, it just then falls unto them to interface with other
existing software which is not compatible with their custom convention.
]
=== The Concept of Saving
In the act of a subroutine call, there are two hypothetical entities at play:
the code performing the call--referred to as the caller, and the code being
called--referred to as the callee. Using these roles as a framework for a
calling convention yields the simplest form of segregation based on whose duty
it is to "clean up" the register, revert its contents to a known-good state.
Thus, each register can be assigned to either be saved by the caller or by the
callee. The PHINIX+ calling convention bases its design on this principle.
In practice, what it means for a register to be caller-saved is that the code
being called has no obligation to keep the register at its initial value. After the subroutine returns, the caller has to assume that
all of the caller-saved registers now contain garbage values. Thus, it's the
duty of the caller to preserve the values of registers it wants to continue
using after the subroutine call.
#comment[
There are mainly two ways the caller can preserve the value of a
caller-saved register that it wants to keep using after the subroutine
returns. Those are:
- #[Exploiting the stack by pushing the value of the register
onto its stack frame. #footnote[The stack is a concept that is
explained in detail in TODO.] <footnote-stack>]
- #[Moving the value of the register onto another, callee-saved
register that had previously been saved itself.]
]
Likewise, what it means in practice for a register to be callee-saved is that
the code doing the call expects that the value of that register remains the same
after the return of the subroutine without it having to do anything. Thus, it's
the duty of the code being called, the callee, to preserve the value in some
way before using it and then revert the register to the old value before
returning.
#comment[
There are, again, mainly two ways the callee can preserve the value of a
callee-saved register that it later intends to use. Those are:
- #[Exploiting the stack by pushing the value of the register
onto its stack frame. #footnote(<footnote-stack>)]
- #[Moving the value of the register onto another, caller-saved register.]
]
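As a purely illustrative model (plain Python with invented names, not PHINIX+ assembly), the sketch below shows both halves of this contract: the callee saves and restores the callee-saved register it uses while freely clobbering a caller-saved temporary, and the caller itself preserves a temporary it still needs before making the call.

```python
# Illustrative model only -- a dictionary stands in for the register file.
def callee(regs: dict) -> None:
    saved = regs["$s0"]        # callee-saved: preserve before use (e.g. on the stack)
    regs["$t0"] = 0xBEEF       # caller-saved: may be clobbered without saving
    regs["$s0"] = 0xCAFE       # scratch work in the callee-saved register
    regs["$s0"] = saved        # restore before returning

def caller(regs: dict) -> int:
    regs["$t0"] = 41           # a value the caller still wants afterwards...
    keep = regs["$t0"]         # ...so the caller itself must preserve it (a copy here)
    callee(regs)
    return keep + regs["$s0"]  # $s0 is guaranteed unchanged by the callee

regs = {"$t0": 0, "$s0": 1}
assert caller(regs) == 42      # 41 kept by the caller + 1 preserved by the callee
```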
=== Convention Tables Glossary
The three tables to follow, @table-dataregs-conv, @table-addrregs-conv, and
@table-condregs-conv, contain a compact description of how the registers are intended to be used by the software developer. The terms used in these tables are hereby
explained.
The headers of the tables contain these four entries with the exception of the
condition code registers table, which contains only the first and last entries:
- #[#emph[Architectural Name] is the one column of the original table.]
- #[#emph[Convention Name] lists out the name that a programmer will use
inside their assembly language development environment to refer to the
specific register. It is an abbreviated form of the register's intended
purpose.]
- #[#emph[Description] expands on the #emph[Convention Name] by listing out
in full the intended purpose of the specific register. Not every register
has a unique purpose, so smaller groupings of registers with a common
purpose are additionally numbered.]
- #[#emph[Saving] lists out the assigned saving of the specific register,
either caller-save or callee-save (in the cases where it applies to do so).
The implications for each of the two savings are explained in the previous
sub-section.]
=== Data Register Convention
The following table is an expanded version of the before shown table of data
registers (left half of @table-generalregs). Three additional columns have been
added in order to detail the proposed standard calling convention for the data
register file.
#figure(table(columns: 4,
table.header([Architectural Name], [Convention Name], [Description], [Saving]),
[\$x0], [\$zr], [Constant Zero], [N/A],
[\$x1], [\$at], [Assembler Temporary], [Caller],
[\$x2], [\$rp], [Return Pointer], [Caller],
[\$x3], [\$t0], [Temporary Value \#0], [Caller],
[\$x4], [\$t1], [Temporary Value \#1], [Caller],
[\$x5], [\$t2], [Temporary Value \#2], [Caller],
[\$x6], [\$a0], [Subroutine Argument \#0], [Caller],
[\$x7], [\$a1], [Subroutine Argument \#1], [Caller],
[\$x8], [\$a2], [Subroutine Argument \#2], [Caller],
[\$x9], [\$s0], [Saved Value \#0], [Callee],
[\$xA], [\$s1], [Saved Value \#1], [Callee],
[\$xB], [\$s2], [Saved Value \#2], [Callee],
[\$xC], [\$s3], [Saved Value \#3], [Callee],
[\$xD], [\$s4], [Saved Value \#4], [Callee],
[\$xE], [\$s5], [Saved Value \#5], [Callee],
[\$xF], [\$fp], [Frame Pointer], [Callee]
), caption: [PHINIX+'s data registers]) <table-dataregs-conv>
=== Address Register Convention
The following table is an expanded version of the before shown table of address
registers (right half of @table-generalregs). Three additional columns have been
added in order to detail the proposed standard calling convention for the
address register file in the same manner as before.
#figure(table(columns: 4,
table.header([Architectural Name], [Convention Name], [Description], [Saving]),
[\$y0], [\$a3], [Subroutine Argument \#3], [Caller],
[\$y1], [\$a4], [Subroutine Argument \#4], [Caller],
[\$y2], [\$a5], [Subroutine Argument \#5], [Caller],
[\$y3], [\$t3], [Temporary Value \#3], [Caller],
[\$y4], [\$t4], [Temporary Value \#4], [Caller],
[\$y5], [\$t5], [Temporary Value \#5], [Caller],
[\$y6], [\$t6], [Temporary Value \#6], [Caller],
[\$y7], [\$t7], [Temporary Value \#7], [Caller],
[\$y8], [\$s6], [Saved Value \#6], [Callee],
[\$y9], [\$s7], [Saved Value \#7], [Callee],
[\$yA], [\$gp], [Globals Pointer], [Callee],
[\$yB], [\$sp], [User Stack Pointer], [Callee],
[\$yC], [\$k0], [System Reserved \#0], [Callee],
[\$yD], [\$k1], [System Reserved \#1], [Callee],
[\$yE], [\$k2], [System Reserved \#2], [Callee],
[\$yF], [\$kp], [System Stack Pointer], [Callee]
), caption: [PHINIX+'s address registers]) <table-addrregs-conv>
#note[
The last four registers in the address register file are "privileged".
That means that they are accessible only when the processor is in a special
mode of operation reserved for managerial code, like an operating system's
kernel. #footnote[PHINIX+'s privileged execution mode is explained in detail
in TODO.] When not in this mode, the registers act nominally the same as the
data register #emph[\$zr]. An implementer may, however, choose to have the
processor react on such a violating access by alerting the privileged code
of such an action. #footnote[Such an alert would constitute an interrupt.
Interrupts are explained in detail in TODO.]
]
=== Condition Code Register Convention
The following table is an expanded version of the before shown table of
condition code registers (@table-condregs). Only one additional column has been
added in order to detail the proposed standard calling convention for the
condition code register file. Only a saving convention has been assigned to
this file, in contrast to the other two, due to the nature of the file. All of the registers fall into a single grouping with callee saving.
#figure(table(columns: 2,
table.header([Architectural Name], [Saving]),
[\$c0], [N/A],
[\$c1], [Callee],
[\$c2], [Callee],
[\$c3], [Callee],
[\$c4], [Callee],
[\$c5], [Callee],
[\$c6], [Callee],
[\$c7], [Callee]
), caption: [PHINIX+'s condition code registers]) <table-condregs-conv>
#comment[
Having eight condition code registers, each one being 1 bit in width, means
that saving and restoring the contents can be done in one fell swoop. The
entire register file's contents can fit into a single byte. To aid in this
mechanic, all of the registers have the same saving (with the exception of
#emph[\$c0], on which saving is not applicable due to its constant nature).
]
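To illustrate this mechanic with plain Python (not part of any actual PHINIX+ software), packing the eight one-bit registers into a single byte and unpacking them again could look like the sketch below, where bit N holds #emph[\$cN]:

```python
# Illustrative model only: the whole condition code file fits in one byte.
def pack_flags(flags: list[int]) -> int:
    assert len(flags) == 8 and all(f in (0, 1) for f in flags)
    byte = 0
    for n, f in enumerate(flags):
        byte |= f << n                  # $cN occupies bit N
    return byte

def unpack_flags(byte: int) -> list[int]:
    return [(byte >> n) & 1 for n in range(8)]

flags = [0, 1, 0, 0, 1, 1, 0, 1]        # $c0 is always 0 by definition
saved = pack_flags(flags)               # one store saves the entire file
assert unpack_flags(saved) == flags     # one load restores it
```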
]
|
https://github.com/SkytAsul/INSA-Typst-Template | https://raw.githubusercontent.com/SkytAsul/INSA-Typst-Template/main/insa-template/letter-template.typ | typst | MIT License | #let heading-fonts = ("League Spartan", "Arial", "Liberation Sans")
#let normal-fonts = ("Source Serif", "Source Serif 4", "Georgia")
#let insa-letter(
author : none,
insa : "rennes",
date : datetime.today(),
footer: none,
doc
) = {
set text(lang: "fr", font: normal-fonts)
set page(
"a4",
margin: (top: 3.2cm, x: 2.5cm),
header-ascent: 0.71cm,
header: [
#place(left, image("assets/"+insa+"/logo.png", height: 1.28cm), dy: 1.25cm)
#place(right + bottom)[
#author\
#if type(date) == datetime [
#date.display("[day]/[month]/[year]")
] else [
#date
]
]
],
footer: context {
place(
right + bottom,
dx: page.margin.at("right") - 0.6cm,
dy: -0.6cm,
box(width: 2.34cm, height: 2.34cm, image("assets/footer.png"))
)
if counter(page).final() != (1,) { place(
right + bottom,
dx: page.margin.at("right") - 0.6cm,
dy: -0.6cm,
box(width: 1.15cm, height: 1.15cm, align(center + horizon, text(fill: white, weight: "bold", counter(page).display())))
) }
footer
}
)
show heading: set text(font: heading-fonts)
doc
}
|
https://github.com/piepert/grape-suite | https://raw.githubusercontent.com/piepert/grape-suite/main/src/colors.typ | typst | MIT License | #let blue = rgb("#648fff")
#let purple = rgb("#555ef0").darken(50%)
#let magenta = rgb("#dc267f")
#let brown = rgb("#fe6100").darken(50%)
#let yellow = rgb("#ffb000")
#let orange = none
#let red = none
#let green = none |
https://github.com/maxgraw/bachelor | https://raw.githubusercontent.com/maxgraw/bachelor/main/apps/document/src/8-appendix/task.typ | typst | === Einleitung
Stellen Sie sich vor, Sie möchten ein Zimmer Ihrer Wahl mit einem neuen Schrank einrichten und sind auf das modulare Möbelsystem „StackCubes“ gestoßen. Sie sind sich jedoch unsicher, ob dieses System Ihren Anforderungen entspricht und ob es in Ihr Zimmer passt. Im Folgenden wird Ihnen eine Applikation vorgestellt, die es ermöglicht, mittels Augmented Reality das Möbelsystem in Ihrem eigenen Zimmer zu erstellen und zu betrachten.
*Identifikationsnummer:*
#linebreak()
Um die Anonymität zu gewährleisten, erstellen Sie bitte eine Identifikationsnummer, welche genutzt werden kann, um das Löschen Ihrer Daten zu ermöglichen.
#pagebreak()
=== Informationen
Die Applikation finden Sie unter ar.maxgraw.com.
Bitte öffnen Sie die Applikation mit einem Android-Mobilgerät, da iOS-Geräte nicht unterstützt werden.
#pagebreak()
=== Aufgabe 1
In der ersten Aufgabe sollen Sie sich zunächst mit dem System und Ablauf vertraut machen. Folgen Sie dafür den folgenden Schritten:
1. Starten Sie die Anwendung über den Button "Start AR" am unteren Bildschirmrand.
2. Stehen Sie auf und richten Sie die Kamera Ihres Smartphones auf eine freie, ebene Stelle im Raum.
3. Folgen Sie den Anweisungen auf dem Bildschirm, bis der graue Kreis auf dem Boden angezeigt wird.
4. Tippen Sie auf den Bildschirm, um das Menü zur Auswahl der Möbel zu öffnen.
5. Wählen Sie aus der Liste der verfügbaren Möbelstücke das 2x1-Regal aus.
6. Betrachten Sie das platzierte Möbelstück von allen Seiten.
*Nachdem alle Schritte erfüllt wurden, bereiten Sie die Anwendung für die nächste Aufgabe vor:*
1. Schließen Sie die Ansicht über das Kreuz am oberen rechten Rand der Anwendung.
2. Laden Sie die Seite der Anwendung unter ar.maxgraw.com neu, indem Sie vom oberen Rand der Seite nach unten ziehen.
#pagebreak()
=== Aufgabe 2
In der zweiten Aufgabe sollen Sie sich mit der Aktion des Löschens vertraut machen:
1. Starten Sie die Anwendung über den Button "Start AR" am unteren Bildschirmrand.
2. Stehen Sie auf und richten Sie die Kamera Ihres Smartphones auf eine freie, ebene Stelle im Raum.
3. Folgen Sie den Anweisungen auf dem Bildschirm, bis der graue Kreis auf dem Boden angezeigt wird.
4. Tippen Sie auf den Bildschirm, um das Menü zur Auswahl der Möbel zu öffnen.
5. Wählen Sie aus der Liste der verfügbaren Möbelstücke das 1x1-Regal aus.
6. Tippen Sie auf das platzierte Möbelstück.
7. Tippen Sie auf den Button "Löschen".
*Nachdem alle Schritte erfüllt wurden, bereiten Sie die Anwendung für die nächste Aufgabe vor:*
1. Schließen Sie die Ansicht über das Kreuz am oberen rechten Rand der Anwendung.
2. Laden Sie die Seite der Anwendung unter ar.maxgraw.com neu, indem Sie vom oberen Rand der Seite nach unten ziehen.
#pagebreak()
=== Aufgabe 3
In der dritten Aufgabe sollen Sie eine komplexere Möbelstruktur erstellen. Folgen Sie dafür den folgenden Schritten:
1. Starten Sie die Anwendung über den Button "Start AR" am unteren Bildschirmrand.
2. Stehen Sie auf und richten Sie die Kamera Ihres Smartphones auf eine freie, ebene Stelle im Raum.
3. Folgen Sie den Anweisungen auf dem Bildschirm, bis der graue Kreis auf dem Boden angezeigt wird.
4. Tippen Sie auf den Bildschirm, um das Menü zur Auswahl der Möbel zu öffnen.
5. Wählen Sie aus der Liste der verfügbaren Möbelstücke das 1x1-Regal aus.
6. Tippen Sie auf das platzierte Möbelstück.
7. Tippen Sie auf eines der grünen Kreuze, die um das ausgewählte Möbelstück erscheinen.
8. Wählen Sie im Menü ein weiteres Möbelstück aus.
9. Fügen Sie weitere Möbelstücke an das System an. Wählen Sie dafür ein Möbelstück aus und führen Sie Schritt 7 erneut aus.
10. Betrachten Sie das platzierte Möbelkonstrukt von allen Seiten.
#pagebreak()
=== Allgemeine Fragen
Alter
Geschlecht
Haben Sie bereits Erfahrung mit Augmented Reality?
Welches Handymodell benutzen Sie?
#pagebreak()
=== Fragebogen
Um das Produkt zu bewerten, füllen Sie bitte den nachfolgenden Fragebogen aus. Er besteht aus Gegensatzpaaren von Eigenschaften, die das Produkt haben kann. Abstufungen zwischen den Gegensätzen sind durch Kreise dargestellt. Durch Ankreuzen eines dieser Kreise können Sie Ihre Zustimmung zu einem Begriff äußern.
Entscheiden Sie möglichst spontan. Es ist wichtig, dass Sie nicht lange über die Begriffe nachdenken, damit Ihre unmittelbare Einschätzung zum Tragen kommt. Bitte kreuzen Sie immer eine Antwort an, auch wenn Sie bei der Einschätzung zu einem Begriffspaar unsicher sind oder finden, dass es nicht so gut zum Produkt passt. Es gibt keine „richtige“ oder „falsche“ Antwort. Ihre persönliche Meinung zählt!
#pagebreak()
=== UEQ Fragebogen
#image("../media/ueq.png", width: 100%) |
|
https://github.com/MrToWy/Bachelorarbeit | https://raw.githubusercontent.com/MrToWy/Bachelorarbeit/master/Diagrams/idea2.typ | typst | ```pintora
erDiagram
Module {
INT id PK
INT credits
INT hoursPresence
}
Module_Translation {
INT id PK
STRING name
STRING subtitle
STRING description
STRING exam
STRING learningOutcomes
INT languageId FK
INT moduleId FK
}
Module ||--o{ Module_Translation : "translations"
``` |
|
https://github.com/Kasci/LiturgicalBooks | https://raw.githubusercontent.com/Kasci/LiturgicalBooks/master/covers/oktoich/H12.typ | typst | #set text(font: "Monomakh Unicode", lang: "cu")
#set page(header: "", footer: "")
#import "/style.typ": *
#align(center)[#text(80pt)[#redText[☦]]]
#align(center)[#redText[
#text(60pt)[Ѻ҆КТѠ́ИХЪ]\ \
#text(20pt, black)[си́рѣчь]\ \
#text(30pt)[Ѻ҆СМОГЛА́СНИКЪ]\ \
#text(50pt)[гла́сы а҃‑в҃]\
]]\ \ \
#align(center)[#text(20pt)[<NAME>\ Prešov\ 2024]]
|
|
https://github.com/86challenge/rules | https://raw.githubusercontent.com/86challenge/rules/main/rules.typ | typst | #import "template.typ": template, frontmatter, spacer
#show: template
#frontmatter(title: "2025 Rules", revision: "0.1")
#heading(outlined: false)[
Introduction
]
Welcome to 86 Challenge. Drivers of all skill levels are welcome and there is no charge to participate.
See #link("https://86challenge.us")[86challenge.us] for more information, including schedule, historical results, and forms for registering for GT Radial's Championship Cash Prize and tire contingency program.
Questions, corrections, and clarifications can be raised on our #link("https://www.facebook.com/groups/1009986312348877")[Facebook Group] or #link("https://discord.gg/sDKdV4TS4T")[Discord server].
#spacer
+ #heading[General Rules]
+ Chassis must be one of the following (hereafter referred to by its _Designation_):
#table(
columns: 2,
align: (left, center),
table.header(
[Chassis],
[Designation],
),
[
- 2012–2016 Scion FR-S
- 2012–2020 Subaru BRZ
- 2017–2020 Toyota 86
],
[Gen. 1],
[
- 2022+ Subaru BRZ
- 2022+ Toyota GR86
],
[Gen. 2],
)
+ *GT Radial Sponsorship*
+ Participation in the GT Radial Championship Cash Prize and tire contingency program is optional and requires submitting #link("https://86challenge.us/documents")[registration forms] directly to GT Radial
+ Participants in the Championship Cash Prize and tire contingency must display 1 large GT Radial decal on the windshield and display 2 smaller GT Radial decals, one on each side of their car
+ *Timing*
+ The following timing methods are accepted, but transponder time will take priority if more than one time is available for a given lap:
#table(
columns: 2,
align: (left, center),
table.header(
[Timing Method],
[Time Penalty (s)],
),
[Official timing transponder], [0.0],
[Data logger with at least 10Hz GPS (e.g. Aim Solo 2)], [0.2],
[Data logger with at least 1Hz GPS (e.g. iPhone)], [0.5],
)
+ If a driver participates in multiple run groups, they must declare in advance which run group will be used for official timing. Please declare which run group will be used for timing on your event registration form if you sign up for multiple run groups.
+ If a driver participates in a dedicated 86 Challenge run group, their timing will be taken exclusively from that run group.
+ *Classes*
+ Cars will be classed according to rules and modification points listed in the following sections.
+ A car may be used in multiple classes if driven by different drivers. Please comment on your event registration form if you are co-driving a car and what group you are driving in.
+ Prior to a driver’s first event of the season, they may declare to run in a higher class than their modification points would normally dictate.
+ A driver shall not compete in more than one class during an event.
+ If a driver modifies their car mid-season and moves up or down a class, the points earned in the previous class will NOT be carried over to the new class. Drivers may choose to not earn points in the new class and keep the points in the previous class. Drivers participating in the GT Radial Championship Prize challenge cannot change classes.
+ *Scoring*
+ Championship points are awarded at the end of each event according to the chart below:
#table(
columns: 2,
align: (center, center),
table.header(
[Finish Position],
[Points],
),
[1st], [10],
[2nd], [7],
[3rd], [5],
[4th], [4],
[5th], [3],
[6th], [2],
[7th+], [1],
)
+ At the end of the season, the driver with the most points from their best 7 scores out of 10 events wins the championship.
+ Ties will go to the competitor with the lowest time at the same-configuration Thunderhill East regular event during the season. If there is still a tie, the participant with the fastest time at the Laguna Seca regular event during the season will be awarded the win.
+ *In-season rule changes*
+ Changes which make the rules more _restrictive_ will be announced 60 days ahead of enforcement
+ Changes which make the rules more _permissive_ may be enforced immediately
+ All rule changes will be announced and will include the date on which they become effective
+ #heading[GT Radial Stock Class]
_This class is designed to keep costs to a minimum and be attractive to drivers with nearly-stock vehicles. Participants are eligible for GT Radial Championship Cash Prizes and GT Radial Tire Contingency._
+ Vehicles in this class are only allowed performance modifications included in this section
+ In order to balance performance between Gen. 1 and Gen. 2 cars, a time handicap will be added to Gen. 2 lap times, according to the table below. For any tracks/configurations not listed below, the closest comparable track configuration will be used
#table(
columns: 2,
align: (left, center),
table.header(
[Track],
[Handicap Time (s)],
),
[Buttonwillow Raceway \#1], [2.5],
[Buttonwillow Raceway \#13], [2.0],
[Buttonwillow Raceway New Track], [1.5],
[Laguna Seca], [2.5],
[Sonoma Raceway], [2.5],
[Thunderhill 5-mile Bypass], [4.0],
[Thunderhill East], [2.5],
[Thunderhill West], [1.5],
)
+ *Engine*
+ Engine tuning must be unmodified
+ Oil coolers, accumulators, and/or sump changes are allowed
+ Gen. 2 intake charcoal filters shall be present and unaltered
+ *Alignment*
+ Front camber shall not exceed (be more negative than) -2.5 degrees
+ Front camber may be adjusted using only the following parts:
#table(
columns: 2,
align: (left, left),
table.header(
[Description],
[Allowed Parts],
),
[Crash bolt (i.e. 14mm lower bolt used in upper position)],
[
- Toyota SU003–02818
- Subaru 901000394
],
[Camber bolts (in either/both positions)],
[
- SPC 81280
- SPC 81305
- Whiteline KCA416
- Whiteline KCA414
- Eibach 5.81305K
],
[Front Strut Mounts],
[
- Pedders PED-580096
- Whiteline KCA335
],
[Camber plates], [- Any],
)
+ Rear camber with stock, unmodified, and undamaged components is not restricted
+ Rear camber with adjustable components shall not exceed -2.0 degrees
+ Rear camber may be adjusted using only the following parts:
#table(
columns: 2,
align: (left, left),
table.header(
[Description],
[Allowed Parts],
),
[OEM–style, adjustable, steel rear lower control arm],
[
- SPC 67660
- Moog RK100136
- Whiteline KTA216A
],
[Camber bolts], [- H&R TC112],
)
+ Toe angle may be adjusted without limitation
+ Cutting or bending of any vehicle sheet metal or suspension components to change alignment or ride height is not allowed
+ *Wheels*
+ Wheels must have a maximum width of 8.0 inches
+ Wheel spacers are allowed
+ *Tires*
+ Tires must be one of the following:
#table(
columns: 2,
align: (left, left),
table.header([Models], [Sizes]),
[GT Radial Champiro SX2 or HPY],
[
- 215/45R17
- 225/45R17
],
[Michelin Primacy HP (OEM)\*], [- 215/45R17],
[Michelin Pilot Sport 4 (Gen. 2 OEM)\*], [- 215/40R18],
)
+ OEM tires (marked with \*) are only allowed for a competitor’s first 2 events
+ Points earned with OEM Michelin tires will not be eligible toward GT Radial tire contingency nor toward GT Radial Championship Cash Prize
+ *Aero*
+ STI and TRD lips are not allowed
+ Gen. 1 BRZ tS wing is not allowed
+ Gen. 2 ducktails (OEM or equivalently-sized) are allowed
+ *Safety*
+ Any safety device may be used
+ Cars equipped with a full aftermarket safety system (bucket seats, racing harnesses, neck restraint system, full or half roll cage, etc.) are allowed, as long as total curb weight of the car with full tank of fuel remains above 2800 lbs without driver. The car will need to be weighed at a location approved by the Rules Board before it is allowed to enter Stock Class.
+ *Misc*
+ OEM-style, off-the-shelf, non-adjustable, and non-rebuildable dampers with unaltered valving are allowed
+ Braking upgrades are allowed
+ Replacing OEM parts with newer/older OEM parts is allowed (e.g.\ dampers, sway bars, etc).
+ No lightweight replacement parts (e.g.\ battery, lighter-than-OEM trunk) and no removal of parts are allowed.
+ Removal of spare tire, floor mats, and tools is allowed.
+ Non-OEM cat-back exhaust with a weight greater than 32 lbs is allowed (no removal of any catalyst)
+ #heading[GT Radial Street Class]
_This class allows moderate modification flexibility and balances competition between generations through Modification Points. Participants are eligible for GT Radial Championship Cash Prizes and GT Radial Tire Contingency._
+ Drivers are allotted 3 modification points according to #link(<mods>)[Modification Points]
+ Minimum weight is 2800 lbs without driver and with a full tank of gas
+ *Wheels*
+ Wheels must have a maximum width of 9.0 inches
+ *Tires*
+ Tires must be one of the following:
#table(
columns: 2,
align: (left, left),
table.header([Models], [Sizes]),
[GT Radial Champiro SX2 or HPY], [- 17” only with 245mm maximum width],
[Michelin Primacy HP (OEM)\*], [- 215/45R17],
[Michelin Pilot Sport 4 (Gen. 2 OEM)\*], [- 215/40R18],
)
+ OEM tires (marked with \*) are only allowed for a competitor’s first 2 events
+ Points earned with OEM Michelin tires will not be eligible toward GT Radial tire contingency nor toward GT Radial Championship Cash Prize
+ #heading[Touring Class]
_This class allows for significant modification freedom and times are adjusted based on Modification Points. Participants are eligible for GT Radial Tire Contingency._
+ Drivers are allotted 10 modification points according to #link(<mods>)[Modification Points]
+ Maximum tire width: 255 mm
+ Minimum weight is 2750 lbs without driver and with a full tank of gas
+ Event standings will be determined by _Adjusted Track Time_, as defined below:
$
"Adjustment Factor" =
"Gen. 1 Stock record at event track" / "Gen. 1 Stock record at Thunderhill East Bypass"
$
$
"Touring Handicap" = "Modification Points" x "Adjustment Factor"
$
$
"Adjusted Track Time" =
"Raw Time" + "Touring Handicap"
$
+ You can use the following table to find the _Touring Handicap_ given Modification Points and a track:
#let mod_points = range(-2, 21, step: 1).map(n => n / 2)
#let adjustment_factors = csv("adjustment_factor.csv")
#show table.cell.where(x: 0): strong
#show table: set text(size: 8pt)
#table(
columns: (auto, ..mod_points.map(point => 1fr)),
align: (left, ..mod_points.map(point => center)),
table.header([Track/Points], ..mod_points.map(point => str(point))),
..for (track, factor) in adjustment_factors {
(
[#track],
..mod_points.map(points => str(
calc.round(
points * float(factor),
digits: 1,
),
)),
)
}
)
+ #heading[Unlimited Class]
_This class allows unconstrained builds to achieve the ultimate 86 lap times. For people who don’t like conforming to rules!_
+ Anything goes!
+ #heading[Modification Points] <mods>
This section applies only to GT Radial Street Class and Touring Class.
#set align(center)
#set table(columns: (60pt, 1fr), align: (center, left))
== Chassis
#table(
table.header([Points], [Chassis]),
[0], [Gen. 1 (2013–2020 model years)],
[2], [Gen. 2 (2022+ model years)],
)
== Tires
Any tire not on this list will automatically be 6pts
#table(
table.header([Points], [Tires]),
[-1], [
- 17" GT Radial SX2
- 17" GT Radial HPY
- 17" Michelin Primacy HP
],
[0], [
- Michelin Pilot Sport 4 (OEM size only 215/40R18)
- Yokohama Advan Apex V601
],
[1], [
- 18" GT Radial SX2
- GT Radial SX2 RS
],
[2], [
- Continental ExtremeContact Sport (ECS)
- Dunlop Direzza ZII Star Spec
- Dunlop Direzza ZIII
- Falken Azenis RT615K
- Federal 595 RS-RR
- Goodyear Eagle F1 Supercar 3
- Hankook Ventus RS-4
- Kumho Ecsta V730
- Maxxis VR-1
- Michelin Pilot Sport 4S
- Michelin Pilot Sport Cup 2 (180TW)
- Michelin Pilot Sport Cup 2 Connect
- Nankang NS-2R
- Nexen Nfera SUR4
- Nitto NT01
- Nitto NT05
- Toyo Proxes R888
- Yokohama Advan AD08R
],
[3], [
- BFGoodrich g-Force Rival S
- Bridgestone Potenza RE71R
- Bridgestone Potenza RE71RS
- Continental ExtremeContact Force (ECF)
- Falken Azenis RT660
- Goodyear Eagle F1 Supercar 3R
- Maxxis Victra RC-1
- Maxxis Victra VR2
- Nankang AR-1
- Nankang CR-1
- Nexen N’Fera SUR4G
- Yokohama Advan A052
],
[5], [Treadwear 40–99],
[6], [Racing slicks or any other unlisted tire],
)
#pagebreak()
== Engine & Drivetrain
#table(
table.header([Points], [Modifications]),
[0.5], [Gen. 1 Aftermarket intake, header, front-pipe, overpipe, and/or any removal of OEM catalytic converter],
[0.5], [Gen. 1 NA tune for ≤ 93 (R+M)/2 octane rating (non-OEM)],
[1.5], [Gen. 1 NA tune for >15% ethanol content or gasoline >93 (R+M)/2 octane rating],
[0.5], [Gen. 2 Aftermarket intake, header, front-pipe, overpipe, and/or any removal of OEM catalytic converter],
[1], [Gen. 2 NA tune for ≤ 93 (R+M)/2 octane rating (non-OEM)],
[1.5], [Gen. 2 NA tune for >15% ethanol content or gasoline >93 (R+M)/2 octane rating],
[5], [Forced induction and tune for ≤ 93 (R+M)/2 octane rating],
[2], [Forced induction tuned for >15% ethanol content or gasoline >93 (R+M)/2 octane rating],
[0.5], [Lighter than OEM rotational inertia flywheel, clutch, and/or driveshaft],
[0.5], [Non-OEM limited slip differential],
[1], [Non-OEM Transmission],
[2], [FA20 to FA24 engine swap],
[X], [Any other engine swap or hybrid system (contact Rules Board for individual engine swap classing)],
)
== Front Aero
#table(
table.header([Points], [Modifications]),
[0], [Small Front Aero: STI lip, TRD lip, or any device extending \<50mm from bumper],
[1], [Medium Front Aero: Device extending 50-74mm outward/rearward from bumper],
[1.5], [Large Front Aero: Device extending 75-124mm outward/rearward from bumper],
[3], [Unlimited Front Aero: Device extending >125mm from bumper],
[0.5], [Canards/dive planes, hood vents, fender vents, or vertical deviation on splitter],
)
== Rear Aero
#table(
table.header([Points], [Modifications]),
[0], [Really Small Rear Aero: OEM rear spoilers, OEM wings, aftermarket \<60mm height],
[0.5], [Small Rear Aero: Aftermarket rear spoiler >60mm height],
[1], [Medium Rear Aero: Gen. 1 BRZ tS wing, SARD LSR, wings with max width of 1400mm],
[1.5], [Large Rear Aero: APR GTC-200, Voltex Type 12, wings with max width of 1460mm],
[2], [Unlimited Rear Aero: Wings wider than 1460mm, higher than 245mm, or chord length >250mm],
[0.5], [Any rear diffuser extending in front of the rear axle or behind the rear bumper],
)
== Suspension
#table(
table.header([Points], [Modifications]),
[0], [OEM-style, non-adjustable dampers],
[0.5], [Non-OEM springs],
[0.5], [Adjustable dampers or modification to damper valving],
[0.5], [Remote reservoirs],
)
== Weight Reduction
#table(
table.header([Points], [Modifications]),
[1], [Removal of any interior trim pieces or seating not required for installation of a roll cage],
)
== Free Modifications
#table(
table.header([Points], [Modifications]),
[0], [
- Any in-cabin safety equipment
- Any interior removal as needed to accommodate safety equipment
- Any replacement drop-in air filter for OEM intake
- Gen. 2 charcoal airbox filter removal
- Intake resonator removal
- Aftermarket radiator or oil-cooler
- Aftermarket calipers, rotors, pads or brake ducting
- Any interior removal as needed to accommodate safety equipment
- Any alignment bolt/kit, bump steer kit, tie rod end, roll center adjuster
- Any spherical bushings including strut top mounts
- Any bolt-on chassis bracing
- Any oil-catch can, oil baffle pan
- Any transmission or diff cooling system
- Any lightweight battery
- Any cat-back (no removal of any catalyst) exhaust
]
)
== Prohibited Modifications
#table(
table.header([Points], [Modifications]),
[∞], [
- Active aero devices
- Active dampers
- Aftermarket ABS or TCS, or modified driver assist firmware
- Air suspension
- Any advanced driver-assistance systems (ADAS) system
- Any non-aluminum or steel wheels (e.g. carbon fiber, magnesium, titanium)
- Any non-pump-gasoline or non-ethanol/gasoline fuel
- Cutting, removing, or reshaping exterior parts not required for pointed modifications
- Changing suspension type (e.g. double A-arm, inboard shocks/springs) is prohibited
],
)
|
|
https://github.com/soul667/typst | https://raw.githubusercontent.com/soul667/typst/main/PPT/typst-slides-fudan/themes/polylux/book/src/dynamic/syntax.md | markdown | # General syntax for `#only` and `#uncover`
Both functions are used in the same way.
They each take two positional arguments: the first is a description of the
subslides the content is supposed to be shown on, the second is the content itself.
Note that Typst provides some syntactic sugar for trailing content arguments,
namely putting the content block _behind_ the function call.
You could therefore write:
```typ
{{#include only-uncover.typ:5:}}
```
...resulting in

(Note again that the gray border is not part of the slides and that the labels
indicating the index of the subslide are also added afterwards.)
You can clearly see the difference in behaviour between `only` and `uncover`.
In the first line, "after" moves but not in the second line.
In this example, we specified only a single subslide index, resulting in content
that is shown on that exact subslide and at no other one.
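
If you prefer to type a small example yourself instead of compiling the included
file, a minimal sketch could look like the following (the wording of the content
is made up here; only the `#only` and `#uncover` calls matter):

```typ
Some text #only(2)[that only exists on subslide 2] and some text after it.

#uncover(2)[This line is invisible on subslide 1, but its space is reserved.]
```

Just as above, the text after the `#only` call moves between subslides, while
the text around `#uncover` stays in place.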
Let's explore more complex rules next.
|
|
https://github.com/jneug/typst-codetastic | https://raw.githubusercontent.com/jneug/typst-codetastic/main/bitfield.typ | typst | MIT License |
#import "bits.typ"
// TODO: This probably should be improved / optimized.
/// Creates a new bitfield of the given dimensions and
/// initializes its values with the given init function.
///
/// A bitfield stores bits in an array of arrays. The value
/// at `bitfield.at(i).at(j)` is the bit in row `i`, column `j`.
///
/// #arg[init] is a function that takes the row and column index
/// of a bit and returns a #dtype("boolean") to initialize the bit.
#let new(n, m, init:(i,j) => false) = {
return range(n).map(
i => range(m).map(j => init(i,j))
)
}
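
// Example (illustrative sketch, not part of the library): a 3x3 field with the
// main diagonal set to true, queried through `at` (defined further below).
//
//   #let diag = new(3, 3, init: (i, j) => i == j)
//   #at(diag, 1, 1)   // true
//   #at(diag, 0, 2)   // false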
#let from-str(..str) = {
return str.pos().map(bits.from-str)
}
#let at(field, i, j) = field.at(i).at(j)
#let map(a, func) = {
return a.enumerate().map(((i, c)) => c.enumerate().map(((j, d)) => func(i, j, d)))
}
#let band(a, b) = {
return a.enumerate().map(((i, c)) => bits.band(c, b.at(i)))
}
#let bor(a, b) = {
return a.enumerate().map(((i, c)) => bits.bor(c, b.at(i)))
}
#let bxor(a, b) = {
return a.enumerate().map(((i, c)) => bits.bxor(c, b.at(i)))
}
#let compose(a, b, at:(0,0), center:false) = {
let (m,n) = (
b.len(), b.first().len()
)
let (x,y) = at
if center {
(x, y) = (
x - int(m/2), y - int(n/2)
)
}
for i in range(m) {
for j in range(n) {
a.at(x + i).at(y + j) = b.at(i).at(j)
}
}
return a
}
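
// Example (illustrative sketch): stamp a 2x2 block of set bits into an empty
// 6x6 field, centered on row 3, column 3. With `center: false` the block's
// top-left corner would be placed at `at` instead.
//
//   #let canvas = new(6, 6)
//   #let stamp = new(2, 2, init: (i, j) => true)
//   #let result = compose(canvas, stamp, at: (3, 3), center: true)
//   // rows 2-3, columns 2-3 of `result` are now true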
|
https://github.com/Myriad-Dreamin/typst.ts | https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/fuzzers/corpora/visualize/shape-aspect_00.typ | typst | Apache License 2.0 |
#import "/contrib/templates/std-tests/preset.typ": *
#show: test-page
// Test relative width and height and size that is smaller
// than default size.
#set page(width: 120pt, height: 70pt)
#set align(bottom)
#let centered = align.with(center + horizon)
#stack(
dir: ltr,
spacing: 1fr,
square(width: 50%, centered[A]),
square(height: 50%),
stack(
square(size: 10pt),
square(size: 20pt, centered[B])
),
)
|
https://github.com/arthurcadore/eng-telecom-workbook | https://raw.githubusercontent.com/arthurcadore/eng-telecom-workbook/main/semester-7/COM_1/homework3/homework.typ | typst | MIT License | #import "@preview/klaro-ifsc-sj:0.1.0": report
#import "@preview/codelst:2.0.1": sourcecode
#show: doc => report(
title: "Modulação de Fase e Quadratura (IQ)",
subtitle: "Sistemas de Comunicação I",
authors: ("<NAME>",),
date: "07 de Abril de 2024",
doc,
)
= Introdução
\
O objetivo deste relatório, é apresentar os conceitos teóricos para a transmissão de dados utilizando modulação de fase e quadratura (IQ) através de dois diferentes sinais modulados por senos e cossenos.
\
Neste relatório, serão apresentados a fundamentação teórica, os conceitos teóricos utilizados, a análise dos resultados, os scripts e códigos utilizados, as conclusões e as referências bibliográficas. Serão utilizados dois sinais modulantes distintos: o primeiro irá modular um sinal senoidal (que será a portadora) e o segundo irá modular um sinal cossenoidal.
\
Desta forma, poderemos verificar a eficiecia da modulação IQ na transmissão de dados, utilizando a mesma região do espectro eletromagnético para transmitir dois sinais distintos sem interferência (idealmente).
= Fundamentação teórica
== Principais Conceitos
\
Os principais conceitos teóricos abordados neste relatório são:
- Modulação (IQ): A modulação IQ é uma técnica de modulação que utiliza duas portadoras de mesma frequência, uma cossenoidal e uma senoidal (defasadas de 90°), para transmitir dois sinais idealmente sem interferência. A modulação IQ é amplamente utilizada em sistemas de comunicação modernos, como o 4G e o 5G, devido à sua eficiência espectral e capacidade de transmitir dados de forma confiável.
- Ortogonalidade: A ortogonalidade é um conceito fundamental na modulação IQ, que garante que os sinais modulados por senos e cossenos sejam independentes e não interfiram uns nos outros. Isso permite que os dados sejam transmitidos de forma eficiente e confiável, mesmo em ambientes ruidosos. \
- Sinal Portador: A portadora é o que permite a transmissão do sinal, suas caracteristicas são ideiais para o meio onde a onda irá se propagar, a portadora é influenciada pela modulante de maneira em que o receptor consiga extrair a informação transmitida.
- Sinal Modulante: A modulante é a informação que será transmitida, ela é a responsável por alterar as caracteristicas da portadora, de maneira que o receptor consiga extrair a informação transmitida.
- Processo de Modulação/Demodulação: O processo de modulação consiste em alterar (normalmente através de multiplicação) as características da portadora de acordo com a modulante, de maneira em que o receptor consiga extrair a informação transmitida. Nos processos de modulação vistos até o momento, realizamos através de mutiplicação (ou no caso da FM, através de integração do argumento de fase). Já o processo de demodulação consiste em, uma vez com o sinal modulado, transmitido, e recebido no destinatário, extrair a informação do sinal portador, de maneira em que o receptor consiga interpretar a informação transmitida e utiliza-la a nivel de aplicação (como na rádio FM por exemplo, onde a música é recebida interpretada e reproduzida no carro).
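
A título de ilustração do princípio de ortogonalidade citado acima, o trecho a seguir (apenas um esboço ilustrativo, que não faz parte dos scripts desta atividade, com valores de frequência arbitrários) verifica numericamente que o produto de um seno por um cosseno de mesma frequência tem média nula sobre um número inteiro de períodos, enquanto o produto de um sinal por ele mesmo não tem:

#sourcecode[```matlab
% Esboço ilustrativo: verificação numérica da ortogonalidade seno/cosseno
fs = 48000;              % taxa de amostragem (valor arbitrário)
fc = 1000;               % frequência comum às duas "portadoras"
t  = 0:1/fs:1-1/fs;      % exatamente 1 segundo -> número inteiro de períodos

c = cos(2*pi*fc*t);
s = sin(2*pi*fc*t);

mean(c .* s)   % ~0   -> portadoras ortogonais (não interferem entre si)
mean(c .* c)   % ~0.5 -> um sinal não é ortogonal a ele mesmo
```]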
== Resumo dos Itens abordados (Material de Referência)
\
Além dos conceitos base apresentados acima, também está abaixo um resumo dos principais conceitos teóricos das sessões 5.1, 5.4, 5.5, 5.6, 5.7, 5.8, 5.9 e 5.10 do livro "Software Defined Radio Using MATLAB & Simulink and the RTL-SDR":
=== Item 5.1 (Real and Complex Signals — it’s all Sines and Cosines)
\
- Objetivo: Introduzir os conceitos de sinais reais e complexos e sua representação em termos de senos e cossenos.
O processamento de sinais lida com sinais reais do mundo físico, como sinais de tensão analógicos que representam. Esses sinais variam ao longo do tempo e podem assumir valores positivos e negativos.
\
Um exemplo de sinal análogico, é oque é recebido por uma antena, que gera um sinal de tensão em resposta ao campo eletromagnético induzido ao seu redor, ou microfone captando ondas sonoras, e assim gerando um sinal de tensão analógico em resposta às variações na pressão do ar.
\
Em sistemas de comunicação analógicos e digitais, os sinais são frequentemente representados através de uma soma de exponenciais complexas; contudo, os sinais complexos não existem efetivamente no mundo real. Eles são representados assim na matemática para facilitar as operações, manipulações e conclusões, e são usados apenas para fins de análise, ou seja, nenhum sinal puramente complexo (imaginário) é gerado ou transmitido na prática.
\
Para ilustrar sua importância, o texto do autor sugere tentar derivar as equações trigonométricas dos sinais analógicos sem a utilização de sinais complexos, e assim, perceber a dificuldade e a complexidade que seria necessária para realizar esse tipo de manipulação.
=== Item 5.4 (Quadrature Modulation and Demodulation (QAM)
\
- Objetivo: Apresentar a modulação e demodulação em quadratura (QAM) e sua aplicação em sistemas de comunicação.
\
Nesta sessão, entramos no ponto de representação 'complexa', onde podemos introduzir a demodulação de amplitude em quadratura (QAM) realizada pelo RTL-SDR.
\
O objetivo da transmissão em QAM é alcançar uma tecnica de modulação mais eficiente em largura de banda, em outras palavras, consumir (idealmente) a mesma largura no espectro elétromagnético para encaminhar mais informação.
\
Podemos utilizar como exemplo a modulação em quadratura, onde transmitimos dois sinais de tensão, ambos modulados na mesma frequência de portadora, mas com a fase da portadora separada por 90°, sendo um uma onda senoidal e a outra uma onda cossenoidal (por convenção são sinais de seno e cosseno, para que a representação complexa seja a mais simples possível).
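
Apenas como referência (seguindo a convenção usual, coerente com os scripts apresentados mais adiante), o sinal transmitido em quadratura pode ser escrito como:

$ s(t) = i(t) dot cos(2 pi f_c t) + q(t) dot sin(2 pi f_c t) $

onde $i(t)$ e $q(t)$ são os dois sinais modulantes e $f_c$ é a frequência da portadora.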
=== Item 5.5 (Quadrature Amplitude Modulation using Complex Notation)
\
- Objetivo: Apresentar a modulação de amplitude em quadratura (QAM) usando notação complexa.
\
Nesta sessão do livro, o autor apresenta a notação complexa para simplificar a análise matemática de um sistema QAM (Quadrature Amplitude Modulation). Inicialmente, é observado que o sistema QAM é composto apenas por sinais reais (ou seja, sem a adição de componentes imaginários), e, portanto, não utiliza notação complexa.
\
No entanto, ao introduzir a notação complexa, o equacionamento e tratamento dos sinais se torna mais fácil, pois podemos decompor o sinal em uma soma de componentes complexos.
\
=== Item 5.6 (Quadrature Amplitude Demodulation using Complex Notation)
\
- Objetivo: Apresentar a demodulação de amplitude em quadratura (QAM) usando notação complexa.
Nesta sessão, é descrito que o processo de demodulação do sinal QAM também pode ser convenientemente expresso usando notação de sinais complexos.
Isto é, após a multiplicação do sinal, o sinal recebido no receptor pode ser representado por uma função base (que identifica o sinal modulante) multiplicado por um deslocamento na frequência através da multiplicação de uma exponencial complexa.
=== Item 5.7 (Spectral Representation for Complex Demodulation)
\
- Objetivo: Apresentar a representação espectral para a demodulação complexa.
\
Nesta sessão, o autor descreve o processo de modulação e demodulação de sinais complexos, onde são utilizados sinais reais independentes para formar um sinal complexo. Apartir do sinal complexo gerado, são aplicadas transformadas de Fourier (FFT) para analisar os espectros de magnitude dos sinais.
\
Durante a modulação, o sinal modulante é deslocado em frequência para ser centrado em torno de uma frequência específica (a frequência da portadora). Já no processo de demodulação, os sinais real e imaginário são filtrados por um filtro (tipicamente um filtro passa-baixas, por ser mais simples que um passa-faixas e, portanto, mais barato), com as especificações de frequência e ordem adequadas para recuperar o sinal modulante complexo.
=== Item 5.8 (Frequency Offset Error and Correction at the Receiver)
\
- Objetivo: Apresentar o erro de deslocamento de frequência e sua correção no receptor.
\
Nesta sessão, o texto apresenta um possível erro no receptor devido
ao desvio de frequência em relação à frequência ideal para a recepção do sinal modulado.
\
Esse desvio é ilustrado na equação de demodulação do receptor, que é modificada para compensar esse erro, de maneira em que o receptor se sintonize novamente com a frequência central onde o sinal está sendo transmitido.
\
De acordo com o autor, este tipo de erro é comum em receptores RTL-SDR, onde a frequência de recepção pode ser desviada por algumas centenas de Hz ou até kHz devido a tolerâncias nos componentes do receptor que variam as impedâncias imaginárias causando descasamento, portanto, sendo necessário um ajuste compensar este erro em software.
=== Item 5.9 (Frequency Correction using a Complex Exponential)
\
- Objetivo: Apresentar a correção de frequência usando uma exponencial complexa.
\
Nesta sessão, o autor explica sobre a coreção de frequência comentada na sessão anterior para compensar casos de desvio de frequência, através da multiplicação do sinal complexo de entrada no receptor por um valor específico complexo.
\
Em seguida, o ajuste do desvio pode incluir ajuste da fase do sinal da portadora, sincronização e/ou seções de temporização, seguidas pelas seções de decodificação do sinal, porem, as etapas que o sinal sofrerá para ser ajustado irão depender do tipo de sinal que está tentando ser captado.
\
Nesta sessão, o autor também comenta que as orientações apresentadas são válidas para cenários idealizados. Ou seja, as operações realizadas assumem operações matemáticas perfeitas e ignoram os efeitos do mundo real ao transmitir o sinal no meio. No entanto, essas operações são úteis para entender o processo de ajuste de frequência e a correção de erros de desvio de frequência.
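
Na prática, a correção descrita nesta seção costuma se resumir a uma única multiplicação por uma exponencial complexa. O trecho abaixo é apenas um esboço ilustrativo, com um desvio de frequência hipotético e valores arbitrários, não pertencente aos scripts desta atividade:

#sourcecode[```matlab
% Esboço ilustrativo: correção de um desvio de frequência conhecido/estimado
fs = 240000;                  % taxa de amostragem (valor arbitrário)
t  = 0:1/fs:0.01-1/fs;        % janela curta de análise
f_offset = 1200;              % desvio estimado no receptor, em Hz (hipotético)

% "sinal_rx" representaria o sinal complexo em banda base entregue pelo receptor;
% aqui ele contém apenas o próprio desvio, para manter o exemplo mínimo
sinal_rx = exp(1j*2*pi*f_offset*t);
sinal_corrigido = sinal_rx .* exp(-1j*2*pi*f_offset*t);   % remove o desvio
```]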
=== Item 5.10 (RTL-SDR Quadrature / Complex Architecture)
\
- Objetivo: Apresentar a arquitetura quadratura/complexa do RTL-SDR.
\
Nesta sessão, o autor descreve a arquitetura do RTL-SDR, que é um receptor de software definido que utiliza modulação em quadratura para receber sinais de rádio.
\
Primeiro descreve o processo de modulação e transmissão pelo meio de comunicação (no caso de RadioFrequência, o meio wireless). Em seguida, no lado do receptor, descreve sobre o sinal sendo recebido e demodulado pelo circuito RTL-SDR, e depois passado para algum interpretador de sinais, como o MATLAB ou Simulink.
\
Neste ponto, as amostras lidas no MATLAB ou Simulink correspondem ao sinal de saída (sinal demodulado), e podem ser lidas e compreendidas através de ilustrações do modelo de sistema apresentado na Figura 5 do livro.
= Análise dos resultados
\
Inicialmente, foi feita a importação dos sinais de áudio que serão utilizados como modulantes para a transmissão. Como os sinais de áudio utilizados não possuem exatamente o mesmo comprimento, foram renomeados como *sinal curto* e um *sinal longo*, ambos em formato .wav, e posteriormente através do comprimento dos vetores, um corte foi realizado no sinal com o maior comprimento, para torna-los iguais em termos de duração.
\
A figura abaixo mostra o sinal de áudio curto que será utilizado como modulante da portadora cosseno (em vermelho) e o sinal de áudio longo que será utilizado como modulante da portadora seno (em preto).
\
Na figura abaixo também está representado o sinal de áudio curto no domínio da frequência, através da aplicação de uma transformada de Fourier, é possível visualizar a distribuição da amplitude do sinal no espectro de frequência.
#figure(
outlined: true,
image("./pictures/modulante.png", width: 100%),
caption: [Sinais de áudio aplicados como modulantes (domínio do tempo e da frequência). \ Figura elaborada pelo autor],
supplement: "Figura"
);
Em seguida, um sinal de portadora foi gerado a partir da definição de frequência e amplitude do sinal (40000Hz e 1 respectivamente), e posteriormente, a modulação AM foi realizada para cada sinal de áudio, utilizando a portadora correspondente a cada sinal.
\
Neste processo é importante notar que o primeiro sinal de portadora foi gerado a partir de um cosseno, equanto que o segundo sinal de portadora foi gerado a partir de um seno. Essa diferença entre os dois sinais de base é vital para a transmissão dos sinais modulados pelo mesmo meio de transmissão.
\
Isso ocorre pois os sinais de cosseno e seno são ortogonais entre si, ou seja, não interferem um no outro (idealmente), e por isso, podem ser transmitidos simultaneamente na mesma frequência sem interferência. Caso fosse utilizado dois sinais não ortogonais entre si, haveria interferência entre os mesmos no momento da transmissão no mesmo meio, oque impossibilitaria a distinção entre eles no receptor, e portanto, a informação iria se perder.
#figure(
outlined: true,
image("./pictures/portadora.png", width: 100%),
caption: [Portadoras geradas e sinais modulados. \ Figura elaborada pelo autor],
supplement: "Figura"
);
Uma vez com o sinal modulado, e multiplexado, podemos transmiti-lo pelo meio físico sem que haja interferência entre cada portadora (idealmente). O sinal no meio físico é ilustrado abaixo em azul.
\
Sua FFT também é apresentada, de maneira que é possível visualizar a distribuição de frequência dos sinais multiplexados no espectro de frequências. Note que a FFT do sinal multiplexado é a soma das FFTs dos sinais modulados, o que é esperado, pois a soma na multiplexação é uma operação linear, e portanto, a FFT do sinal multiplexado é a soma das FFTs dos sinais modulados individuais.
\
Desta forma, é possivel verificar que as amplitudes máximas do sinal multiplexado são mais "largas" do que as de cada sinal modulante individualmente. Isso ocorre pois a soma da FFT dos dois sinais modulantes irá representar uma distribuição de frequência mais ampla do que a de cada sinal modulante individualmente.
#figure(
outlined: true,
image("./pictures/multiplexado.png", width: 100%),
caption: [Sinal multiplexado no domínio do tempo e da frequência. \ Figura elaborada pelo autor],
supplement: "Figura"
);
Em seguida, com o sinal já transmitido, é necessário realizar sua recepção e demodulação no receptor. Para isso, o sinal é multiplicado pela portadora correspondente a cada sinal modulante (o que está visível no script MATLAB que será apresentado mais adiante).
\
Posteriormente, com o sinal demodulado (ou seja, retornado a sua frequência de banda base), devemos aplicar um filtro no sinal para garantir que o mesmo seja limpo e que a informação possa ser extraída de maneira correta.
\
Para isso, foi utilizado um filtro FIR de ordem relativamente alta (neste caso 100), com frequência de corte de 20kHz. A frequência neste script foi fixada em 20kHz, pois trata-se de um sinal de áudio, e portanto, não há informação relevante acima desta frequência para ser capturada.
\
Para verificar se de fato o filtro está atuando corretamente, foi plotado a resposta em frequência do filtro FIR para cada sinal demodulado, as respostas estão exibidas em vermelho (cosseno) e preto (seno) respectivamente.
\
#figure(
outlined: true,
image("./pictures/respostafiltro.png", width: 100%),
caption: [Resposta em frequência dos filtros FIR utilizados na demodulação. \ Figura elaborada pelo autor],
supplement: "Figura"
);
\
Com o sinal demodulado e filtrado, podemos verificar se distorções ocorreram indevidamente no sinal, ou se a informação foi corretamente extraída. Para isso, foi plotado o sinal demodulado (coluna esquerda na imagem abaixo) e apenas filtrado para cada sinal modulante (coluna direita na imagem abaixo).
\
Para garantir que o mesmo instante de tempo fosse plotado, um janelamento foi realizado no sinal de entrada, de maneira que apenas o intervalo de tempo entre 30% e 70% da duração do sinal fosse plotado; assim, está ilustrado o mesmo instante de tempo antes da modulação e após a demodulação do sinal de áudio.
#figure(
outlined: true,
image("./pictures/demodulado.png", width: 100%),
caption: [Comparação entre os sinais demodulados/filtrados e os sinais originais. \ Figura elaborada pelo autor],
supplement: "Figura"
);
Outro parâmetro importante a ser analisado é a densidade espectral de potência dos sinais modulantes e do sinal multiplexado. Como podemos ver na imagem abaixo, está plotada a densidade dos sinais modulantes (cosseno e seno) e também do sinal multiplexado.
\
Note que a densidade do sinal multiplexado está deslocada para a direita, isso ocorre pois após a modulação, a frequência do sinal é deslocada para a frequência da portadora, e portanto, a densidade espectral de potência do sinal modulado é deslocada para a frequência da portadora.
#figure(
outlined: true,
image("./pictures/densidade.png", width: 100%),
caption: [Densidade espectral de potência dos sinais modulantes e do sinal multiplexado. \ Figura elaborada pelo autor],
supplement: "Figura"
);
= Scripts e Codigos
\
Abaixo estão os scripts e códigos utilizados para a realização deste relatório:
== Etapa 1 - Import do sinal e definições:
\
Inicialmente, foi feita a importação dos sinais de áudio que serão utilizados como modulantes para a transmissão. Em seguida, foi feita a transposição e o corte dos sinais de áudio para transformá-los em vetores com o mesmo comprimento.
\
Em seguida, foi calculado o vetor de tempo a partir do tamanho do menor sinal, e a FFT dos sinais de entrada foi calculada para amostrar seu estado no domínio da frequência.
\
Por fim, foi plotado os sinais de entrada no domínio do tempo e da frequência, para visualizar a amplitude e a distribuição de frequência dos sinais modulantes.
#sourcecode[```matlab
% IQ transmission of two diferent audio signals.
% IQ - In-Phase and Quadrature Modulation
clc; clear all; close all
pkg load signal
% Definição dos parâmetros da portadora do sinal IQ:
carrier_amplitude = 1;
carrier_frequency = 40000;
% Coletando os sinais para transmissão:
[short_signal, Fs] = audioread('short-signal.wav');
[long_signal, Fs2] = audioread('long-signal.wav');
% Fazendo a transposição linha/coluna do sinal de entrada
short_signal = transpose(short_signal);
long_signal = transpose(long_signal);
% pegando a duração da transmissão a partir do tamanho do menor sinal;
duracao = length(short_signal)/Fs;
% calculando vetor de t no dominio do tempo;
Ts = 1/Fs;
t=[0:Ts:duracao-Ts];
% Igualando o comprimento dos sinais ao vetor de tempo
signal_cos = short_signal(1:length(t));
signal_sin = long_signal(1:length(t));
% calculando o passo no dominio da frequência;
f_step = 1/duracao;
% vetor "f" correspondente ao periodo de análise (dominio da frequência);
f = linspace(-Fs/2, Fs/2-f_step, length(signal_cos)); % eixo em Hz com o mesmo comprimento dos sinais
% calculando a FFT do sinal de entrada (que será utilizado no cosseno):
signal_cos_F = fft(signal_cos)/length(signal_cos);
signal_cos_F = fftshift(signal_cos_F);
% calculando a FFT do sinal de entrada (que será utilizado no seno):
signal_sin_F = fft(signal_sin)/length(signal_sin);
signal_sin_F = fftshift(signal_sin_F);
% Plot dos sinais de entrada (dominio do tempo e frequência):
figure(1)
subplot(221)
plot(t,signal_cos,'r')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante da Portadora Cosseno (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(223)
plot(t,signal_sin,'k')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante da Portadora Seno (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(222)
plot(f,abs(signal_cos_F), 'r')
title('Sinal Modulante da Portadora Cosseno (Frequency domain)')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
subplot(224)
plot(f,abs(signal_sin_F), 'k')
title('Sinal Modulante da Portadora Seno (Frequency domain)')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
```]
== Etapa 2 - Modulação IQ dos sinais:
Uma vez com os sinais de entrada (modulantes) importados e seus respectivos sinais gerados, podemos criar os sinais de portadora para a transmissão de maneira ortogonal.
Em seguida foi realizada a modulação AM dos sinais de áudio com sua respectiva portadora (seno e cosseno), e posteriormente, a multiplexação dos sinais foi realizada somando os sinais modulados.
Após a multiplexação, o sinal no dominio do tempo foi plotado no domínio do tempo e da frequência, para visualizar a amplitude e a distribuição de frequência do sinal multiplexado.
#sourcecode[```matlab
% Criando dos sinais de portadora para transmissão ortogonal (um com seno e outro com cosseno):
carrier_cos = carrier_amplitude*cos(2*pi*carrier_frequency*t);
carrier_sin = carrier_amplitude*sin(2*pi*carrier_frequency*t);
% Realizando a modulação AM do sinal de aúdio na portadora correspondente a cada sinal:
modulated_cos = signal_cos .* carrier_cos;
modulated_sin = signal_sin .* carrier_sin;
% Realizando a multiplexação do sinal (a partir do principio de ortogonalidade):
multiplexed_signal = modulated_cos + modulated_sin;
% Calculando a FFT do sinal para amostrar seu estado no dominio da frequência:
multiplexed_signal_F = fft(multiplexed_signal)/length(multiplexed_signal);
multiplexed_signal_F = fftshift(multiplexed_signal_F);
figure(2)
subplot(221)
plot(f,carrier_cos,'r', 'LineWidth', 2)
xlim([0 100*f_step])
title('Portadora Cosseno')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
subplot(223)
plot(f,carrier_sin,'k', 'LineWidth', 2)
xlim([0 100*f_step])
title('Portadora Seno')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
subplot(222)
plot(t,modulated_cos,'r')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Cossenoidal Modulado (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(224)
plot(t,modulated_sin,'k')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Senoidal Modulado (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
% Verificando o sinal multiplexado:
figure(3)
subplot(211)
plot(t,multiplexed_signal,'b')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal multiplexado')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(212)
plot(f,abs(multiplexed_signal_F), 'b')
title('Sinal multiplexado (Frequency domain)')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
```]
== Etapa 3 - Demodulação dos sinais:
Uma vez com o sinal multiplexado gerado, simulando que o sinal foi transmitido pelo meio físico e recebido no receptor, é necessário realizar a demodulação do sinal.
\
Para isso, no receptor o sinal é multiplicado novamente pelo sinal da portadora correspondente a cada sinal modulante, e posteriormente, um filtro FIR é aplicado para garantir que o sinal seja limpo e que a informação possa ser extraída de maneira correta.
\
Em seguida, para podermos aproveitar o sinal recebido, precisamos realizar uma filtragem para retirar qualquer possivel ruído que possa ter sido adicionado durante a transmissão.
\
Para isso, definimos um filtro passa baixa (20KHz) de alta ordem (100) para realizar a filtragem do sinal demodulado. Para visualizarmos se o filtro corresponde as especificações necessárias, um plot da resposta em frequência do filtro FIR para cada sinal demodulado foi realizado.
#sourcecode[```matlab
% Realizando a demodulação do sinal no receptor:
demodulated_cos = multiplexed_signal .* carrier_cos;
demodulated_sin = multiplexed_signal .* carrier_sin;
% Ordem do filtro FIR
filtro_ordem = 100;
% Frequência de corte do filtro FIR
% Como trata-se de um sinal de áudio, a frequência de corte pode ser fixada em 20kHz
frequencia_corte = 20000;
% Coeficientes do filtro FIR para cada sinal demodulado
coeficientes_filtro = fir1(filtro_ordem, frequencia_corte/(Fs/2));
% Resposta em frequência do filtro FIR para cada sinal demodulado
[H_cos, f_cos] = freqz(coeficientes_filtro, 1, length(t), Fs);
[H_sin, f_sin] = freqz(coeficientes_filtro, 1, length(t), Fs);
% Plot da resposta em frequência dos filtros:
figure(5)
subplot(211)
plot(f_cos, abs(H_cos), 'r', 'LineWidth', 3)
xlim([0 frequencia_corte*1.1])
title('Resposta em Frequência do Filtro FIR Cossenoidal')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
subplot(212)
plot(f_sin, abs(H_sin), 'k', 'LineWidth', 3)
xlim([0 frequencia_corte*1.1])
title('Resposta em Frequência do Filtro FIR Senoidal')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
```]
== Etapa 4 - Filtragem:
\
Uma vez com o filtro FIR definido, podemos aplicar a filtragem do sinal demodulado para garantir que a informação seja extraída de maneira correta.
#sourcecode[```matlab
% Filtragem dos sinais demodulados
demodulated_cos_filtered = filter(coeficientes_filtro, 1, demodulated_cos);
demodulated_sin_filtered = filter(coeficientes_filtro, 1, demodulated_sin);
% Calculando a FFT dos sinais demodulados para amostrar seu estado no dominio da frequência:
demodulated_sin_F = fft(demodulated_sin_filtered)/length(demodulated_sin_filtered);
demodulated_sin_F = fftshift(demodulated_sin_F);
demodulated_cos_F = fft(demodulated_cos_filtered)/length(demodulated_cos_filtered);
demodulated_cos_F = fftshift(demodulated_cos_F);
% Plot dos sinais demodulados
figure(4)
subplot(221)
plot(t, demodulated_cos_filtered, 'r')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante (Portadora Cosseno) Demodulado (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(223)
plot(t, demodulated_sin_filtered, 'k')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante (Portadora Seno) Demodulado Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(222)
plot(f,abs(demodulated_cos_F), 'r')
title('Sinal Modulante (Portadora Cosseno) Demodulado (Frequency domain)')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
subplot(224)
plot(f,abs(demodulated_sin_F), 'k')
title('Sinal Modulante (Portadora Seno) Demodulado (Frequency domain)')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
```]
== Etapa 5 - Comparando com o sinal original:
\
Em seguida, os dois sinais demodulados no dominio do tempo foram comparados com os sinais de entrada, para verificar se a informação foi corretamente modulada, transmitida e demodulada, e portanto, se a informação foi preservada.
#sourcecode[```matlab
% Comparando sinal transmitido com sinal recebido:
figure(4)
subplot(221)
plot(t, demodulated_cos_filtered, 'r')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante (Portadora Cosseno) Demodulado (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(223)
plot(t, demodulated_sin_filtered, 'k')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante (Portadora Seno) Demodulado Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(222)
plot(t,signal_cos,'r')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante da Portadora Cosseno (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
subplot(224)
plot(t,signal_sin,'k')
xlim([(duracao*0.3) (duracao*0.7)])
title('Sinal Modulante da Portadora Seno (Time domain)')
xlabel('Tempo (s)')
ylabel('Amplitude')
```]
== Etapa 6 - Calculando a densidade espectral de potência:
#sourcecode[```matlab
% Calculando a densidade espectral dos sinais:
figure(7)
subplot(221)
plot(pwelch(signal_cos), 'r', 'LineWidth', 3);
xlim([0 200])
title('Densidade Espectral do Sinal Modulante (Portadora Cosseno)')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
xlim([0 100])
subplot(222)
plot(pwelch(signal_sin), 'k', 'LineWidth', 3);
xlim([0 200])
title('Densidade Espectral do Sinal Modulante (Portadora Seno)')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
xlim([0 100])
subplot(2, 2, [3 4])
plot(pwelch(multiplexed_signal), 'b', 'LineWidth', 3);
xlim([0 100])
title('Densidade Espectral do Sinal Multiplexado')
xlabel('Frequência (Hz)')
ylabel('Magnitude')
```]
= Conclusão
\
A partir dos conceitos vistos e dos resultados apresentados, podemos concluir que a modulação IQ é uma técnica eficiente para a transmissão de dados, pois permite a transmissão de dois sinais distintos na mesma frequência sem interferência (idealmente), ao mesmo tempo em que é uma técnica razoavelmente simples de ser implementada, pois necessita apenas de dois sinais ortogonais de base para agirem como portadoras.
Desta forma, a modulação IQ permitiu a transmissão dos dois sinais de áudio pela mesma frequência de transmissão base (40000Hz), sem que houvesse interferência entre os sinais (idealmente), e portanto, aproveitando de melhor maneira o espectro eletromagnético.
Assim, podemos compreender o motivo pelo qual essa técnica de transmissão por ortogonalidade vem sendo aplicada nas tecnologias mais atuais de transmissão wireless, como OFDM e OFDMA, pela capacidade de aproveitar da melhor maneira possível o espectro eletromagnético disponível, e portanto, transmitir mais dados em menos tempo e com menos interferência.
= Referências
\
Para o desenvolvimento deste relatório, foi utilizado o seguinte material de referência:
\
\
- #link("https://www.researchgate.net/publication/287760034_Software_Defined_Radio_using_MATLAB_Simulink_and_the_RTL-SDR")[Software Defined Radio Using MATLAB & Simulink and the RTL-SDR, de <NAME>] |
https://github.com/MrHedmad/kerblam-paper | https://raw.githubusercontent.com/MrHedmad/kerblam-paper/main/src/main.typ | typst | #import "template.typ": *;
#import "@preview/subpar:0.1.1";
#show: project.with(
title: "Structuring data analysis projects in the Open Science era with Kerblam!",
authors: (
(
name: "<NAME>",
shortname: "L.V.",
email: "<EMAIL>",
affiliation: "*",
orcid: "0000-0003-2568-5694",
corresponding: true
),
(
name: "<NAME>",
shortname: "L.M.",
email: "<EMAIL>",
affiliation: "*",
orcid: "0000-0001-9247-4446",
corresponding: false
),
(
name: "<NAME>",
shortname: "F.A.R.",
email: "<EMAIL>",
affiliation: "*",
orcid: "0000-0002-3084-0380",
corresponding: false
),
),
affiliations: (
(id: "*", value: "Department of Life Sciences and Systems Biology, University of Turin, Turin, Italy"),
),
// Insert your abstract after the colon, wrapped in brackets.
// Example: `abstract: [This is my abstract...]`
abstract: [
Structuring data analysis projects, that is, defining the layout of files and folders needed to analyze data using existing tools and novel code, largely follows personal preferences. In this work, we look at the structure of several data analysis project templates and find little structural overlap. We highlight the parts that are similar between them, and propose guiding principles to keep in mind when one wishes to create a new data analysis project. Finally, we present Kerblam!, a project management tool that can expedite project data management, execution of workflow managers, and sharing of the resulting workflow and analysis outputs. We hope that, by following these principles and using Kerblam!, the landscape of data analysis projects can become more transparent, understandable, and ultimately useful to the wider community.
],
// Disable this once you are done
work_in_progress: false
)
/*
--- A NOTE ABOUT FIGURES AND MULTICOLUMNS ---
A limitation in typst right now is that an environment cannot be broken.
For this reason, if text is in a #columns(...)[] environment you cannot place a
multi-column spanning figure in there (it will span just one column).
There is an issue tracking this addition:
https://github.com/typst/typst/issues/553
However, it seems that fixing it will be quite difficult for how typst currently works.
This is the reason why when a #figure is added, the columns environment is temporarily broken.
A side effect is that the text is broken where the figure is placed.
Layout will require manual intervention later on, once the text is all written.
*/
= Introduction
Data analysis is a key step in every scientific experiment.
In numerical-data-centric fields, data analysis is, in essence, a series of computational steps in which input data is processed by software to produce some output.
Usually, the ultimate goal of such analyses is to create secondary data for human interpretation in order to produce knowledge and insight on the raw input data.
These manipulations can involve downloading input data onto local storage, creating workflows and novel software---also saved locally---and running the analysis on local or remote ("cloud") hardware.
In this article, we will use the phrase “data analysis project structure” to refer to the way data analysis projects are organized on the actual file system, including the structure of folders on disk, the places where data, code, and workflows are stored, and the format in which the project is shared with the public.
Unfortunately, such structures can vary a lot from one researcher to another, making it hard for the public to inspect and understand them.
With the Open Science movement gaining more and more traction in the recent years @bertramOpenScience2023, there is a growing need to standardize how routine data analysis projects are structured and carried out.
Notably, even though originally conceived to provide guidelines for the management of data, FAIR principles have recently been extended to other contexts, such as software @barkerIntroducingFAIRPrinciples2022.
In this view, by making data analyses more transparent and intelligible, the standardization of project structure complies with the FAIR principles' philosophy of more Findable, Accessible, Interoperable, and Reusable research objects @wilkinsonFAIRGuidingPrinciples2016.
Consistently, efforts are being made from many parts to make reproducible pipelines easier to be created and executed by the wider public---for example by leveraging methods such as containerization.
However, while new tools and technologies offer unprecedented opportunities to make the whole process of data analysis increasingly transparent and reproducible, their usage still takes time and effort, as well as expertise and sensitivity to the issues of standardization and reproducibility on the part of the experimenter.
In this work, we inspect the structure of many data science and data analysis project templates currently available online.
Then, we outline best practices and considerations to take into account when thinking about structuring data analysis projects.
Following these principles, we propose a simple, lightweight and extensible project structure that fits many needs and is in line with the projects already present in the ecosystem, thus providing a certain level of standardization.
Finally, we introduce Kerblam!, a new tool that can be used to work in projects with this standard structure, taking care of common tasks like data retrieval and cleanup, as well as workflow management and containerization support.
This could ultimately benefit the scientific community by making others' work easier to understand and reproduce, for example during the peer review process.
= Data Collection
To fetch the structure of the most common data analysis projects, we ran two GitHub searches: one for the keywords _cookiecutter_ and _data_ (`cookiecutter` is a Python package that allows users to create, or "cut", new projects from templates) and the other for the much more generic keywords _project_ and _template_.
We downloaded the top 50 such repositories from each search sorted by GitHub stars, as a proxy for popularity and adoption rate.
For each downloaded project, we either cut it with the `cookiecutter` Python package or used it as-is (for non-cookiecutter templates).
Of these $100$ repositories, $87$ could ultimately be successfully cut and parsed, and were therefore considered.
All files and folders from the resulting projects were then listed and compiled into a frequency graph.
Some housekeeping files (like the `.git` directory and all its content) were stripped from the final search results as they were deemed not relevant to the project as a whole.
For example, `.gitkeep` files---which are commonly used to commit empty directories to version control---were excluded from the final figure.
Finally, only files present in at least three or more templates were retained for plotting.
The analysis was performed with the latest commits of all the considered repositories as of the 12th of July, 2024.
The only exception was the "drivendataorg/cookiecutter-data-science" repository, for which we fetched version `1.0` due to non-standard parsing requirements of the latest commit.
The code for the analysis is available online.
See the "Code and Data availability" section for more information.
= Data Interpretation
The choice of how to structure projects is an issue universally shared by anyone who performs data analysis.
This results in a plethora of different tools, folder hierarchies, accepted practices, and customs.
To explore the most common project-structuring practices, we inspected $87$ different project templates available on GitHub and produced a frequency graph of shared files and folders visible in @fig:frequency_graph.
#figure(
image("resources/images/plot.png"),
caption: [Frequency graph of the structure of the $87$ most starred data analysis project templates, as retrieved from GitHub. Only files present in at least three or more templates are shown. The size and color intensity of the circle at the tip of each link is proportional to the frequency with which this file or folder is found in different project templates. Red text represents files, while blue text represents folders. The central dot of the root node was assigned an arbitrary size.],
placement: top
) <fig:frequency_graph>
By looking at this figure, we can point out common patterns in project structure.
It must be highlighted, however, that templates influence each other.
For example, many Python data science project templates seem to be modified versions of #link("https://github.com/drivendataorg/cookiecutter-data-science")[drivendataorg/cookiecutter-data-science], which has a very high number of stars and is therefore probably popular with the community.
In any case, the two most highly found files are the `README.md` file (with a frequency of $77 / 87 tilde.eq 0.89$) and the `LICENSE` and `LICENSE.md` files ($(46 + 3)/ 87 tilde.eq 0.56$).
The `pyproject.toml` file at the top level of the repository---marking the project as a Python package---is also prevalent ($16 / 87 tilde.eq 0.18$).
This is potentially due to the popular "cookiecutter-data-science" template mentioned before, and it also highlights how projects following this template are intimately linked with the usage of Python, potentially exclusively so.
The predominance of Python-based projects is also evident from the presence of `requirements.txt` (a file usually used to store Python's package dependencies), `setup.py`, and `setup.cfg` (now obsolete versions of the `pyproject.toml` file, used to configure Python's build system).
The `project` folder at the top level of the templates is most likely the Python package (represented by the `__init__.py` file) that the `pyproject.toml` file refers to (the name "project" is artificial, deriving from the default way that cookiecutter templates were cut).
The presence of files related to the R programming language (the `R` directory, `.Rbuildignore`, `README.Rmd`) reflects its usage in the data analysis field, although at a lower frequency than that of Python.
The relatively low prevalence of the R programming language could be due to biases introduced by the search queries, or to the overwhelming popularity of Python project templates, especially given that the cookiecutter utility itself is written in Python.
Community-relevant files such as `CONTRIBUTING.md` ($8/87 tilde.eq 0.09$) and `CODE_OF_CONDUCT.md` ($ 5/87 tilde.eq 0.06$) are rarely included in templates.
This is also true for the `CITATION.cff` ($4/87 tilde.eq 0.05$) file, useful for machine-readable citation data.
The `src` ($31/87 tilde.eq 0.36$), `data` ($35/87 tilde.eq 0.40$), and `docs` ($28/87 tilde.eq 0.32$) folders are strongly represented, containing code, data, and project documentation, respectively.
In particular, the `data` directory frequently contains the `raw`, `processed`, `interim`, and `external` folders to host the different data types---input, output, intermediate, and third party---according to the structure promoted by the "cookiecutter-data-science" template.
The prevalence of these sub-folders, however, is lower than the frequency of `data` itself, which means that the presence of the `data` folder is not uniquely due to that specific template.
Interestingly, other templates include `data` in the `src` folder, mixing it with analysis code.
Other common folders present in the `src` directory are also the ones promoted by "cookiecutter-data-science", but again, as already noted for `data`, their occurrence is lower than that of the parent folder, indicating that many different templates adopt `src` as a folder name.
Docker-related files are present, mostly in the top-level of the project: `Dockerfile` ($ 5/87 tilde.eq 0.06$), `.dockerignore` ($4/87 tilde.eq 0.05$), and `docker-compose.yml` or `yaml` ($(6 + 1)/87 tilde.eq 0.08$).
Docker-related files and folders are also present with sub-threshold frequencies in many other forms, often as directories with multiple Dockerfiles in different folders.
The presence of the `docker-compose.yml` file and docker subdirectories could be indicative of a common need to manage multiple execution environments---that work together in the case of Docker Compose---throughout the analysis.
The sparse usage of many tools can be appreciated by the amount of unique files and folders across all templates.
Out of $4195$ different files and directories considered by this approach, the vast majority ($3908$, or $93.16%$) are present in only one template.
Looking at directories only, $783$ are unique over $864$ total ($90.63%$).
This figure might be inflated due to the presence of some compiled libraries, files, and Git objects that are included in the analysis and not correctly removed by our filtering.
However, we argue that this overwhelmingly high uniqueness would not be significantly affected by manual filtering.
The tiny overlap between templates reflects the fact that project structure is, by its nature, a matter of personal preference.
At the same time, @fig:frequency_graph confirms that the core structure of the repositories tends to be similar.
This is potentially due to both the epistemic need to share one's own work with others and technical requirements of research tools, which cause the adoption of community standards either by choice (in the former case) or imposition (in the latter).
For instance, the near-universal presence of the `README.md` file reflects a community standard that is broadly shared by the majority of software developers, users, and researchers alike.
This adoption is purely due to practical reasons: specifically the need to share the description of the work with others in an obvious ("please read me"), logical (in the topmost layer of the project layout), and predictable (i.e., used by the wider community) way.
Borrowing a term from genetics, the `README` file can be thought of as a "housekeeping" file: without it, the usefulness of a project is severely impaired.
In this regard, another possible housekeeping candidate is the `LICENSE` file.
It is essential for collaborating with the community in the Open Source paradigm, and thus commonly found in many software packages.
Permitting code reuse is also essential in data analysis projects, both to allow reproducers to replay the initial work and to let other researchers build on top of previous knowledge.
Incidentally, the common presence of the `LICENSE` file in the _template_ of a project is interesting.
This could be due either to apathy toward licensing issues, leading to picking a "default license" without particular consideration, or to a general feeling that one particular license fits all of one's projects.
A potential new housekeeping file that is not yet commonly found is the `CITATION.cff` file.
This file contains machine-readable citation metadata, which both human and machine users could use to obtain citation information, potentially automatically.
= Intervention
== Design Principles
The observations made above can all be taken into consideration when designing a more broadly applicable project template that may be used in a variety of contexts.
To this end, it is helpful to conceptualize some core guiding principles that should be followed by all data analysis projects, in particular under the Open Science paradigm.
As data analysis projects often involve writing new software, a data analysis project structure requires support for both _data analysis_ proper and _software development_.
The methods of software development fall outside the scope of this work, but some concepts are useful in the context of data analysis, particularly for _ad hoc_ data analysis.
For instance, many programming languages require specific folder layouts in order to create self-contained, distributed software.
Take, for instance, the Python programming language: to create a Python package, a specific project layout has to be followed @PackagingPythonProjects.
This is also visible in @fig:frequency_graph: the "project" folder and many files specific to Python packages appear, crucially, in the locations required by Python build backends.
Something similar occurs for many programming languages, such as R @PackageStructureState and Rust @CreatingNewPackage, among others.
However, a researcher might not want to create self-contained, distributed software.
Languages like Python and R can interpret and execute single-file scripts which achieve some goal (i.e., "scripting").
As scripting is so fast, convenient, and easy to do, it is the most common method of doing data analysis.
Scripting provides much flexibility during the development process, but this typically exacerbates the fragmentation of project structures.
In particular, the execution environment becomes much more relevant: which packages are installed and at which versions, the order in which the scripts are read and executed, and potentially even _which lines_ are (manually) run, and in what order, all become important to the success of the overall analysis.
This increased flexibility is obviously useful for the research process, which requires the ability to change quickly in order to adapt to new findings, especially during hypothesis-generating "exploratory" research.
The principles presented here aim to retain this essential requirement of adaptability, but, at the same time, push for increased standardization of methods, avoiding the most common and dangerous pitfalls one can encounter during the process of data analysis.
=== 1. Use a version control system
At its core, software is a collection of text files, and this includes data analysis software.
While working on code, it is important to record the differences between the different versions of these files.
This is very useful, especially during the research process, to "retrace our steps" or to attempt new methodologies without the fear of losing any previous work.
Such records are also useful as provenance information, and potentially as proof of authorship, similarly to what a laboratory notebook does for a "wet-lab" experimental researcher.
There is consolidated software that can be used as a version control system.
The overwhelming majority of projects use `git` for this purpose, although alternatives exist.
Platforms that integrate `git`, such as GitHub (#link("https://github.com")[github.com]) and GitLab (#link("https://gitlab.com")[gitlab.com]), are increasingly used for data analysis, both as collaboration tools during the project and as sharing platforms afterwards.
The first principle should therefore be this: *use a version control system*, such as `git`.
A few practical observations stem from this principle:
- version control encourages good development practices, such as atomic commits, meaningful commit messages, and more, reducing the number of mistakes made while programming and increasing efficiency by making debugging easier;
- version control discourages the upload of very large (binary) files, so input and output data cannot be efficiently shared through such a system, incentivizing the deposit of data in online archives and---by extension---favoring the FAIR-ness of the manipulated data objects;
- code collaboration and collaboration techniques (such as "GitHub Flow" or "trunk based development" @appletonStreamedLinesBranching1998 @GitHubFlow) can be useful to promote a more efficient development workflow in data analysis disciplines such as bioinformatics, especially in mid- to large-sized research groups;
- the core unit of a project should be a code repository, containing everything related to that project---from code, to documentation, to configuration.
The usage of a version control system has implications also for FAIR-ness.
Leveraging remote platforms can be fundamental for both Findability and Accessibility.
Integrations of platforms like GitHub with archives like Zenodo, for instance, allow developers to easily archive their data analysis code for long-term preservation, promoting Accessibility, Findability, and Reusability.
=== 2. Documentation is essential
When working on a data analysis project, documentation is important for both the experimenter themselves and any possible external users.
Through ideal documentation, the rationale, the process, and potentially the result of the analysis are presented to the user, together with practical steps on how to _actually_ reproduce the work.
As with all other aspects of data analysis, documentation takes many different forms, but is the most difficult thing to standardize for one simple reason: documentation is written by humans for human consumption.
Documentation is therefore allowed high flexibility in structure, content, form, and delivery method.
Even though rigid standardization is impossible, some guidelines on how to write effective documentation can still be drawn, oftentimes from best practices in the much wider world of Open Source software.
We have already highlighted the fundamental role of the `README` file and its very wide adoption.
This file contains high-level information about the project and is usually the first---and perhaps only---documentation that all users encounter and read.
It is therefore essential that core aspects of the project are delivered through the `README` file, such as:
- the aim of the project, in clear, accessible language;
- the methods used to achieve such aim (and/or a link to further reading material);
- a guide on how to run the analysis on the user's machine, potentially including information on hardware requirements, software requirements, container deployment methods, and any other information a human reproducer might need to execute the analysis;
- in an Open Science mindset, it is also desirable to include information on how to collaborate on the project and contact information for the authors.
Other aspects of the project, such as a list of contributors, might also be included in the `README` file.
The `README` file may also be named `DESCRIPTION`, although `README` is a much more widely accepted standard.
Additional documentation can be added to the project in many forms (also see @fig:frequency_graph).
A common documentation file is the `CONTRIBUTING` file, with information on how to contribute to the project, on how authorship of eventual publications will be assigned, and other community-level information.
The `CODE_OF_CONDUCT` file contains guidelines and policy on how the project is managed, the expected conduct of project members, and potentially how arising issues between project members are dealt with.
Such a file can be important to either projects open to collaboration from the public or large consortium-level projects.
Another important documentation file in the Open Source community is the `CHANGELOG` file.
It contains information about how the project changed over time and its salient milestones.
For data analysis, it could be used to inform collaborators of important changes in the codebase, methodology or any other news that might be important to announce and record.
Additionally, together with the commit history, `CHANGELOG` files can be useful to track the provenance of the analysis, as we have already mentioned.
A common place to store documentation is the top level of the project repository, but some templates use the `docs` folder, following guidelines from the Python community (for use with tools such as Sphinx @SphinxSphinxDocumentation).
We can conclude by reiterating that the second principle states that *documentation is essential*.
=== 3. Be logical, obvious, and predictable
When a project layout is logical, obvious, and predictable, a human user can easily and quickly understand and interact with it.
To be _logical_, a layout should categorize files based on their content, and logically arrange them following such categories.
To be _obvious_, this categorization should make sense at a glance, even to non-experts.
For instance, a folder named "scripts" should contain scripts (to be obvious) and only scripts (to be logical).
To be _predictable_, a layout should adhere to community standards, so that it "looks" similar to other projects.
This creates minimal friction when a user first encounters the project and desires to interact with it.
This principle also applies to aspects of project structure other than the folder layout.
For instance, the structure of documentation can also benefit from the same principles, but in a different context: logically arranged, obvious in structure, and similar to other projects.
This might be the most difficult principle to follow, as it largely depends on the community as a whole.
For this reason, we hope that the analysis shown above, especially @fig:frequency_graph, and our proposed minimal structure (presented in the next sections) will be useful as guides to effectively implement this principle.
We can summarize this third principle like this: *be logical, obvious, and predictable*.
=== 4. Promote (easy) reproducibility
Scientific Reproducibility has been and still is a central issue, particularly in the field of biomedical research @erringtonChallengesAssessingReplicability2021 @ioannidisWhyMostPublished2005a.
Scientific software developers hold the crucial responsibility towards the scientific community of creating reproducible data analysis software.
"Reproducibility" can be understood as the ability of a third-party user to understand the research issue investigated by the project, how it was addressed, and practically execute the analysis proper again to obtain a hopefully similar and ideally identical result as the original author(s).
This has twofold benefits: a reproducible analysis evokes more confidence in those that read and review it, and it makes it much easier to repurpose the analysis to similar data in the future.
In the modern era, scientists are equipped with powerful tools to enable reproducibility, such as containerization, virtualization, etc.
While a discussion of how reproducibility can be achieved is beyond the scope of this article, the project layout can promote it, especially when all the other principles presented here are respected.
Adoption of reproducible practices can be encouraged by including obvious and easily implementable reproducibility methods directly in the layout of the project.
Workflow managers, like Nextflow @ditommasoNextflowEnablesReproducible2017, Snakemake @molderSustainableDataAnalysis2021, and the Common Workflow Language (CWL) @crusoeMethodsIncludedStandardizing2022 are key tools to enable reproducibility.
They allow a researcher to describe in detail the workflow used, from input files to final output, offloading the burden of execution to the workflow manager.
This allows greater transparency in the methodology used, and even makes reproducibility a possibility in more complex data analysis scenarios.
Additionally, some workflow managers are structured to promote reusability of the analysis code, even on different architectures or in high performance computing environments @crusoeMethodsIncludedStandardizing2022.
We can conclude this section by stating the fourth and last principle: *be (easily) reproducible*.
== Kerblam!
We have designed a very simple but powerful and flexible project layout, together with a project management tool, aimed at upholding the principles outlined in the previous section.
We named this tool "Kerblam!".
#subpar.super(
{
grid(
[#figure(image("resources/images/layout.svg"), caption: []) <fig:kerblam:layout>],
[#figure(image("resources/images/cow.svg"), caption: []) <fig:kerblam:datatypes>],
columns: (1fr, 1fr)
)
[#figure(image("resources/images/flow.svg"), caption: []) <fig:kerblam:flow>]
v(3mm)
},
caption: [Salient concepts implemented by Kerblam!
(a): Basic skeleton of the proposed folder layout for a generic data analysis project associated with relevant Kerblam! commands. Folders are depicted in blue, while files are depicted in red. (b): Data is qualitatively divided into input, output, and temporary data. Input data can be further divided into input data remotely available (i.e., downloadable) and local-only data. The latter is "precious", as it cannot be easily recreated. Other types of data are "fragile", as they may be created again _on the fly_. (c): Overview of a generic Kerblam! workflow.
],
label: <fig:kerblam>
)
Kerblam! is a command-line tool written in Rust that incentivizes researchers to use a common, standardized filesystem structure, adopt containerization technologies to perform data analysis, leverage remote file storage when possible, and create and publish readily executable container images to the public to re-run pipelines for reproducibility purposes (see @fig:kerblam:flow).
These aims support and promote the principles described above.
The most basic skeleton of the project layout implemented by Kerblam! can be seen in @fig:kerblam:layout.
The `kerblam.toml` file contains configuration information for Kerblam! and marks the folder as a Kerblam-managed project.
Kerblam! provides a number of utility features _out of the box_ for projects that adopt the layout presented in @fig:kerblam:layout, or for any other project structure after proper configuration.
=== Data management
Kerblam! can be used to manage a project's data.
Kerblam! automatically distinguishes between input, output, and intermediate data, based on which folder the data files are saved in: the `data` folder contains intermediate data produced during the execution of the workflows, the `data/in` folder contains input data, and similarly `data/out` contains output data.
Furthermore, the user can define in the `kerblam.toml` configuration which input data files can be fetched remotely, and from which endpoint.
This allows Kerblam! to both fetch these files upon request (`kerblam fetch`) and to distinguish between remotely available input files and local-only files.
Local-only files are deemed "precious", as they cannot be easily recreated.
All other data files are "fragile", as they can be deleted without repercussion to save disk space (@fig:kerblam:datatypes).
// Sure "fragile" is a good term?
These distinctions between data types enable further functions of Kerblam!.
`kerblam data` shows the number and size of files of all types, to quickly check how much disk space is being used by the project.
Fragile data can be deleted to save disk space with `kerblam data clean` and precious input data can be exported easily with `kerblam data pack`.
`kerblam data pack` can also be used to export output data quickly to be shared with, for example, colleagues.
Allowing Kerblam! to manage the project's data with these tools can offload several chores usually done manually by the experimenter.
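To make this classification concrete, the sketch below mirrors the idea in Python; the folder names follow the layout described above, the remotely available input name is a made-up example, and this is in no way Kerblam!'s actual implementation (which is written in Rust).
```python
# Conceptual sketch only: input files listed as remotely available can be
# re-fetched, all other files under `data/in` are "precious", and the
# remaining files under `data` are "fragile".
from pathlib import Path

def classify(data_dir: Path, remote_inputs: set[str]) -> dict[str, list[Path]]:
    groups: dict[str, list[Path]] = {"precious": [], "fragile": []}
    input_dir = data_dir / "in"
    for f in data_dir.rglob("*"):
        if not f.is_file():
            continue
        if input_dir in f.parents and f.name not in remote_inputs:
            groups["precious"].append(f)  # local-only input: cannot be recreated
        else:
            groups["fragile"].append(f)   # re-downloadable or re-computable
    return groups

# Example: report how much disk space each class of data uses.
for kind, files in classify(Path("data"), remote_inputs={"counts.csv"}).items():
    print(kind, sum(f.stat().st_size for f in files), "bytes")
```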
=== Workflow management
Kerblam! can manage multiple workflows written for any workflow manager.
At its core, Kerblam! can spawn shell subprocesses that then execute the particular workflow manager, potentially one configured by the user.
This allows Kerblam! to manage _other_ workflow managers, making them transparent to the user and providing a single access point.
Kerblam! can also act before and after the workflow manager proper to aid in several tasks.
Firstly, Kerblam! can manage workflows in the `src/workflows` folder _as if_ they were written in the root of the project.
It achieves this by moving the workflow files from said folder to the root of the repository _just before_ execution.
This allows for slimmer workflows which do not crowd the root of the repository or conflict with each other, thus being more consistent.
Secondly, it allows the concept of _input data profiles_.
Data profiles are best explained through an example.
Imagine an input file, `input.csv`, containing some data to be analyzed.
The experimenter may wish to test the workflows that they have written with a similar, but---say---smaller `test_input.csv`.
Kerblam! allows the hot-swapping of these files just before execution of the workflow manager through profiles.
By configuring them in the `kerblam.toml` file, the experimenter can execute a workflow manager (with `kerblam run`) while specifying a profile:
Kerblam! will then swap these two files just before and just after the execution of the workflow, so that exactly the same workflow runs seamlessly on different input data, in this case for testing purposes.
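The mechanism can be pictured with the following sketch, which only illustrates the swapping idea; the file names are taken from the example above, and neither Kerblam!'s actual Rust implementation nor the exact `kerblam.toml` profile syntax is reproduced here.
```python
# Conceptual illustration of a data profile: the profile file temporarily takes
# the place of the real input, and everything is restored afterwards.
import os
from contextlib import contextmanager

@contextmanager
def with_profile(original: str, replacement: str):
    backup = original + ".profile.bak"
    os.rename(original, backup)        # set the original input aside
    os.rename(replacement, original)   # the workflow now sees the profile file
    try:
        yield
    finally:
        os.rename(original, replacement)  # give the profile file its name back
        os.rename(backup, original)       # restore the original input

# Hypothetical usage with the example above:
# with with_profile("data/in/input.csv", "data/in/test_input.csv"):
#     run_workflow()  # the unchanged workflow reads "input.csv" as usual
```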
Kerblam! supports GNU `make` _out of the box_ as its workflow manager of choice.
Indeed, makefiles can be run directly through Kerblam! with no further configuration by the user.
Any other workflow manager can be used by writing tiny shell wrappers with the proper invocation command.
The range of workflow managers supported out-of-the-box by Kerblam! may increase in the future.
=== Containerization support
Containers can be managed directly by Kerblam!.
If container recipes are placed in `src/dockerfiles`, Kerblam! can automatically execute workflow managers inside the corresponding containers, seamlessly mounting data paths and performing other housekeeping tasks before running the container.
As already stated, Kerblam! works "above" workflow managers.
Therefore, the reader might question the usefulness of a containerization wrapper at the level of Kerblam! if the workflow manager of choice already supports it.
This containerization feature is meant to be used when a workflow manager would be inappropriate.
For instance, very small analyses might not warrant the increased development overhead to use tools such as CWL.
Kerblam! allows even shell scripts to be containerized anyway, making even the tiniest analyses reproducible.
With these capabilities, Kerblam! promotes reproducibility and allows even inexperienced users to perform the simplest analyses in a reproducible way.
=== Pipeline export
Workflows managed by Kerblam! with an available container can be automatically exported in a reproducible package through `kerblam package`.
This creates a preconfigured container image ready to be uploaded to a container registry of choice together with a compressed tarball containing information on how to (automatically) replay the input analysis: the "replay package".
The process automatically strips all unneeded project files, leading to small container images.
The replay package can be inspected manually by a potential examiner, and either re-run manually or through the convenience function `kerblam replay`, which recreates the original project layout, fetches the input container, and runs the packaged workflow.
=== The Kerblam! analysis flow
Kerblam! favors a very specific methodology when analyzing data, starting with an empty `git` repository.
First, upload the input data to some remote archive (in theory promoting FAIR-er data).
Then, configure Kerblam! to download the input data, and write code and workflows to analyze it, potentially in isolated containers or with specific workflow management tools.
During development, periodically clean out intermediate and output files to check if the correct execution of the analysis has become dependent on local-only state.
Finally, package the results and pipelines into the respective environments and share them with the wider public (e.g., as a GitHub release or in an archive like Zenodo).
We believe that this methodology is simple yet flexible and robust, allowing for high-quality analyses in a wide variety of scenarios.
=== Availability
Kerblam! is a Free and Open Source Software, available on GitHub at #link("https://github.com/MrHedmad/kerblam")[MrHedmad/kerblam].
Kerblam! is written in Rust and may be compiled to support both GNU/Linux-flavored operating systems and macOS.
Alternatively, GitHub releases provide pre-compiled artifacts for both these operating systems.
Support for Windows machines is untested at the time of writing.
The full documentation to Kerblam! is available at #link("https://kerblam.dev/")[kerblam.dev].
Active support for Kerblam! and its future development are guaranteed for the foreseeable future.
= Conclusions
Structuring data analysis projects is a personal matter, heavily dependent on the preference of the individual(s) who run the analysis.
Nevertheless, best practices arise and can be identified in this fragmented landscape.
With this work, we aimed to provide such guidelines, and we included a robust tool that leverages the regularity of such a standardized layout.
As the proposed layout is, for all intents and purposes, largely arbitrary, Kerblam! can be configured to work with any layout.
Through these and potentially future standardization efforts, tools such as containerization and workflow managers can become more mainstream and even routine, leading to an overall more mature and scientifically rigorous way to analyze data of any kind.
/// -----------------------------------------------
= Code and Data availability
The raw data fetched by the analysis of project templates (e.g., list of fetched repositories, detected frequencies, etc.) are available on Zenodo at the following DOI: #link("https://doi.org/10.5281/zenodo.13627214")[10.5281/zenodo.13627214].
The code for the analysis is available on GitHub at #link("https://github.com/MrHedmad/ds_project_structure")[MrHedmad/ds_project_structure] and is archived on Zenodo with DOI: #link("https://zenodo.org/doi/10.5281/zenodo.13627322")[10.5281/zenodo.13627322].
Kerblam! is available on GitHub at #link("https://github.com/MrHedmad/kerblam") and archived at every release in Zenodo at DOI: #link("https://zenodo.org/doi/10.5281/zenodo.10664806")[10.5281/zenodo.10664806].
Its documentation is available at #link("https://kerblam.dev/").
= Author's contributions
Conceptualization: L.V., L.M., and F.A.R.; Software: L.V.; Methodology: L.V. and F.A.R.; Resources and Funding Acquisition: L.M.; Writing - Original Draft: L.V., L.M., and F.A.R.; Supervision: L.M. and F.A.R.
/// -----------------------------------------------
/*
#pagebreak(weak: true)
#set heading(numbering: "A.1", supplement: "Appendix")
#counter(heading).update(0)
= Full path frequency table
<ax:freq_table>
#todo[Include here the full table (as a link maybe) to the `result.csv` file of the analysis.]
*/
#pagebreak()
#bibliography("resources/refs.bib", style: "resources/nature-no-superscript-square.csl")
|
|
https://github.com/voXrey/cours-informatique | https://raw.githubusercontent.com/voXrey/cours-informatique/main/typst/06-graphes-pondérés.typ | typst | #import "@preview/codly:0.2.1": *
#show: codly-init.with()
#codly()
#set text(font: "Roboto Serif")
= Weighted Graphs <graphes-pondérés>
== I - Mathematically <i---mathématiquement>
== III - Finding a minimum-weight path in a weighted graph <iii---recherche-de-chemin-de-poids-minimal-dans-un-graphe-pondéré>
=== 1. Dijkstra's algorithm <algorithme-de-dijkstra>
==== Aside: the "bag" structure <parenthèse-structure-de-sac>
A bag provides these 4 functions:
- `create`
- `add`
- `take`
- `is_empty`
```python
def whatever_first_search(G, s):
    n = len(G)
    visited = [False] * n
    bag = create()
    add(bag, s)
    while not is_empty(bag):
        u = take(bag)
        if not visited[u]:
            visited[u] = True
            for v in G[u]:  # every neighbor of u
                add(bag, v)
```
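Although the notes do not state it explicitly, the choice of bag is what decides the traversal; the Python sketch below (added for illustration) shows three interchangeable bags: a stack yields a depth-first search, a FIFO queue a breadth-first search, and a priority queue keyed on path weight yields Dijkstra's algorithm.
```python
from collections import deque
import heapq

# Three interchangeable "bags" for whatever_first_search.
stack_bag = dict(create=list, add=lambda b, x: b.append(x),
                 take=lambda b: b.pop(), is_empty=lambda b: not b)        # DFS
queue_bag = dict(create=deque, add=lambda b, x: b.append(x),
                 take=lambda b: b.popleft(), is_empty=lambda b: not b)    # BFS
priority_bag = dict(create=list, add=lambda b, x: heapq.heappush(b, x),
                    take=lambda b: heapq.heappop(b), is_empty=lambda b: not b)  # Dijkstra/Prim
```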
==== Description of the algorithm <description-de-lalgorithme>
```python
def dijkstra(G, s):
    n = len(G)
    pq = priority queue with every vertex at priority +infinity
    update pq s 0
    while pq is not empty:
        u = extract_min pq
        record dist[u] = prio(u)
        for each neighbor v of u:
            min_d = prio(u) + pond(u, v)
            if v in pq and min_d < prio(v):
                update pq v min_d
    return the array of distances
```
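A runnable Python version of the same idea is sketched below; it replaces the updatable priority queue with a binary heap and lazy deletion, and assumes `G[u]` is a list of `(v, weight)` pairs.
```python
import heapq

def dijkstra(G, s):
    """G[u] is a list of (v, weight) pairs; returns the array of distances from s."""
    n = len(G)
    dist = [float("inf")] * n
    dist[s] = 0
    pq = [(0, s)]                      # (tentative distance, vertex)
    while pq:
        d, u = heapq.heappop(pq)       # extract_min
        if d > dist[u]:
            continue                   # stale entry: skipped instead of updated
        for v, w in G[u]:
            nd = d + w                 # prio(u) + pond(u, v)
            if nd < dist[v]:
                dist[v] = nd
                heapq.heappush(pq, (nd, v))
    return dist
```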
==== Correctness of the algorithm <correction-de-lalgorithme>
#quote(
  block: true,
)[
  Lemma: Let s and t be two vertices of a directed, weighted graph G. Let c be a path $s = u_0 , u_1 , dots , u_k = t$ of minimal weight from s to t. Then for every i, the path $s = u_0 , dots , u_i$ is of minimal weight from s to $u_i$.
  Proof: If there were a better path from s to $u_i$, then we would obtain a better path from s to t.
]
Invariants
- If prio\(u) is different from $+infinity$, then there exists a path $c : s -> u$ of weight prio\(u)
- If $x in.not p q$, then for every neighbor w of x we have prio\(w) $lt.eq$ prio\(x) + pond\(x, w)
- When u leaves the queue, prio\(u) \= $delta lr((s , u))$
Preservation of the invariants
Assume u (the vertex just extracted from the queue) is different from s.
Consider an optimal path c from s to u.
- On this path, let w be the first vertex that is in the queue (it exists because u is in the queue)
- On this path, let x be the predecessor of w (it exists because s $in.not$ pq, so s is different from w)
$delta lr((s , w)) lt.eq "prio" lr((w))$ by the first invariant.
$x in.not p q$, since w is the first vertex of the path to be in pq.
So, by the 2nd invariant, $"prio" lr((w)) lt.eq "prio" lr((x)) + "pond" lr((x , w))$, where $"prio" lr((x)) = delta lr((s , x))$ by the 3rd invariant.
By the lemma, the prefix of the path c from s to w is optimal, hence of weight $delta lr((s , w))$.
The same holds for x, so $delta lr((s , w)) = delta lr((s , x)) + "pond" lr((x , w))$.
#strong[Putting everything together:]
$delta lr((s , w)) lt.eq "prio" lr((w)) lt.eq delta lr((s , x)) + "pond" lr((x , w)) = delta lr((s , w))$, hence $"prio" lr((w)) = delta lr((s , w))$.
And since $u =$ extract_min(pq),
we have $"prio" lr((u)) <= "prio" lr((w)) = delta lr((s , w)) = delta lr((s , u)) - "pond" lr((c_2))$, where $c_2$ is the portion of c from w to u.
By the first invariant $delta lr((s , u)) lt.eq "prio" lr((u))$, so $delta lr((s , u)) lt.eq delta lr((s , u)) - "pond" lr((c_2))$,
hence $"pond" lr((c_2)) = 0$ and therefore $"prio" lr((u)) = delta lr((s , u))$.
The invariant is thus maintained.
==== Complexity <complexité>
Before the loop: $O lr((n))$ to initialize the priority queue.
While loop: executed exactly once per vertex.
Extracting the minimum: $O lr((log lr((n))))$.
For each neighbor: $O lr((log lr((n))))$ because of the priority update.
#strong[Finally:]
$
O(n) + sum_(u in V)(O(log(n)) + sum_(v in "neighbors")O(log(n))) &= O(n) + sum_(u in V)(O(log(n)) + O(log(n)) d_+(u)) \
&= O(n log(n)) + O(m log(n)) \
&= O(log(n)(n+m))
$
==== Conclusion <conclusion>
- For a given vertex s, the algorithm yields the minimal weights and shortest paths from s to every $t in V$.
=== 2. The Floyd-Warshall algorithm <algorithme-de-floyd-warshall>
==== Introduction <introduction>
- We work with the adjacency matrix
- We want to determine all the shortest paths from s to t, $forall lr((s , t))$.
==== First idea - Adapting the matrix product <première-idée---adaptation-du-produit-matriciel>
Let us try to adapt the matrix-power method. Let A be the adjacency matrix of the graph and set:
$A_(i j) = + infinity "if" lr((i , j)) in.not E$
$A_(i j) = "pond"lr((i , j))$
$A_(j j) = 0$
We would like $A_(i j)^k$ to be the minimal weight of a path of length at most k from i to j.
$A_(i j)^k = min_(l = 0)^(n - 1) lr((A_(i l)^(k - 1) + A_(l j)))$
Complexity
Assuming the modified matrix multiplication runs in $O lr((n^3))$, computing $A^n$ takes $O lr((n^3 log lr((n))))$.
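As an illustration (added here, not part of the original notes), the modified "min-plus" product and the repeated-squaring computation can be written as:
```python
def min_plus_product(A, B):
    """C[i][j] = min over l of A[i][l] + B[l][j]: the 'modified' matrix product."""
    n = len(A)
    return [[min(A[i][l] + B[l][j] for l in range(n)) for j in range(n)]
            for i in range(n)]

def min_plus_power(A, k):
    """A^k in the min-plus sense, by repeated squaring: O(n^3 log k)."""
    result, base, k = A, A, k - 1
    while k > 0:
        if k % 2 == 1:
            result = min_plus_product(result, base)
        base = min_plus_product(base, base)
        k //= 2
    return result
```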
==== Description of the algorithm <description-de-lalgorithme-1>
In a similar way, we split the problem "go from i to j along a path of minimal weight" into subproblems "go from i to j #strong[using only the vertices in \[0, k-1\]] with minimal weight".
We define $p m_(i j)^k$ as the minimal weight of a path from i to j whose intermediate vertices all lie in $lr([0 , k - 1])$.
$p m_(i j)^0 = A_(i j)$
$p m_(i j)^k = min lr(
  (p m_(i j)^(k - 1) , p m_(i , k - 1)^(k - 1) + p m_(k - 1 , j)^(k - 1))
)$
Complexity: $O lr((n^3))$
Pseudo-code: we apply the recipe from the dynamic programming course
- Creating the table
  - It is an `int array array array`
  - Convention: $T . lr((i)) . lr((j)) . lr((k)) = p m_(i j)^k$
  - Base case: easy
```ocaml
for i
for j
T.(i).(j).(0) <- ...
```
- Filling the table: be careful not to get the order of the loops wrong
```ocaml
for k = 1 to ...
for i = 0 to ...
for j = 0 to ...
T.(i).(j).(k) <- ...
```
Space saving: `T.(i).(j)` becomes a 2D table.
Invariant: $T . lr((i)) . lr((j)) = p m_(i j)^(k - 1)$
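A runnable Python version of this recurrence, already using the 2D table from the space optimisation (`A` is the adjacency matrix with `0` on the diagonal and `float("inf")` for missing edges), is sketched below.
```python
def floyd_warshall(A):
    """Returns the matrix of minimal path weights between all pairs of vertices."""
    n = len(A)
    T = [row[:] for row in A]          # T[i][j] = pm(i, j) with no intermediate vertices
    for k in range(n):                 # now allow vertex k as an intermediate vertex
        for i in range(n):
            for j in range(n):
                if T[i][k] + T[k][j] < T[i][j]:
                    T[i][j] = T[i][k] + T[k][j]
    return T
```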
|
|
https://github.com/LaPreprint/typst | https://raw.githubusercontent.com/LaPreprint/typst/main/lapreprint.typ | typst | MIT License | #let template(
// The paper's title.
title: "Paper Title",
subtitle: none,
// An array of authors. For each author you can specify a name, orcid, and affiliations.
// affiliations should be content, e.g. "1", which is shown in superscript and should match the affiliations list.
// Everything but but the name is optional.
authors: (),
// This is the affiliations list. Include an id and `name` in each affiliation. These are shown below the authors.
affiliations: (),
// The paper's abstract. Can be omitted if you don't have one.
abstract: none,
// The short-title is shown in the running header
short-title: none,
// The short-citation is shown in the running header, if set to auto it will show the author(s) and the year in APA format.
short-citation: auto,
// The venue is show in the footer
venue: none,
// An image path that is shown in the top right of the page. Can also be content.
logo: none,
  // A DOI link, shown in the header on the first page. Should be just the DOI, e.g. `10.10123/123456`, not a URL
doi: none,
heading-numbering: "1.a.i",
// Show an Open Access badge on the first page, and support open science, default is true, because that is what the default should be.
open-access: true,
// A list of keywords to display after the abstract
keywords: (),
// The "kind" of the content, e.g. "Original Research", this is shown as the title of the margin content on the first page.
kind: none,
// Content to put on the margin of the first page
// Should be a list of dicts with `title` and `content`
margin: (),
paper-size: "us-letter",
// A color for the theme of the document
theme: blue.darken(30%),
// Date published, for example, when you publish your preprint to an archive server.
// To hide the date, set this to `none`. You can also supply a list of dicts with `title` and `date`.
date: datetime.today(),
// Feel free to change this, the font applies to the whole document
font-face: "Noto Sans",
// The path to a bibliography file if you want to cite some external works.
bibliography-file: none,
bibliography-style: "apa",
// The paper's content.
body
) = {
/* Logos */
let orcidSvg = ```<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 24 24"> <path fill="#AECD54" d="M21.8,12c0,5.4-4.4,9.8-9.8,9.8S2.2,17.4,2.2,12S6.6,2.2,12,2.2S21.8,6.6,21.8,12z M8.2,5.8c-0.4,0-0.8,0.3-0.8,0.8s0.3,0.8,0.8,0.8S9,7,9,6.6S8.7,5.8,8.2,5.8z M10.5,15.4h1.2v-6c0,0-0.5,0,1.8,0s3.3,1.4,3.3,3s-1.5,3-3.3,3s-1.9,0-1.9,0H10.5v1.1H9V8.3H7.7v8.2h2.9c0,0-0.3,0,3,0s4.5-2.2,4.5-4.1s-1.2-4.1-4.3-4.1s-3.2,0-3.2,0L10.5,15.4z"/></svg>```.text
let spacer = text(fill: gray)[#h(8pt) | #h(8pt)]
let dates;
if (type(date) == "datetime") {
dates = ((title: "Published", date: date),)
}else if (type(date) == "dictionary") {
dates = (date,)
} else {
dates = date
}
date = dates.at(0).date
// Create a short-citation, e.g. Cockett et al., 2023
let year = if (date != none) { ", " + date.display("[year]") }
if (short-citation == auto and authors.len() == 1) {
short-citation = authors.at(0).name.split(" ").last() + year
} else if (short-citation == auto and authors.len() == 2) {
short-citation = authors.at(0).name.split(" ").last() + " & " + authors.at(1).name.split(" ").last() + year
} else if (short-citation == auto and authors.len() > 2) {
short-citation = authors.at(0).name.split(" ").last() + " " + emph("et al.") + year
}
// Set document metadata.
set document(title: title, author: authors.map(author => author.name))
show link: it => [#text(fill: theme)[#it]]
show ref: it => [#text(fill: theme)[#it]]
set page(
paper-size,
margin: (left: 25%),
header: locate(loc => {
if(loc.page() == 1) {
let headers = (
if (open-access) {smallcaps[Open Access]},
if (doi != none) { link("https://doi.org/" + doi, "https://doi.org/" + doi)}
)
return align(left, text(size: 8pt, fill: gray, headers.filter(header => header != none).join(spacer)))
} else {
return align(right, text(size: 8pt, fill: gray.darken(50%),
(short-title, short-citation).join(spacer)
))
}
}),
footer: block(
width: 100%,
stroke: (top: 1pt + gray),
inset: (top: 8pt, right: 2pt),
[
#grid(columns: (75%, 25%),
align(left, text(size: 9pt, fill: gray.darken(50%),
(
if(venue != none) {emph(venue)},
if(date != none) {date.display("[month repr:long] [day], [year]")}
).filter(t => t != none).join(spacer)
)),
align(right)[
#text(
size: 9pt, fill: gray.darken(50%)
)[
#counter(page).display() of #locate((loc) => {counter(page).final(loc).first()})
]
]
)
]
)
)
// Set the body font.
set text(font: font-face, size: 10pt)
// Configure equation numbering and spacing.
set math.equation(numbering: "(1)")
show math.equation: set block(spacing: 1em)
// Configure lists.
set enum(indent: 10pt, body-indent: 9pt)
set list(indent: 10pt, body-indent: 9pt)
// Configure headings.
set heading(numbering: heading-numbering)
show heading: it => locate(loc => {
// Find out the final number of the heading counter.
let levels = counter(heading).at(loc)
set text(10pt, weight: 400)
if it.level == 1 [
// First-level headings are centered smallcaps.
// We don't want to number of the acknowledgment section.
#let is-ack = it.body in ([Acknowledgment], [Acknowledgement])
// #set align(center)
#set text(if is-ack { 10pt } else { 12pt })
#show: smallcaps
#v(20pt, weak: true)
#if it.numbering != none and not is-ack {
numbering(heading-numbering, ..levels)
[.]
h(7pt, weak: true)
}
#it.body
#v(13.75pt, weak: true)
] else if it.level == 2 [
// Second-level headings are run-ins.
#set par(first-line-indent: 0pt)
#set text(style: "italic")
#v(10pt, weak: true)
#if it.numbering != none {
numbering(heading-numbering, ..levels)
[.]
h(7pt, weak: true)
}
#it.body
#v(10pt, weak: true)
] else [
// Third level headings are run-ins too, but different.
#if it.level == 3 {
numbering(heading-numbering, ..levels)
[. ]
}
_#(it.body):_
]
})
if (logo != none) {
place(
top,
dx: -33%,
float: false,
box(
width: 27%,
{
if (type(logo) == "content") {
logo
} else {
image(logo, width: 100%)
}
},
),
)
}
// Title and subtitle
box(inset: (bottom: 2pt), text(17pt, weight: "bold", fill: theme, title))
if subtitle != none {
parbreak()
box(text(14pt, fill: gray.darken(30%), subtitle))
}
// Authors and affiliations
if authors.len() > 0 {
box(inset: (y: 10pt), {
authors.map(author => {
text(11pt, weight: "semibold", author.name)
h(1pt)
if "affiliations" in author {
super(author.affiliations)
}
if "orcid" in author {
link("https://orcid.org/" + author.orcid)[#box(height: 1.1em, baseline: 13.5%)[#image.decode(orcidSvg)]]
}
}).join(", ", last: ", and ")
})
}
if affiliations.len() > 0 {
box(inset: (bottom: 10pt), {
affiliations.map(affiliation => {
super(affiliation.id)
h(1pt)
affiliation.name
}).join(", ")
})
}
place(
left + bottom,
dx: -33%,
dy: -10pt,
box(width: 27%, {
if (kind != none) {
show par: set block(spacing: 0em)
text(11pt, fill: theme, weight: "semibold", smallcaps(kind))
parbreak()
}
if (dates != none) {
let formatted-dates
grid(columns: (40%, 60%), gutter: 7pt,
..dates.zip(range(dates.len())).map((formatted-dates) => {
let d = formatted-dates.at(0);
let i = formatted-dates.at(1);
let weight = "light"
if (i == 0) {
weight = "bold"
}
return (
text(size: 7pt, fill: theme, weight: weight, d.title),
text(size: 7pt, d.date.display("[month repr:short] [day], [year]"))
)
}).flatten()
)
}
v(2em)
grid(columns: 1, gutter: 2em, ..margin.map(side => {
text(size: 7pt, {
if ("title" in side) {
text(fill: theme, weight: "bold", side.title)
[\ ]
}
set enum(indent: 0.1em, body-indent: 0.25em)
set list(indent: 0.1em, body-indent: 0.25em)
side.content
})
}))
}),
)
let abstracts
if (type(abstract) == "content") {
abstracts = (title: "Abstract", content: abstract)
} else {
abstracts = abstract
}
box(inset: (top: 16pt, bottom: 16pt), stroke: (top: 1pt + gray, bottom: 1pt + gray), {
abstracts.map(abs => {
set par(justify: true)
text(fill: theme, weight: "semibold", size: 9pt, abs.title)
parbreak()
abs.content
}).join(parbreak())
})
if (keywords.len() > 0) {
parbreak()
text(size: 9pt, {
text(fill: theme, weight: "semibold", "Keywords")
h(8pt)
keywords.join(", ")
})
}
v(10pt)
show par: set block(spacing: 1.5em)
// Display the paper's contents.
body
if (bibliography-file != none) {
show bibliography: set text(8pt)
bibliography(bibliography-file, title: text(10pt, "References"), style: bibliography-style)
}
}
|
https://github.com/yhtq/Notes | https://raw.githubusercontent.com/yhtq/Notes/main/经济学原理/hw/hw7.typ | typst | #import "../../template.typ": *
#show: note.with(
title: "作业7",
author: "YHTQ ",
date: none,
logo: none,
withOutlined: false
)
= C
吉芬品即收入效应(负)大于替代效应(正)的商品
= D
热狗消费量随收入增加先增加后下降
= D
$Q(t K, t L) = t^(1.2)Q(K, L)$
=
可能。假设食物、衣服和第三种商品的效用函数为:
$
u = x + y + 100 (min(z, 2)-2)
$
它是关于每个分量单调的凸函数,因此偏好是良态的。
三种商品的价格都为 $1$
- 显然,当可用收入小于等于 $2$ 时,最优选择是全部投入食物和衣服
- 可用收入超过 $2$ 时,最优选择是全部投入第三种商品
因此假如助教的收入恰为 $2$,那么收入提高时购买的食物和衣服数量都会降低。
//= 讨论题
// (1)
// $
// "MPK"(lambda K, lambda L) = (delta f(lambda K, lambda L))/(delta (lambda K)) = lambda (delta f(K, L))/(delta (K)) (delta(K))/(delta lambda K) = (delta f(K, L))/(delta (K))
// $
// 另一个同理
// (2)
// |
|
https://github.com/pawarherschel/typst | https://raw.githubusercontent.com/pawarherschel/typst/main/src/msg.typ | typst | Apache License 2.0 | #import "template/template.typ": *
#show: layout
#set text(size: 12pt) //set global font size
#let dateSeparator = "/"
#let date = {[#datetime.today().day()] + dateSeparator + [#datetime.today().month()] + dateSeparator + [#datetime.today().year()]}
#let companyName = "Zelis"
#let jobTitle = "RUST Programmer - Data Processing & Back End Development"
#let hiringManager = "<NAME>"
#letterHeader(
myAddress: [Kharghar, \ Navi Mumbai, \ Maharashtra, \ India - 410210],
recipientName: [#companyName],
recipientAddress: [],
date: [#date],
subject: "Subject: Request for Internship Opportunity as a " + jobTitle,
)
Dear #hiringManager,
I recently came across your job posting for the position of #jobTitle at #companyName, and I am writing to express my sincere interest in joining your team.
Although I understand that the job description requires 3–5 years of professional programming experience, including 1–2 years of Rust, I believe that my passion for programming and foundational knowledge in Rust could make me a potential asset for your organization.
Even though I lack the necessary number of years of work experience, I've been focusing on honing my Rust skills by developing my own projects for the past year.
Currently, I am working on a personal project which requires me to develop a data processing pipeline using Rust.
The project's temporary name is `VRCX Insights`.
Specifically, the project focuses on extracting data from an SQLite3 database, processing it to derive meaningful insights, and implementing algorithms to identify potential friend circles within the dataset.
Another personal project involves automating data extraction from a website before REST APIs were readily available for the website.
In this project, I developed a web scraping tool to request data from the target website, parsing through the HTML to extract all relevant information.
I did the proof of concept using python first, but then migrated it to Rust.
In both these projects I used Rayon for parallelizing the workload, but in the `VRCX Insights` project I had to move away from Rayon since I was encountering deadlocks and then rewrote the entire main loop using Tokio with the multithreading runtime.
I believe these endeavors have not only deepened my understanding of Rust but also provided me with practical experience in designing efficient data processing pipelines, which I believe is a crucial skill at Zelis.
While I may not meet the requirements outlined in the job description, I am eager to demonstrate my capabilities and commitment through an internship opportunity at Zelis.
I am confident that with guidance and mentorship, I can quickly adapt and contribute effectively to your team.
I would like to talk more about how my skills and experience match what your team needs.
Furthermore, I would like to ask about scheduling a brief interview at your earliest convenience.
I am flexible and willing to accommodate your schedule.
Thank you for reading my application. I am enthusiastic about the possibility of contributing to Zelis and look forward to the opportunity to discuss how I can contribute to your team.
Sincerely, \
// #letterSignature("/src/signature.png")
<NAME>
#letterFooter()
|
https://github.com/antonWetzel/Masterarbeit | https://raw.githubusercontent.com/antonWetzel/Masterarbeit/main/arbeit/setup.typ | typst | #import "../packages/todo.typ": *
#import "../packages/placeholder.typ": *
#import "../packages/subfigure.typ": *
#import "@preview/cetz:0.2.0" as cetz
#import "@preview/tablex:0.0.8": tablex, colspanx, rowspanx
#import "@preview/wordometer:0.1.0": word-count, total-words
#let black = cmyk(0%, 0%, 0%, 100%)
#let setup(document, print: false) = {
set text(font: "Noto Serif")
// set text(font: "Noto Sans")
let setup_print(document) = {
set text(fill: black)
set table(stroke: black)
set rect(stroke: black)
set line(stroke: black)
show link: it => text(fill: black, it)
document
}
let setup_digital(document) = {
show link: it => text(fill: eastern, it)
show ref: it => text(fill: eastern, it)
document
}
let rule = if print {
setup_print
} else {
setup_digital
}
show: rule
set text(lang: "de", region: "DE", size: 11pt, weight: 400, fallback: false)
show math.equation: set text(font: "Noto Sans Math", weight: 600, fallback: false)
set par(justify: not sys.inputs.at("spellcheck", default: false))
set page(margin: 3cm)
set heading(numbering: (..nums) => {
let nums = nums.pos()
if nums.len() >= 4 {
return none
}
return numbering("1.1", ..nums)
})
show raw: it => text(size: 1.2em, it)
show heading: it => block({
if it.numbering != none and it.level < 4 {
counter(heading).display()
h(13pt)
}
it.body
})
show figure: it => {
v(1em)
align(center, box({
align(center + horizon, it.body)
align(center + horizon, {
box({
set align(left)
set par(hanging-indent: 0.5cm, justify: true)
pad(left: 0.5cm, right: 1cm, it.caption)
})
})
}))
v(1em)
}
show heading.where(level: 1): it => {
pagebreak(weak: true)
it
}
show heading.where(level: 2): it => pad(
top: 0.1cm,
bottom: 0.2cm,
it,
)
set bibliography(style: "chicago-author-date")
document
}
#let style-outline(doc) = {
show outline.entry: it => {
let elem(it, off) = {
v(1.1em, weak: true)
h((it.level - 1 - off) * 2em, weak: false)
if it.body.has("children") {
it.body.children.at(0)
h(10pt)
for x in it.body.children.slice(2) {
x
}
} else {
it.body
}
box(width: 1fr, it.fill)
it.page
};
if it.level == 1 {
v(2.5em, weak: true)
strong(elem(it, 0))
} else {
v(1.15em, weak: true)
elem(it, 1)
}
}
doc
}
#let link-footnote(_link, _body) = {
link(_link, _body)
footnote(raw(_link))
}
#let number = (number, unit: none) => {
let number = str(float(number))
let split = number.split(".")
let res = []
{
let text = split.at(0)
for i in range(text.len()) {
res += text.at(i)
let idx = text.len() - i - 1
if idx != 0 and calc.rem(idx, 3) == 0 {
res += sym.space.thin
}
}
}
if split.len() >= 2 {
res += $,$
let text = split.at(1)
for i in range(text.len()) {
res += text.at(i)
if i != 0 and calc.rem(i, 3) == 0 {
res += sym.space.thin
}
}
}
if unit != none {
res += [ ] + unit
}
return box(res)
}
|
|
https://github.com/seapat/markup-resume-lib | https://raw.githubusercontent.com/seapat/markup-resume-lib/main/utils.typ | typst | Apache License 2.0 | #let parse_isodate(isodate) = {
let date = ""
date = datetime(
year: int(isodate.slice(0, 4)),
month: int(isodate.slice(5, 7)),
day: int(isodate.slice(8, 10)),
)
date
}
#let capitalize(string) = [
#upper(string.at(0))#string.slice(1)
]
#let format_date(entry, date_format, render_settings) = {
// cases:
// if start and end -> set duration
// if start but no end -> until present
// if end but no start -> single date
// release is a shorter endDate, for semantics
let start = if "startDate" in entry.keys() [#parse_isodate(entry.startDate).display(date_format)]
let end = if "endDate" in entry.keys() [#parse_isodate(entry.endDate).display(date_format)] else if start != none { if "present_string" in render_settings.keys() {render_settings.present_string} else {"Present"} }
let release = if "releaseDate" in entry.keys() [#parse_isodate(entry.releaseDate).display(date_format)]
// return value, NOTE: if we put everything in the []-block, newline above would be returned as well
if start != none [#start #sym.dash.en #end] else if end != none {end} else {release}
}
|
https://github.com/BoostCookie/systemd-tshirtd | https://raw.githubusercontent.com/BoostCookie/systemd-tshirtd/main/src/common.origlogo.typ | typst | //#set page(fill: rgb(0%, 0%, 0%, 0%))
//#set page(fill: rgb("#201a26"))
#set page(width: 252mm, height: auto, margin: (top: 0em, right: 0em, bottom: 2em, left: 0em))
#set align(center)
#set text(font: "Roboto", fill: white, weight: "bold")
#let logo_height = 9.6em
#image("./systemd-dark-logo.svg", height: logo_height)
#v(-4.5em)
#text(size: 9em)[systemd-tshirtd]
|
|
https://github.com/pank-su/report_3 | https://raw.githubusercontent.com/pank-su/report_3/master/templates/index.typ | typst | #import "titlepage.typ": titlepage
#import "toc.typ": toc
#import "escd.typ": basicFrame
#let index(
authors: (),
title: "",
body,
) = {
set document(author: authors, title: title)
  // Structure
  // Title page
// titlepage(title: title,
// authors: authors,
// position: "Преподаватель",
// education: "МИНИСТЕРСТВО НАУКИ И ВЫСШЕГО ОБРАЗОВАНИЯ РОССИЙСКОЙ ФЕДЕРАЦИИ\nфедеральное государственное автономное образовательное учреждение высшего образования\n«Санкт-Петербургский государственный университет аэрокосмического приборостроения»", group: "С021к", documentName: "ПОЯСНИТЕЛЬНАЯ ЗАПИСКА К КУРСОВОМУ ПРОЕКТУ", teachers: ("<NAME>",), city: "Санкт-Петербург", discipline: "по дисциплине: МДК.03.01 Разарботка мобильных приложений", department: "ФАКУЛЬТЕТ СРЕДНЕГО ПРОФЕССИОНАЛЬНОГО ОБРАЗОВАНИЯ")
// pagebreak()
  // Table of contents
toc()
// pagebreak()
  // Page settings.
set text(font: "Times New Roman", size: 14pt, lang: "ru", region: "ru", hyphenate: true)
set heading(numbering: "1.1")
set page(margin: (left: 30mm, right: 20mm, top: 20mm, bottom: 25mm), background: basicFrame())
set par(justify: true, leading: 1.2em)
// show heading: set block(below: 16pt, above: 32pt)
show table: set text(hyphenate: true)
show table: set par(justify: false, leading: 0.3em, first-line-indent: 0em)
show grid: set text(hyphenate: true)
show grid: set par(justify: false, leading: 0.3em, first-line-indent: 0em)
show figure.where(kind: image): set figure(supplement: "Рисунок")
show figure.where(kind: table): set figure(supplement: "Таблица")
//set figure(supplement: "Рисунок")
show figure.where(kind: table): set figure.caption(position: top)
show figure.where(kind: table): it => [#align(left, [#it.caption #it.body \ ])]
set figure.caption(separator: [ -- ])
show figure.where(kind: raw): it => [#align(left, [#it.caption #it.body \ ])]
let indent = 1.25cm
set list(marker: [---], indent: indent, tight: true)
show list: it => {
it
par(text(size:0.1em, h(0.0em)))
}
set par(first-line-indent: indent)
show heading: set text(size: 14pt, hyphenate: false)
show heading.where(numbering: "1.1"): it => {
stack(dir: ltr, h(indent), it)
par(text(size:0.35em, h(0.0em)))
}
show heading.where(numbering: none): it => {
it
par(text(size:0.45em, h(0.0em)))
}
//show heading.where(level: 1): it => [#pagebreak() #it]
show "| ": it => [
--- #h(5mm)
]
  // helper: render references to figures as just their number
show ref: it =>{
let el = it.element
if el != none and el.func() == figure {
numbering(
el.numbering,
el.counter.at(el.location()).at(0)
)
} else {
it
}
//repr(el.at("body"))
}
body
}
|
|
https://github.com/EricWay1024/typst-cv | https://raw.githubusercontent.com/EricWay1024/typst-cv/master/resume.typ | typst | #import "./templates/resume.template.typ": *
#show: resume.with(
author: "<NAME>",
// location: "Hill Valley, CA",
contacts: (
[#link("mailto:<EMAIL>")[<EMAIL>]],
// [+44 (0)7856737491],
// [+86 18961356233],
),
)
= Education
#edu(
institution: "University of Oxford",
date: "Oct 2023 - Jun 2024",
location: "Oxford, UK",
degree: "MSc",
degree_title: "Mathematical Sciences (Graduated with Distinction)",
details: [
- Overall mark: 79/100
- Rank among cohort: 3rd/58
- Courses: Category Theory, Algebraic Topology, Homological Algebra, Numerical Linear Algebra, Computational Algebraic Topology, Representation Theory of Semisimple Lie Algebras ]
)
#edu(
institution: "University of Nottingham",
location: "Nottingham, UK",
date: "Sep 2021 - Jun 2023",
)
#v(-20pt)
#edu(
institution: "University of Nottingham Ningbo China",
location: "Ningbo, China",
date: "Sep 2019 - Jun 2021",
degree: "BSc Hons",
degree_title: "Mathematics with Applied Mathematics (Joint programme) (First Class)",
details: [
- Overall mark: 92/100
- Key Courses: Group Theory, Metric and Topological Spaces, Linear Analysis, Scientific Computation and Numerical Analysis, Discrete Mathematics and Graph Theory, Advanced Algorithms and Data Structures
// - Mathematics Group Projects: Prime Number Theorem, Galois Theory.
]
)
// #edu(
// institution: "University of Colombia",
// date: "Aug 1948",
// )
= Mathematical Writings
#project(
title: [Synthetic Homotopy Theory | #link("https://arxiv.org/abs/2409.15693")[arXiv:2409.15693]],
kind: "MSc dissertation",
date: "Apr 2024",
details: [Supervised by <NAME>. Based on Martin-Löf’s dependent type theory and homotopy type theory (HoTT). Presented a synthetic proof of the Blakers--Massey theorem. Mark: 90/100. Awarded the Dissertation Prize by Oxford Mathematics.]
)
#project(
title: [Discrete Morse Theory for Persistent Homology for Sequences of Cosheaves | #link("https://github.com/EricWay1024/Computational-AT-mini-project/releases/download/0.1/main.pdf")[pdf]],
kind: "Mini-project for MSc course Computational Algebraic Topology",
date: "Apr 2024",
  details: [Focused on filtrations of a cosheaf over a finite simplicial complex and on using the Morse chain complex to compute their persistent homology.
Mark: 82/100.
Included by the Past Projects Archive of Oxford Mathematics.
]
)
#project(
title:[Homological Algebra | #link("https://github.com/EricWay1024/Homological-Algebra-Notes/releases/download/v1.44/hom-alg-notes-color.pdf")[pdf]],
kind: "Revised lecture notes for MSc course Homological Algebra",
date: "Jan 2024",
details: [
Main results include derived functors, the balancing of $"Ext"$ and $"Tor"$, Koszul (co)homology and group (co)homology.
]
)
#project(
title: [Where’s <NAME>? | #link("https://arxiv.org/abs/2410.02422")[arXiv:2410.02422]],
kind: "Undergraduate research project",
date: "Sep 2023",
details: [Joint work with <NAME> and <NAME>. Developed a 2D optimisation benchmark with 957,174 local optima based on Great Britain terrain data. Submitted to _Mathematical Programming Computation_ (under review).]
)
#project(
title: [Galois Theory | #link("https://github.com/EricWay1024/Galois-Theory-Group-Project/releases/download/v1.0.0/Galois_Theory.pdf")[pdf]],
kind: "Undergraduate group project",
date: "Mar 2023",
details: [
Personal contributions include sections on splitting fields, Galois groups, Galois extensions, Galois correspondence, solvable groups and the insolvability of quintic equations. Mark: 70/100.
]
)
= Seminars Attended
#project(
title: "7th International Conference on Applied Category Theory & 40th Conference on Mathematical Foundations of Programming Semantics",
kind: "University of Oxford, Oxford, UK",
date: "17-21 Jun 2024"
)
#project(
title: "Twelfth Symposium on Compositional Structures (SYCO 12)",
kind: "University of Birmingham, Birmingham, UK",
date: "15-16 Apr 2024"
)
#project(
title: "4th Southern and Midlands Logic Seminar",
kind: "University of Birmingham, Birmingham, UK",
date: "13 Dec 2023"
)
#pagebreak()
= Working Experience
#work(
role: "Tutor in Competitive Programming",
company: "Kuaimajiabian (SmartCoder) Information Technology (Lianyungang) Co. Ltd.",
location: "Lianyungang, China",
date: "Sep 2024 - Present",
// details: [
// ],
)
#work(
role: "Chief Technology Officer, Full-Stack Web Application Developer, Data Analyst",
company: "Ningbo Xiaocheng Information Technology Co., Ltd.",
location: "Ningbo, China",
date: "Oct 2019 - Jun 2022",
details: [
Presided over a student developer team.
Initiated the development of new features of a timetabling and course-rating web app based on React and Node.js with 3000+ daily active users.
],
)
// #work(
// company: "Zhiyuan Education Co., Ltd.",
// role: "Tutor in Competitive Programming",
// location: "Lianyungang, China",
// date: "Jun 2021 - Aug 2021",
// details: [
// Delivered lectures on dynamic programming and mathematics-based algorithms to competitive programming participants. Prepared lecture notes with C++ solutions and held regular tests.
// ]
// )
#work(
role: "Data Analyst",
company: "DiDi Chuxing Technology Co.",
location: "Beijing, China",
date: "Jun 2020 - Sep 2020",
details: [ Performed data manipulation tasks with HiveSQL databases. Penned and translated weekly data reports. Created and maintained interactive data visualisation dashboards in Tableau. ]
)
// #pagebreak()
= Software Projects
#project(
title: [#link("https://github.com/CardiacModelling/BenNevisBenchmark")[BenNevisBenchmark]],
kind: "Python module",
date: "Aug 2022",
details: [
Related to writing "Where's Ben Nevis". A benchmark framework for optimisation algorithms with data visualisation and animation methods.
]
)
#project(
title: link("https://github.com/EricWay1024/nott-course")[Nott Course],
kind: "Web application",
date: "Jun 2022",
details: [An unofficial enhancement of the University of Nottingham course catalogue, written with React and C++.],
)
= Awards
For academic performance:
- *Departmental Dissertation Prize* _(Jul 2024)_, awarded by Mathematical Institute, University of Oxford;
- *Second Martin Pluck G100 Prize* _(Jul 2023)_ and *The Harold Farnsworth OBE Prize* _(Oct 2022)_, awarded by Mathematical Sciences, University of Nottingham;
- *President's Scholarship* and *Best Performer of the Year* _(Dec 2021 and Dec 2020)_, both awarded twice by University of Nottingham Ningbo China;
- *Zhejiang Government Scholarship* _(Dec 2020)_, awarded by Zhejiang Provincial Government of China.
For competitive programming:
- *Honourable Mention* _(Nov 2022 and Nov 2021)_, awarded by ICPC Northwestern Europe Regional Contest;
- *Honourable Mention* _(Dec 2020 and Apr 2021)_, awarded by the 45th ICPC Asia Regional Contest.
// \datedsubsection{\textbf{Second Martin Pluck G100 Prize}}{07/2023}
// Awarded by Mathematical Sciences, University of Nottingham
// \datedsubsection{\textbf{The Harold Farnsworth OBE Prize} }{10/2022}
// Awarded by Mathematical Sciences, University of Nottingham
// \datedsubsection{\textbf{President's Scholarship} and \textbf{Best Performer of the Year}}{12/2021 \& 12/2020}
// Awarded by University of Nottingham Ningbo China
// \datedsubsection{\textbf{Honorable Mention}}{11/2022 \& 11/2021}
// Awarded by ICPC Northwestern Europe Regional Contest
// \datedsubsection{\textbf{Honorable Mention}}{05/2022 \& 05/2021}
// Awarded by Mathematical Contest in Modeling
// \datedsubsection{\textbf{Honorable Mention}}{12/2020 \& 04/2021}
// Awarded by the 45th ICPC Asia Regional Contest
// \datedsubsection{\textbf{Zhejiang Government Scholarship}}{12/2020}
// Awarded by Zhejiang Provincial Government of China
// \datedsubsection{\textbf{Provincial First Class Honour}}{11/2018}
// Awarded by National Olympiad in Informatics in Provinces (NOIP) of China
= Skills
#skills((
("Programming", (
[Python, C++, JavaScript],
[Full-stack web development],
[Competitive programming],
[Functional programming],
[Linux command line],
[Git version control],
)),
("Data analysis", (
[Python, MATLAB, R],
[MySQL, MongoDB],
[Tableau]
)),
("Typesetting", (
[LaTeX, Typst],
)),
("Languages", (
[English (fluent, IELTS overall 8.5 with speaking 8)],
[Mandarin Chinese (native)],
[Cantonese (conversational)],
[French (elementary)],
)),
))
|
|
https://github.com/SnowManKeepsOnForgeting/NoteofModernControlTheory | https://raw.githubusercontent.com/SnowManKeepsOnForgeting/NoteofModernControlTheory/main/Homework/Homework_1.typ | typst | #import "@preview/physica:0.9.3": *
#import "@preview/i-figured:0.2.4"
+ Given the circuit shown in the figure below, let the input be the voltage $u_1$ and the output be the voltage $u_2$. Select state variables and write down the state-space representation of the system.
#figure(
image("pic/电路图.svg",width: 50%),
caption: [Circuit diagram]
)
*Solution*: We choose the capacitor voltages $u_(c 1), u_(c 2)$ across $C_1$ and $C_2$ as state variables.
We have:
$
cases(
u_(c 1) + i R_2 + u_(c 2) = u_1\
i R_2 + u_(c 2) = u_2\
C_1 dv(u_(c 1),t) + u_(c 1)/R_1 = i\
C_2 dv(u_(c 2),t) = i
)
$
Substituting $x_1 = u_(c 1), x_2 = u_(c 2)$ and reformulating the above equations, we get:
$
cases(
accent(x_1,dot) = (u_1 - x_1 - x_2)/(C_1 R_2) - x_1/(C_1 R_1)\
accent(x_2,dot) = (u_1 - x_1 - x_2)/(C_2 R_2)
)
$
Let the input be $u_1 = u$ and rewrite the equations in vector form:
$
vec(delim: "[",accent(x_1,dot),accent(x_2,dot)) = mat(delim: "[",- (R_1 + R_2)/(C_1 R_1 R_2),-1/(C_1 R_2); -1/(C_2 R_2),-1/(C_2 R_2)) vec(delim: "[",x_1,x_2) + vec(delim: "[",1/(C_1 R_2),1/(C_2 R_2)) u
$
Let the output be $u_2 = y$. The output equation is:
$
y = [-1,0]vec(delim: "[",x_1,x_2) + u
$
Therefore, the state-space representation of the system is:
$
cases(
vec(delim: "[",accent(x_1,dot),accent(x_2,dot)) = mat(delim: "[",- (R_1 + R_2)/(C_1 R_1 R_2),-1/(C_1 R_2); -1/(C_2 R_2),-1/(C_2 R_2)) vec(delim: "[",x_1,x_2) + vec(delim: "[",1/(C_1 R_2),1/(C_2 R_2)) u\
y = [-1,0]vec(delim: "[",x_1,x_2) + u
)
$
+ Given the spring-damper system shown in the figure below, establish a state-space representation of the system.
#figure(
image("pic/弹簧阻尼系统.svg",width: 40%),
caption: [Spring-damper system]
)
*Solution*: Taking the downward direction as positive and applying a force balance to each mass, we have:
$
cases(
M_1 a_1 = -k x_1 - b_1 (v_1 - v_2)\
M_2 a_2 = f(t)-b_2 v_2 + b_1 (v_1 - v_2)
)
$
where $x_1, x_2$ denote the displacements of the masses $M_1, M_2$, while $v_1, v_2$ and $a_1, a_2$ denote their velocities and accelerations, respectively.
We choose the displacements $x_1, x_2$ and the velocities $x_3 = v_1, x_4 = v_2$ as state variables, and $y_1 = x_1, y_2 = x_2$ as output variables. Then we have:
$
cases(
accent(x_1,dot) = x_3\
accent(x_2,dot) = x_4\
accent(x_3,dot) = -k/M_1 x_1 - b_1/M_1 (x_3 - x_4)\
accent(x_4,dot) = f(t)/M_2 - b_2/M_2 x_4 + b_1/M_2 (x_3 - x_4)
)\
cases(
y_1 = x_1\
y_2 = x_2
)
$
Let the input be $f(t) = u(t)$ and rewrite the equations in vector form:
$
cases(
vec(delim: "[",accent(x_1,dot),accent(x_2,dot),accent(x_3,dot),accent(x_4,dot)) = mat(delim: "[",0,0,1,0;0,0,0,1;-k/M_1,0,-b_1/M_1,b_1/M_1;0,0,b_1/M_2,-(b_2+b_1)/M_2) vec(delim: "[",x_1,x_2,x_3,x_4) + vec(delim: "[",0,0,0,1/M_2) u(t)\
vec(delim: "[",y_1,y_2) = mat(delim: "[",1,0,0,0;0,1,0,0) vec(delim: "[",x_1,x_2,x_3,x_4)
)
$
+ Given the following state-space representation, find the transfer function of the system.
$
cases(
bold(accent(x,dot)) = mat(delim: "[",-5,-1;3,-1)bold(x) + mat(delim: "[",2;5)u\
y = [1,2]bold(x) + 4 u
)
$
*Solution*:
$
g(s) = Y(s)/U(s) &= bold(C)(s bold(I)- bold(A))^(-1) bold(B) + D\
&= [1,2] mat(delim: "[",s+5,1;-3,s+1)^(-1) mat(delim: "[",2;5) + 4\
&= (12 s + 59)/(s^2 + 6 s + 8) + 4
$
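For reference, the intermediate step spelled out: since $det(s bold(I) - bold(A)) = (s+5)(s+1) + 3 = s^2 + 6 s + 8$, we have $(s bold(I) - bold(A))^(-1) = 1/(s^2 + 6 s + 8) mat(delim: "[", s+1, -1; 3, s+5)$, and multiplying by $bold(C)$ on the left and $bold(B)$ on the right gives the numerator $12 s + 59$ above.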
+ Given the following state-space representation, find the transfer function of the system.
$
cases(
vec(delim: "[",accent(x_1,dot),accent(x_2,dot),accent(x_3,dot)) = mat(delim: "[",-2,1,0;0,-3,0;0,1,-4) vec(delim: "[",x_1,x_2,x_3) + mat(delim: "[",-1,-1;1,4;2,-3) vec(delim: "[",u_1,u_2)\
vec(delim: "[",y_1,y_2) = mat(delim: "[",1,1,1;-2,-1,0) vec(delim: "[",x_1,x_2,x_3)
)
$
*Solution*:
$
bold(G)(s) = (bold(Y)(s))/(bold(U)(s)) &= bold(C)(s bold(I)- bold(A))^(-1) bold(B) + bold(D)\
&= mat(delim: "[",1,1,1;-2,-1,0) mat(delim: "[",s+2,-1,0;0,s+3,0;0,-1,s+4)^(-1) mat(delim: "[",-1,-1;1,4;2,-3)\
&= mat(delim: "[",
1,1,1;
-2,-1,0
)
mat(delim: "[",
1/(s+2),1/((s+2)(s+3)),0;
0,1/(s+3),0;
0,1/((s+3)(s+4)),1/(s+4);
)
mat(delim: "[",
-1,-1;
1,4;
2,-3
)\
&=mat(delim: "[",
(2s+7)/((s+3)(s+4)),(10s+26)/((s+2)(s+3)(s+4));
1/(s+3),-(2s+10)/((s+2)(s+3))
)
$ |
|
https://github.com/jneug/schule-typst | https://raw.githubusercontent.com/jneug/schule-typst/main/tests/ab/test.typ | typst | MIT License | #import "../../src/schule.typ": ab
#import ab: *
#show: arbeitsblatt.with(
/* @typstyle:off */
titel: "Base template test",
reihe: "TYPST-TEST",
datum: "15.06.2024",
nummer: "1",
fach: "Typst",
kurs: "101",
autor: (
name: "<NAME>",
kuerzel: "Ngb",
),
version: "2024-06-15",
)
#lorem(500)
|
https://github.com/Nrosa01/TFG-2023-2024-UCM | https://raw.githubusercontent.com/Nrosa01/TFG-2023-2024-UCM/main/Memoria%20Typst/capitulos/Contribuciones.typ | typst | En este capítulo se detalla la aportación de cada uno de los alumnos en el desarrollo del proyecto.
== <NAME>
Existen 3 aportaciones principales de Nicolás al proyecto:
- Simulador en C++
- Maquetación inicial del proyecto, implementación de OpenGL, GLFW y IMGUI.
- Implementación de la lógica inicial de la simulación de arena y la interacción con el usuario.
- Refactorización de las partículas básicas para este modelo: Arena, Agua, Ácido, Roca, Aire y Gas para generalizar su comportamiento y facilitar el desarrollo de nuevas partículas.
- Investigación e implementación de un sistema de interacciones entre partículas.
- Optimizaciones y revisiones del código pertinentes para asegurar que la implementación sea un buen referente comparativo para el resto de implementaciones.
- Simulador en Lua con Love2D
- Diseño de la arquitectura del proyecto y la implementación de toda la lógica de la simulación de arena.
- Implementación de un sistema de partículas programables en Lua.
- Diseño e implementación del API para la creación de partículas y sus interacciones.
- Sistema de eventos implementado con Beholder para disminuir la dependencias entre componentes.
- Implementación de un sistema de multithreading para mejorar el rendimiento del simulador.
- Implementación de un algoritmo para encontrar el mayor número de hilos y tamaño de chunk en función del tamaño de la matriz de la simulación y la cantidad de núcleos de la CPU que permita aprovechar la mayor cantidad de hilos simultáneos sin incurrir en condiciones de carrera.
- Sincronización del trabajo entre los hilos aplicando una implementación propia de la técnica "work stealing" en base al uso de canales.
- Implementación de un sistema de doble buffer para evitar condiciones de carrera y asegurar la consistencia de los datos.
- Elaboración de la documentación del API del sistema de partículas.
- Simulador en Rust con Macroquad
- Creación del proyecto y configuración de las dependencias.
- Configuración del proyecto para soportar WebAssembly, aplicando características distintas según el destino de compilación mediante flags de compilación condicional.
- Diseño de la arquitectura del proyecto y la implementación de la lógica de la simulación de arena.
- Implementación de un sistema de comunicación con WebAssembly mediante una cola de comandos global.
- Implementación de un sistema de plugins para facilitar la creación de nuevas partículas.
- Extensión del sistema de plugins mediante la creación de un tipo de plugin que toma como datos un fichero JSON y genera una función a ejecutar para la partícula.
- Diseño e implementación del formato de JSON para definir partículas y sus interacciones.
- Web Vue3 envoltorio del ejecutable de Rust
- Creación de la web usando Vue, Vite y TailwindCSS.
- Diseño en Canva y posterior implementación de la interfaz gráfica de la web.
- Implementación del código JavaScript pegamento que permite comunicar la web y el ejecutable WebAssembly.
- Implementación de un sistema de guardado y cargado de plugins en formato JSON.
- Implementación de un menú de ayuda y un sistema de gestos para facilitar la interacción con la web.
- Implementación de integración continua en GitHub mediante GitHub Actions para automatizar la generación de la web.
- Interactividad de los botones, gestos y otros elementos de la web mediante JavaScript y Vue (variables reactivas, watchers).
- Implementación de Pinia para gestionar un estado global reactivo y minimizar la interdependencia entre componentes.
- Colaboración con Jonathan para integrar Blockly en la web.
- Edición de algunos generadores y definiciones de bloques creados por Jonathan para adaptarlos a las necesidades del proyecto.
- Diseño del logo de la web.
- Otros
- Elaboración del plan de pruebas con usuario.
- Realización de parte de las pruebas de usabilidad con usuario.
- Elaboración de pruebas de rendimiento entre distintos simuladores.
- Elaboración de figuras para la memoria mediante scripting en Typst y Canva.
== <NAME>
#linebreak()
- Simulador en C++
- Configuración inicial del proyecto así como configurado de solución, proyecto y biblicotecas
- Implementación de partículas iniciales como agua, roca y gas
- Asistido en la interaccion con el usuario añadiendo pincel ajustable
- Añadido propiedades físicas a las particulas como la densidad
- Movimiento que se ajuste a estos parámetros físicos
- Añadido de granularidad a las partículas
- Investigación de sistema alternativo de interaccion entre partículas mediante funciones anónimas
- Solución de bugs a lo largo del desarrollo relacionados con rendimiento e interacciones
- Simulador en Rust con Vulkan haciendo uso de GPU
- Investigación de posibles formas de hacer uso de la GPU para el cálculo de la lógica, entre ella añadir OpenMP o SYCL al proyecto principal
- Desarrollo de pipeline gráfico básico haciendo uso de Vulkan
- Desarrollo de sistema de interacción básico para colocar partículas
- Implementacion de interfaz mediante ImGUI
- Implementación de partícula de arena
- Investigación y desarrollo de compute shaders que permitan delegar el movimiento a la GPU
- Exploración de diferentes tamaños de work group que den lugar a un mayor rendimiento de ejecución
- Blockly para simulador de Rust
- Investigación sobre las necesidades del proyecto y los requisitos del módulo de Blockly.
- Creación de todos los bloques presentes en el proyecto, así como de los posibles mutadores que necesiten a excepcion de uno
- Ajuste del toolbox para incluir los bloques desarrollados
- Implementación de los generadores para cada bloque creado, aunque algunos de ellos tuvieron que ser corregidos más tarde junto a Nicolás de
- Colaboración con mi compañero para incluir Blockly en la página web
- Otros
- Realización de parte de las pruebas de usabilidad con usuario.
- Elaboración de pruebas de rendimiento entre distintos simuladores. |
|
https://github.com/0x1B05/algorithm-journey | https://raw.githubusercontent.com/0x1B05/algorithm-journey/main/practice/note/content/Manacher.typ | typst | #import "../template.typ": *
#pagebreak()
= Manacher
求`str`内的最长回文子串.
都需要对原字符串进行处理:
`abcd`->`#a#b#c#d#`
1. 暴力计算,遍历每个字符,然后向外扩展.复杂度 O(n^2)
2. Manacher 优化
== 暴力计算代码
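A minimal sketch of the brute-force approach (an assumed implementation, not from the original notes, written in Typst scripting so it stays self-contained here): pad the string as `#a#b#c#d#` and expand outward around every centre, $O(n^2)$ overall.

```typ
// returns the length of the longest palindromic substring of s
#let longest-palindrome-brute(s) = {
  let chars = ("#",)
  for ch in s.clusters() {
    chars.push(ch)
    chars.push("#")
  }
  let n = chars.len()
  let best = 0
  for i in range(n) {
    let r = 1
    while i - r >= 0 and i + r < n and chars.at(i - r) == chars.at(i + r) {
      r += 1
    }
    // expansion reached radius r - 1 in the padded string,
    // i.e. a palindrome of length r - 1 in the original string
    best = calc.max(best, r - 1)
  }
  best
}
```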
== Manacher
引入
`R=-1`
`C=-1`
1. 当遍历字符串的下标没有在 R 内,暴力扩
2. 当遍历字符串的下标有在 R 内:
`...|L...i'...C...i...R|...`
> 为什么 i 一定在 C-R,因为 i 必然在 C,R 之后出现(C,R 一起出现)
1. i'对应的回文子串完全在[L,R]内部:i 的回文半径等于 i'的回文半径
`...|L..x(.i'.)y..C..y(.i.)x..R|...`
2. i'对应的回文子串一部分在[L,R]外部:i 的回文半径 R-i+1
`..(..x|L..i'..)..C..(..i..R|y..)..` x!=y.
3. i'对应的回文子串左侧最远恰好在 L 处:i 的回文半径至少为 R-i+1
`....x(|L..i'..)..C..(..i..R|)y....` x 与 y 的关系不知道
```
s-处理->str
R=-1,C=-1
pArr[]存 i 位置的回文半径
for i in range(len):
if str[i] 在 R 内:
暴力扩
else:
if(i 回文在内部):
pArr[i]=pArr[i']
if(i 回文在外部):
pArr[i]=R-i+1
if(i 回文在 L):
pArr[i]=R-i+1
扩
```
=== 代码实现
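A minimal sketch matching the pseudocode above (an assumed implementation, not from the original notes, written in Typst scripting so it stays self-contained here). `p`, `center` and `right` play the roles of `pArr`, `C` and `R`.

```typ
// returns the length of the longest palindromic substring of s
#let manacher(s) = {
  let chars = ("#",)
  for ch in s.clusters() {
    chars.push(ch)
    chars.push("#")
  }
  let n = chars.len()
  let p = ()          // palindrome radius (including the centre) at each index
  let center = -1
  let right = -1      // rightmost index covered by a known palindrome
  let best = 0
  for i in range(n) {
    // inside [center, right]: start from the mirrored radius, clamped to the border
    let cur = if i < right { calc.min(p.at(2 * center - i), right - i + 1) } else { 1 }
    while i - cur >= 0 and i + cur < n and chars.at(i - cur) == chars.at(i + cur) {
      cur += 1
    }
    p.push(cur)
    if i + cur - 1 > right {
      right = i + cur - 1
      center = i
    }
    best = calc.max(best, cur - 1)   // radius cur maps to length cur - 1 in s
  }
  best
}
```

For example, `#manacher("abacabad")` evaluates to `7` (the substring `abacaba`).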
=== 复杂度分析
每个位置最多扩失败一次,故失败 O(n) 成功:
#tablem[
  | 分类 | i(max=n) | R(max=n) |
  | ---- | -------- | -------- |
  | 1    | +        | +        |
  | 2.1  | +        | \        |
  | 2.2  | +        | \        |
  | 2.3  | +        | +        |
]
故成功也为 O(n)
总为 O(n)
|
|
https://github.com/kaewik/din5008-typst | https://raw.githubusercontent.com/kaewik/din5008-typst/master/README.md | markdown | MIT License | # din5008-typst
A simple implementation of DIN 5008 B in typst used for German business letters.
## Usage
Example:

```typst
#import "din5008-letter.typ": *
#set text(
font: "HK Grotesk",
size: 11pt
)
#makeLetter(
senderName: "<NAME>",
senderStreet: "Schlossallee",
senderHouseNo: "5",
senderZipCode: "12345",
senderCity: "Musterstadt",
senderPhoneNo: "0123 456789",
senderMail: "<EMAIL>",
senderGithub: "https://github.com/maxMusti",
receiverName: "Die Firma AG",
receiverStreet: "Geschäftsstraße",
receiverHouseNo: "7",
receiverZipCode: "54321",
receiverCity: "Unternehmenstadt",
receiverMail: "<EMAIL>",
subject: "Bewerbung auf die Stelle als \"Typst Developer\" in Unternehmenstadt",
opening: "Sehr geehrte Damen und Herren",
body: [
hiermit möchte ich mich auf ...
],
greeting: "<NAME>"
)
```
|
https://github.com/exusiaiwei/quarto-ilm | https://raw.githubusercontent.com/exusiaiwei/quarto-ilm/main/README.zh-CN.md | markdown | MIT License |
<div align="center">
<h1>🎓 Quarto ILM 模板</h1>
<p>
<a href="README.md">English</a> |
<b>简体中文</b>
</p>
<!-- 徽章 -->
<p>
<img src="https://img.shields.io/badge/quarto-模板-blue" alt="quarto 模板">
<img src="https://img.shields.io/github/stars/exusiaiwei/quarto-ilm" alt="星标数">
<img src="https://img.shields.io/github/forks/exusiaiwei/quarto-ilm" alt="分支数">
<img src="https://img.shields.io/github/license/exusiaiwei/quarto-ilm" alt="许可证">
<img src="https://img.shields.io/github/last-commit/exusiaiwei/quarto-ilm" alt="最后提交">
</p>
</div>
## 📚 目录
- [📚 目录](#-目录)
- [🌟 关于](#-关于)
- [✨ 特性](#-特性)
- [📥 安装](#-安装)
- [🚀 使用](#-使用)
- [🔧 自定义](#-自定义)
- [🤝 贡献](#-贡献)
- [📄 许可证](#-许可证)
## 🌟 关于
Quarto ILM 是一个多功能、简洁且极简的非虚构写作模板,改编自 Typst 的 ['ilm' 模板](https://github.com/talal/ilm)。'Ilm'(乌尔都语:عِلْم)发音为 /ə.ləm/,是乌尔都语中"知识"的意思。这个 Quarto 模板非常适合用于课堂笔记、报告和书籍编写。
## ✨ 特性
- 简洁极简的设计
- 标题页
- 目录
- 不同类型图形的索引(图片、表格、代码块)
- 动态页脚,显示章节标题(顶级标题)
- 支持 Iosevka 字体(默认使用 Fira Mono)
## 📥 安装
要使用此模板,请在终端中运行以下命令:
```bash
quarto use template exusiaiwei/quarto-ilm
```
这将创建一个包含所有必要文件的新目录,帮助您快速开始。
## 🚀 使用
安装后,您可以通过修改 Quarto 文档中的 YAML 元数据来自定义模板。目前支持以下元数据选项:
```yaml
title: "ILM Typst 模板示例"
author: "姓名"
date: "YYYY-MM-DD"
abstract: "本文档展示了 Quarto 的 ILM 模板的特性。"
bibliography: refs.bib
biblio-style: apa
format:
ilm-typst: default
```
## 🔧 自定义
您可以通过修改 `_extensions/ilm-typst` 文件来进一步自定义模板。请随意调整样式和布局以满足您的需求。
## 🤝 贡献
欢迎贡献!如果您有任何建议或遇到任何问题,请随时提交 Pull Request 或开启 Issue。
## 📄 许可证
本项目采用 MIT 许可证 - 详情请见 [LICENSE](LICENSE) 文件。
<div align="center">
<p>
<a href="#-quarto-ilm-模板">返回顶部</a>
</p>
</div>
|
https://github.com/HiiGHoVuTi/requin | https://raw.githubusercontent.com/HiiGHoVuTi/requin/main/jeux/nim_set.typ | typst | #import "../lib.typ": *
#show heading: heading_fct
Soit $A subset.eq NN^*$ avec $1 in A$.
On considère un jeu à deux joueurs où $N>0$ objets sont disposés sur une table, et chaque joueur doit à tour de rôle retirer $t in A_(>N)$ objets de la table. Le joueur qui retire le dernier objet perd.
On appellera Alice le joueur qui commence et Bob le deuxième joueur.
#question(2)[
Pour les valeurs de $A$ suivantes, pour quel $N$ Alice possède-t-elle une stratégie gagnante ?
- $A = NN$
- $A = {1}$
- $A = {2k+1 | k in NN}$
- $A = {1, 2, ...,p}$ pour $p$ fixé
]
#question(1)[Si $max(A) <= N$, proposer un algorithme qui décide si Alice possède une stratégie gagnante pour un $N$ donné.
]
#correct([
0.
 - Si $N > 1$, Alice a une stratégie gagnante en jouant $N-1$. Si $N = 1$, elle perd.
- Alice perd si $N$ est impair.
- Comme tout nombre pair s'écrit $n = n-1 + 1$, alice gagne pour tout nombre pair en prenant $n-1$. Alice perd pour tout nombre impair $>1$ car elle en laissera un nombre pair $>1$, ce qui implique par vol de stratégie une victoire pour Bob.
- Alice gagne si $N in [|2;p+1|]$. Bob gagne si $N = p+2$.
 Puis par récurrence, Alice gagne si $N in [|p+3 ; 2p+2|]$, Bob si $N = 2p+3$, etc.
Autrement dit, Alice gagne si $N equiv.not 1 [p+1]$
1. On peut le faire en $O(A N) = O(N^2)$ par prog dyn: $T[i] = 1$ si Alice gagne et $2$ sinon.
 On pose juste $T[n] = 1$ si $exists i in A, T[n-i] = 2$, avec $T[1] = 2$
]) |
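A minimal sketch of the dynamic programme from the correction above, written in Typst scripting (an assumed implementation, not part of the original exercise); `win.at(n)` is `true` exactly when the player to move with `n` objects left has a winning strategy.

```typ
#let first-player-wins(A, N) = {
  let win = (false, false) // dummy entry for n = 0; with n = 1 you take the last object and lose
  for n in range(2, N + 1) {
    // winning iff some allowed move t < n leaves the opponent in a losing position
    win.push(A.filter(t => t < n).any(t => not win.at(n - t)))
  }
  win.at(N)
}
```

For instance, with $A = {1, 2, 3}$ (the case $A = {1, ..., p}$ with $p = 3$) this gives `true` for $N = 4$ and `false` for $N = 5$, matching the criterion $N equiv.not 1 [p+1]$.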
|
https://github.com/DrakeAxelrod/cvss.typ | https://raw.githubusercontent.com/DrakeAxelrod/cvss.typ/main/cvss/0.1.0/src/examples.typ | typst | MIT License | #import "main.typ" as cvss;
```typ
#cvss.parse("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")
```
#block(
breakable: false,
inset: 0.5em,
stroke: 1pt + black,
radius: 0.25em,
width: 100%,
fill: gray.lighten(50%),
[#cvss.parse("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")]
)
```typ
#cvss.score("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")
```
#block(
breakable: false,
inset: 0.5em,
stroke: 1pt + black,
radius: 0.25em,
width: 100%,
fill: gray.lighten(50%),
[#cvss.score("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")]
)
```typ
#cvss.severity("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")
```
#block(
breakable: false,
inset: 0.5em,
stroke: 1pt + black,
radius: 0.25em,
width: 100%,
fill: gray.lighten(50%),
[#cvss.severity("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")]
)
```typ
#cvss.metrics("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")
```
#block(
breakable: false,
inset: 0.5em,
stroke: 1pt + black,
radius: 0.25em,
width: 100%,
fill: gray.lighten(50%),
[#cvss.metrics("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")]
)
```typ
#cvss.verify("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")
#cvss.verify("CVS S:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")
```
#block(
breakable: false,
inset: 0.5em,
stroke: 1pt + black,
radius: 0.25em,
width: 100%,
fill: gray.lighten(50%),
[#cvss.verify("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H") \
#cvss.verify("CVS S:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:H")]
)
```typ
#cvss.NONE
#cvss.LOW
#cvss.MEDIUM
#cvss.HIGH
#cvss.CRITICAL
#cvss.re
```
#block(
breakable: false,
inset: 0.5em,
stroke: 1pt + black,
radius: 0.25em,
width: 100%,
fill: gray.lighten(50%),
[
#cvss.NONE \
#cvss.LOW \
#cvss.MEDIUM \
#cvss.HIGH \
#cvss.CRITICAL \
#cvss.re
]
)
|
https://github.com/connachermurphy/typst-cv | https://raw.githubusercontent.com/connachermurphy/typst-cv/main/working_papers.typ | typst | MIT License | #let items = (
[#quote[An Academic Paper: Theory and Evidence from Theory and Evidence] (with Other Author), _Journal of Academic Research_. #link("https://www.doi.org/")[DOI].],
).rev()
#enum(..items, numbering: it => numbering("1.", items.len() + 1 - it)) |
https://github.com/WarriorHanamy/typst_tutorial | https://raw.githubusercontent.com/WarriorHanamy/typst_tutorial/main/general_beg.typ | typst | #import "@preview/touying:0.4.0": *
#import "@preview/cetz:0.2.2"
#import "@preview/fletcher:0.4.3" as fletcher: node, edge
#import "@preview/ctheorems:1.1.2": *
#import emoji: face
// cetz and fletcher bindings for touying
#let cetz-canvas = touying-reducer.with(reduce: cetz.canvas, cover: cetz.draw.hide.with(bounds: true))
#let fletcher-diagram = touying-reducer.with(reduce: fletcher.diagram, cover: fletcher.hide)
// Register university theme
// You can remove the theme registration or replace other themes
// it can still work normally
#let s = themes.university.register(aspect-ratio: "16-9")
// Set the numbering of section and subsection
#let s = (s.methods.numbering)(self: s, section: "1.", "1.1")
// Global information configuration
#let s = (s.methods.info)(
self: s,
title: [Optimal Control],
subtitle: [MPC Basics: Numerical Optimization \ Sequential Quadratic Programming],
author: [<NAME>],
date: datetime.today(),
institution: [Sun Yat-sun University],
)
// Pdfpc configuration
// typst query --root . ./example.typ --field value --one "<pdfpc-file>" > ./example.pdfpc
#let s = (s.methods.append-preamble)(self: s, pdfpc.config(
duration-minutes: 30,
start-time: datetime(hour: 14, minute: 10, second: 0),
end-time: datetime(hour: 14, minute: 40, second: 0),
last-minutes: 5,
note-font-size: 12,
disable-markdown: false,
default-transition: (
type: "push",
duration-seconds: 2,
angle: ltr,
alignment: "vertical",
direction: "inward",
),
))
// Theorems configuration by ctheorems
#show: thmrules.with(qed-symbol: $square$)
#let theorem = thmbox("theorem", "Theorem", fill: rgb("#eeffee"))
#let corollary = thmplain(
"corollary",
"Corollary",
base: "theorem",
titlefmt: strong
)
#let definition = thmbox("definition", "Definition", inset: (x: 1.2em, top: 1em))
#let example = thmplain("example", "Example").with(numbering: none)
#let proof = thmproof("proof", "Proof")
// Extract methods
#let (init, slides, touying-outline, alert) = utils.methods(s)
#show: init
#show strong: alert
// Extract slide functions
#let (slide, empty-slide) = utils.slides(s)
#show: slides
#let magic(term) = box[#emoji.face *#term*]
= Reference
== Reference
Numerical Optimization in Robotics by Wang et al.\
Convex Optimization by Boyd et al.\
Convex Optimization by Ryan et al.\
https://www.stat.cmu.edu/~ryantibs/convexopt/
= Convex optimization
== Problem formulation
#figure(
image("Figures/convex_optimization.png")
)
== Convex set
#slide[
#figure(
image("Figures/convex_set.png")
)
]
#slide[
#figure(
image("Figures/convex_hull.png")
)
]
#slide[
#figure(
image("Figures/halfs.png"),
)
#figure(
image("Figures/ellipsoid.png"),
)
]
== Operations preserving convexity
+ #magic[Intersection] \ the intersection of (any number of) convex sets is convex
+ #magic[Affine mappings]\ if $f: RR^n -> RR^m$ is affine, $f(x) = A x + b$ with $A in RR^(m times n)$ and $b in RR^m$
+ #magic[scaling, translation]
+ #magic[projection onto some coordinates]: ${x|(x,y) in S}$
+ #magic[solution set of linear matrix inequality]: \ ${x|x_1 A_1 + ... + x_n A_n + B < 0}$ with $A_i in S^p$ and $B in S^p$
+ #magic[perspective function] $P : RR^(n+1) -> RR^(n)$, $P(x,t)=x/t$, $t>0$
+ #magic[linear-fractional function] $f : RR^n -> RR^m$:\
$f(x) = (A x + b) / (c^T x + d)$, $c^T x + d > 0$
== Convex function
#figure(
image("Figures/epi.png"),
)
== Operations preserving convexity
#figure(
image("Figures/ops_conv.png"),
)
== Proper cone and Generalized inequality
#slide[
#figure(
image("Figures/proper_cone.png"),
)
]
#slide[
#figure(
image("Figures/gn.png"),
)
]
= Unconstrained Optimization
== Moving strategy
#magic[Given] the current point $x_k$ and a direction $p_k$, how do we move to the new iterate $x_(k+1)$?
+ #magic[Line Search]
- Exact
- Inexact
- Backtracking line serach (Armijo rule)\
$f(x_k + alpha_k p_k) <= f(x_k) + c_1 alpha_k p_k^T nabla f(x_k)$
- Wolfe conditions (Curvature condition) [$0 < c_1 < c_2 < 1$]
+ Weak wolfe condition: $-p_k^T nabla f(x_k + alpha_k p_k) <= -c_2 p_k^T nabla f(x_k)$
+ Strong wolfe condition: \ $|p_k^T nabla f(x_k + alpha_k p_k)| <= c_2 |p_k^T nabla f(x_k)|$
+ #magic[Trust Region]
the information about $f$ is used to construct a model $m_k$ whose behavior near the current point $x_k$ is similar to that of $f$.
$ min_p m_k (x_k + p)$ where $x_k + p$ lies inside the trust region.
Typically, $m_k (x_k + p) = f_k + p^T nabla f_k + 1/2 p^T B_k p$.
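A small numeric illustration of the backtracking (Armijo) rule above, for the toy objective $f(x) = x^2$ in one dimension (assumed example, not from the original slides):

```typ
#let armijo-step(x, c1: 0.0001) = {
  let f = t => t * t      // toy objective
  let grad = 2 * x        // f'(x)
  let p = -grad           // steepest-descent direction
  let alpha = 1.0
  // halve alpha until the sufficient-decrease condition holds
  while f(x + alpha * p) > f(x) + c1 * alpha * grad * p {
    alpha = alpha / 2
  }
  alpha
}
// #armijo-step(1.0) evaluates to 0.5
```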
== First Order Method
#slide[
+ #magic[Gradient descent]:\ $x^(k) = x^(k-1) - alpha nabla f(x^(k-1))$
+ #magic[Proximal Gradient descent] (for a composite objective $f + h$ with $h$ non-smooth):\ $x^(+) = arg min_y f(x) + nabla f(x)^T (y - x) + 1/(2 t) ||y-x||^2_2 + h(y)$
+ Others
]
#slide[
https://towardsdatascience.com/a-visual-explanation-of-gradient-descent-methods-momentum-adagrad-rmsprop-adam-f898b102325c
]
== Motivation
#slide[
#figure(
image("Figures/sg_draw.png")
)
]
#slide[
#figure(
image("Figures/cv_ad.png")
)
]
== Second Order Method
+ #magic[Newton]
+ #magic[BFGS]
+ #magic[L-BFGS]
+ #magic[Gauss-Newton]
// #slide[
// We can use `#pause` to #pause display something later.
// #pause
// Just like this.
// #meanwhile
// Meanwhile, #pause we can also use `#meanwhile` to #pause display other content synchronously.
// ]
// == Complex Animation
// #slide(repeat: 3, self => [
// #let (uncover, only, alternatives) = utils.methods(self)
// At subslide #self.subslide, we can
// use #uncover("2-")[`#uncover` function] for reserving space,
// use #only("2-")[`#only` function] for not reserving space,
// #alternatives[call `#only` multiple times \u{2717}][use `#alternatives` function #sym.checkmark] for choosing one of the alternatives.
// ])
// == Math Equation Animation
// #slide[
// Touying equation with `pause`:
// #touying-equation(`
// f(x) &= pause x^2 + 2x + 1 \
// &= pause (x + 1)^2 \
// `)
// #meanwhile
// Here, #pause we have the expression of $f(x)$.
// #pause
// By factorizing, we can obtain this result.
// ]
// == CeTZ Animation
// #slide[
// CeTZ Animation in Touying:
// #cetz-canvas({
// import cetz.draw: *
// rect((0,0), (5,5))
// (pause,)
// rect((0,0), (1,1))
// rect((1,1), (2,2))
// rect((2,2), (3,3))
// (pause,)
// line((0,0), (2.5, 2.5), name: "line")
// })
// ]
// == Fletcher Animation
// #slide[
// Fletcher Animation in Touying:
// #fletcher-diagram(
// node-stroke: .1em,
// node-fill: gradient.radial(blue.lighten(80%), blue, center: (30%, 20%), radius: 80%),
// spacing: 4em,
// edge((-1,0), "r", "-|>", `open(path)`, label-pos: 0, label-side: center),
// node((0,0), `reading`, radius: 2em),
// edge((0,0), (0,0), `read()`, "--|>", bend: 130deg),
// pause,
// edge(`read()`, "-|>"),
// node((1,0), `eof`, radius: 2em),
// pause,
// edge(`close()`, "-|>"),
// node((2,0), `closed`, radius: 2em, extrude: (-2.5, 0)),
// edge((0,0), (2,0), `close()`, "-|>", bend: -40deg),
// )
// ]
// = Theroems
// == Prime numbers
// #definition[
// A natural number is called a #highlight[_prime number_] if it is greater
// than 1 and cannot be written as the product of two smaller natural numbers.
// ]
// #example[
// The numbers $2$, $3$, and $17$ are prime.
// @cor_largest_prime shows that this list is not exhaustive!
// ]
// #theorem("Euclid")[
// There are infinitely many primes.
// ]
// #proof[
// Suppose to the contrary that $p_1, p_2, dots, p_n$ is a finite enumeration
// of all primes. Set $P = p_1 p_2 dots p_n$. Since $P + 1$ is not in our list,
// it cannot be prime. Thus, some prime factor $p_j$ divides $P + 1$. Since
// $p_j$ also divides $P$, it must divide the difference $(P + 1) - P = 1$, a
// contradiction.
// ]
// #corollary[
// There is no largest prime number.
// ] <cor_largest_prime>
// #corollary[
// There are infinitely many composite numbers.
// ]
// #theorem[
// There are arbitrarily long stretches of composite numbers.
// ]
// #proof[
// For any $n > 2$, consider $
// n! + 2, quad n! + 3, quad ..., quad n! + n #qedhere
// $
// ]
// = Others
// == Side-by-side
// #slide[
// First column.
// ][
// Second column.
// ]
// == Multiple Pages
// #slide[
// #lorem(200)
// ]
// // appendix by freezing last-slide-number
// #let s = (s.methods.appendix)(self: s)
// #let (slide, empty-slide) = utils.slides(s)
// == Appendix
// #slide[
// Please pay attention to the current slide number.
// ] |
|
https://github.com/teamdailypractice/pdf-tools | https://raw.githubusercontent.com/teamdailypractice/pdf-tools/main/typst-pdf/examples/example-05.typ | typst | + The climate
- Temperature
- Precipitation
+ The topography
+ The geology |
|
https://github.com/alberto-lazari/computer-science | https://raw.githubusercontent.com/alberto-lazari/computer-science/main/type-theory/theory-exercises/exercises/natural-numbers.typ | typst | #import "/common.typ": *
#exercise(
section: (num: "3.2", title: "Natural Numbers Type"),
ex: 3,
[Define the addition operation using the rules of the natural number type
$ x + y in Nat ctx(x in Nat, y in Nat) $
such that $x + 0 = x in Nat ctx(x in Nat)$.]
)
The addition $x + y$ can be defined as:
$ ElNat(y, x, (w, z). succ(z)) $
Let $Gamma = x in Nat, y in Nat$; \
$x + y in Nat ctx(x in Nat, y in Nat)$ is derivable:
#align(center, box[
#set text(8pt)
#prooftree(
axiom($Gamma cont$),
rule(label: var, $y in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: FNat, $Nat type ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: var, $x in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: Fc, $Gamma, w in Nat cont$),
rule(label: FNat, $Nat type ctx(Gamma, w in Nat)$),
rule(label: Fc, $Gamma, w in Nat, z in Nat cont$),
rule(label: var, $z in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(label: I2Nat, $succ(z) in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(n: 4, label: ENat, $ElNat(y, x, (w, z). succ(z)) in Nat ctx(Gamma)$)
)
])
Where $Gamma cont$ derivable, because:
- $Gamma = x in Nat, y in Nat$
- $x in Nat, y in Nat cont$ derivable:
#let Nat-type = (
axiom($ctx() cont$),
rule(label: FNat, $Nat type ctx()$),
)
#let var-cont(var) = (
..Nat-type,
rule(label: Fc, $#var in Nat cont$),
)
$
#prooftree(
..var-cont("x"),
rule(label: FNat, $Nat type ctx(x in Nat)$),
rule(label: Fc, $x in Nat, y in Nat cont$)
)
$
== Correctness
The definition is correct, in fact:
=== Base case
$y = 0 => x + y = x + 0 = x$
This is true, because:
- $x + 0 = ElNat(0, x, (w, z). succ(z))$
- $ElNat(0, x, (w, z). succ(z)) = x in Nat ctx(x in Nat)$ derivable:
#align(center, box[
#set text(8pt)
#prooftree(
..var-cont("x"),
rule(label: FNat, $Nat type ctx(x in Nat)$),
..var-cont("x"),
rule(label: var, $x in Nat ctx(x in Nat)$),
..var-cont("x"),
rule(label: FNat, $Nat type ctx(x in Nat)$),
rule(label: Fc, $x in Nat, w in Nat cont$),
rule(label: FNat, $Nat type ctx(x in Nat, w in Nat)$),
rule(label: Fc, $x in Nat, w in Nat, z in Nat cont$),
rule(label: var, $z in Nat ctx(x in Nat, w in Nat, z in Nat)$),
rule(label: I2Nat, $succ(z) in Nat ctx(x in Nat, w in Nat, z in Nat)$),
rule(n: 3, label: C1Nat, $ElNat(0, x, (w, z). succ(z)) = x in Nat ctx(x in Nat)$)
)
])
=== Inductive case
$y = succ(v) ctx(v in Nat) => x + y = x + succ(v) = succ(x + v)$
This is true, because:
- $x + succ(v) = ElNat(succ(v), x, (w, z). succ(z))$
- $succ(x + v) = succ(ElNat(v, x, (w, z). succ(z)))$
- Let $Gamma = x in Nat, v in Nat$; \
$ElNat(succ(v), x, (w, z). succ(z)) = succ(ElNat(v, x, (w, z). succ(z))) in Nat ctx(Gamma)$ derivable:
#align(center, box[
#set text(8pt)
#prooftree(
axiom($Gamma cont$),
rule(label: var, $v in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: FNat, $Nat type ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: var, $x in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: Fc, $Gamma, w in Nat cont$),
rule(label: FNat, $Nat type ctx(Gamma, w in Nat)$),
rule(label: Fc, $Gamma, w in Nat, z in Nat cont$),
rule(label: var, $z in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(label: I2Nat, $succ(z) in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(n: 4, label: C2Nat, $ElNat(succ(v), x, (w, z). succ(z)) = succ(ElNat(v, x, (w, z). succ(z))) in Nat ctx(Gamma)$)
)
])
Where $Gamma cont$ derivable, because:
- $Gamma = x in Nat, v in Nat$
- $x in Nat, v in Nat cont$ derivable:
$
#prooftree(
..var-cont("x"),
rule(label: FNat, $Nat type ctx(x in Nat)$),
rule(label: Fc, $x in Nat, v in Nat cont$)
)
$
#exercise(
section: (num: "3.2", title: "Natural Numbers Type"),
ex: 4,
[Define the addition operation using the rules of the natural number type
$ x + y in Nat ctx(x in Nat, y in Nat) $
such that $0 + x = x in Nat ctx(x in Nat)$.]
)
The addition $x + y$ can be defined as:
$ ElNat(x, y, (w, z). succ(z)) $
Let $Gamma = x in Nat, y in Nat$; \
$x + y in Nat ctx(x in Nat, y in Nat)$ is derivable:
#align(center, box[
#set text(8pt)
#prooftree(
axiom($Gamma cont$),
rule(label: var, $x in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: FNat, $Nat type ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: var, $y in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: Fc, $Gamma, w in Nat cont$),
rule(label: FNat, $Nat type ctx(Gamma, w in Nat)$),
rule(label: Fc, $Gamma, w in Nat, z in Nat cont$),
rule(label: var, $z in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(label: I2Nat, $succ(z) in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(n: 4, label: ENat, $ElNat(x, y, (w, z). succ(z)) in Nat ctx(Gamma)$)
)
])
Where $Gamma cont$ derivable, because:
- $Gamma = x in Nat, y in Nat$
- $x in Nat, y in Nat cont$ derivable:
$
#prooftree(
..Nat-type,
rule(label: Fc, $x in Nat cont$),
rule(label: FNat, $Nat type ctx(x in Nat)$),
rule(label: Fc, $x in Nat, y in Nat cont$)
)
$
== Correctness
The definition is correct, in fact:
=== Base case
$x = 0 => x + y = 0 + y = y$
Note that the exercise requires that $0 + x = x in Nat ctx(x in Nat)$, but that is equivalent to proving that $0 + y = y in Nat ctx(y in Nat)$, by renaming $y$ to $x$ in the latter, and this is true, because:
- $0 + y = ElNat(0, y, (w, z). succ(z))$
- $ElNat(0, y, (w, z). succ(z)) = y in Nat ctx(y in Nat)$ derivable:
#align(center, box[
#set text(8pt)
#prooftree(
..var-cont("y"),
rule(label: FNat, $Nat type ctx(y in Nat)$),
..var-cont("y"),
rule(label: var, $y in Nat ctx(y in Nat)$),
..var-cont("y"),
rule(label: FNat, $Nat type ctx(y in Nat)$),
rule(label: Fc, $y in Nat, w in Nat cont$),
rule(label: FNat, $Nat type ctx(y in Nat, w in Nat)$),
rule(label: Fc, $y in Nat, w in Nat, z in Nat cont$),
rule(label: var, $z in Nat ctx(y in Nat, w in Nat, z in Nat)$),
rule(label: I2Nat, $succ(z) in Nat ctx(y in Nat, w in Nat, z in Nat)$),
rule(n: 3, label: C1Nat, $ElNat(0, y, (w, z). succ(z)) = y in Nat ctx(y in Nat)$)
)
])
=== Inductive case
$x = succ(v) ctx(v in Nat) => x + y = succ(v) + y = succ(v + y)$
This is true, because:
- $succ(v) + y = ElNat(succ(v), y, (w, z). succ(z))$
- $succ(v + y) = succ(ElNat(v, y, (w, z). succ(z)))$
- Let $Gamma = v in Nat, y in Nat$; \
$ElNat(succ(v), y, (w, z). succ(z)) = succ(ElNat(v, y, (w, z). succ(z))) in Nat ctx(Gamma)$ derivable:
#align(center, box[
#set text(8pt)
#prooftree(
axiom($Gamma cont$),
rule(label: var, $v in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: FNat, $Nat type ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: var, $y in Nat ctx(Gamma)$),
axiom($Gamma cont$),
rule(label: Fc, $Gamma, w in Nat cont$),
rule(label: FNat, $Nat type ctx(Gamma, w in Nat)$),
rule(label: Fc, $Gamma, w in Nat, z in Nat cont$),
rule(label: var, $z in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(label: I2Nat, $succ(z) in Nat ctx(Gamma, w in Nat, z in Nat)$),
rule(n: 4, label: C2Nat, $ElNat(succ(v), y, (w, z). succ(z)) = succ(ElNat(v, y, (w, z). succ(z))) in Nat ctx(Gamma)$)
)
])
Where $Gamma cont$ derivable, because:
- $Gamma = v in Nat, y in Nat$
- $v in Nat, y in Nat cont$ derivable:
$
#prooftree(
..var-cont("v"),
rule(label: FNat, $Nat type ctx(v in Nat)$),
rule(label: Fc, $v in Nat, y in Nat cont$)
)
$
#exercise(
section: (num: "3.2", title: "Natural Numbers Type"),
ex: 6,
[Define the predecessor operator
$ #p (x) in Nat ctx(x in Nat) $
such that
$
&#p (0) = 0 \
&#p (succ(#n)) = #n
$]
)
The predecessor $#p (x)$ can be defined as:
$ ElNat(x, 0, (w, z). w) $
$#p (x) in Nat ctx(x in Nat)$ is derivable:
#align(center, box[
#set text(7pt)
#prooftree(
..var-cont("x"),
rule(label: var, $x in Nat ctx(x in Nat)$),
..var-cont("x"),
rule(label: FNat, $Nat type ctx(x in Nat)$),
..var-cont("x"),
rule(label: I1Nat, $0 in Nat ctx(x in Nat)$),
..var-cont("x"),
rule(label: FNat, $Nat type ctx(x in Nat)$),
rule(label: Fc, $x in Nat, w in Nat cont$),
rule(label: FNat, $Nat type ctx(x in Nat, w in Nat)$),
rule(label: Fc, $x in Nat, w in Nat, z in Nat cont$),
rule(label: var, $w in Nat ctx(x in Nat, w in Nat, z in Nat)$),
rule(n: 4, label: ENat, $ElNat(x, 0, (w, z). w) in Nat ctx(x in Nat)$)
)
])
== Correctness
The definition is correct, in fact:
=== Base case
$x = 0 => #p (x) = #p (0) = 0$
This is true, because:
- $#p (0) = ElNat(0, 0, (w, z). w)$
- $ElNat(0, 0, (w, z). w) = 0 in Nat ctx()$ derivable:
$
#prooftree(
..Nat-type,
axiom($ctx() cont$),
rule(label: I1Nat, $0 in Nat ctx()$),
..var-cont("w"),
rule(label: FNat, $Nat type ctx(w in Nat)$),
rule(label: Fc, $w in Nat, z in Nat cont$),
rule(label: var, $w in Nat ctx(w in Nat, z in Nat)$),
rule(n: 3, label: C1Nat, $ElNat(0, 0, (w, z). w) = 0 in Nat ctx()$)
)
$
=== Inductive case
$x = succ(y) ctx(y in Nat) => #p (x) = #p (succ(y)) = y$
This is true, because:
- $#p (succ(y)) = ElNat(succ(y), 0, (w, z). w)$
- $ElNat(succ(y), 0, (w, z). w) = y in Nat ctx(y in Nat)$ derivable:
#align(center, box[
#set text(8pt)
#prooftree(
..var-cont("y"),
rule(label: var, $y in Nat ctx(y in Nat)$),
..var-cont("y"),
rule(label: FNat, $Nat type ctx(y in Nat)$),
axiom($ctx() cont$),
rule(label: I1Nat, $0 in Nat ctx()$),
..var-cont("y"),
rule(label: FNat, $Nat type ctx(y in Nat)$),
rule(label: Fc, $y in Nat, w in Nat cont$),
rule(label: FNat, $Nat type ctx(y in Nat, w in Nat)$),
rule(label: Fc, $y in Nat, w in Nat, z in Nat cont$),
rule(label: var, $w in Nat ctx(y in Nat, w in Nat, z in Nat)$),
rule(n: 4, label: C2Nat, $ElNat(succ(y), 0, (w, z). w) = y in Nat ctx(y in Nat)$)
)
])
|
|
https://github.com/dainbow/MatGos | https://raw.githubusercontent.com/dainbow/MatGos/master/themes/3.typ | typst | #import "../conf.typ": *
= Теорема о промежуточных значениях непрерывной функции
#theorem(
"Больцано-Коши о промежуточных значениях",
)[
Пусть $f$ непрерывна на $[a, b]$. Тогда
#eq[
$forall x_1, x_2 in [a, b] : c := f(x_1) < d := f(x_2) : space forall e in (c, d) : exists gamma in [a, b] : f(gamma) = e$
]
]
#proof[
Рассмотрим частный случай $c < e = 0 < d$.
Построим последовательность отрезков ${[a_n, b_n]}_(n = 1)^oo$, где $[a_1, b_1] = {x_1, x_2}$ (мы
не знаем в каком порядке идут иксы).
Заметим, что $f(a_1) dot f(b_1) < 0$.
Рассмотрим $f((a_1 + b_1) / 2)$. Какие могут быть случаи?
- Если $f((a_1 + b_1) / 2) = 0$, то мы победили и останавливаемся.
- Если $f((a_1 + b_1) / 2) > 0$, то $a_2 := a_1, b_2 := (a_1 + b_1) / 2$.
- Если $f((a_1 + b_1) / 2) < 0$, то $a_2 := (a_1 + b_1) / 2, b_2 := b_1$.
Либо после конечного числа шагов мы найдём требуемую точку, либо построим
последовательность стягивающихся отрезков:
#eq[
$\
b_n - a_n = abs(x_2 - x_1) / 2^(n - 1) \ $
]
Тогда по принципу Кантора ${gamma} = sect.big_(n = 1)^oo [a_n, b_n]$, причём
#eq[
$lim_(n -> oo)a_n = lim_(n -> oo) b_n = gamma in [a, b]$
]
Тогда в силу непрерывности $f$:
#eq[
$f(gamma) = lim_(n -> oo) f(a_n) = lim_(n -> oo) f(b_n)$
]
Заметим, что после каждой итерации алгоритма изначальное свойство сохраняется:
#eq[
$f(a_n) dot f(b_n) < 0$
]
Совершив предельный переход в неравенстве, получим
#eq[
$f^2(gamma) <= 0$
]
Из чего следует $f(gamma) = 0$.
В общем случае рассматривается вспомогательная функция $F(x) = f(x) - e$.
]
|
|
https://github.com/kdog3682/2024-typst | https://raw.githubusercontent.com/kdog3682/2024-typst/main/src/cetz-example-1.typ | typst | #import "@preview/cetz:0.2.0"
#cetz.canvas({
import cetz.draw: *
import cetz.tree
tree.tree(
spread:3,
grow:4,
([root],[A],[B]),
draw-node: (node, parentnode) => {
content((),text(4pt,[#node]),padding:.1,name:"content")
rect("content.top-left","content.bottom-right")
}
)
})
#let graph(data) = {
cetz.canvas({
import cetz.draw: *
import cetz.chart
// Left - Basic
let data = (("A", 10), ("B", 20), ("C", 13))
group(name: "a", {
chart.columnchart(
y-tick-step: 4,
// x-ticks: (1,2,3),
size: (4, 3), data,
labels: (
[hi, bye $x^2$],
[hi, bye],
[hi, bye],
[hi, bye],
[hi, bye],
)
)
})
})
}
|
|
https://github.com/augustebaum/petri | https://raw.githubusercontent.com/augustebaum/petri/main/src/lib.typ | typst | MIT License | #import "cetz-shapes.typ": place, transition
#import "fletcher-shapes.typ": p, t
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/tlacuache-thesis-fc-unam/0.1.1/template/capitulo1.typ | typst | Apache License 2.0 | #import "@preview/tlacuache-thesis-fc-unam:0.1.1": chapter
// completamente opcional cargar la bibliografía, compilar el capítulo
#show: chapter.with(bibliography: bibliography("references.bib"))
= Mi primer capítulo
Introducción al primer capítulo.
#include "seccion1.typ" |
https://github.com/jamesrswift/chemicoms-paper | https://raw.githubusercontent.com/jamesrswift/chemicoms-paper/main/src/elements/float.typ | typst | #let float(
content,
align: bottom,
) = {
place(
align, float: true,
box(width: 100%)[
#if ( align == bottom){line(length: 100%, stroke:0.5pt);v(0.6em)}
#content
#if ( align == top){v(0.6em);line(length: 100%, stroke:0.5pt)}
]
)
} |
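
// Usage sketch (hypothetical call site; the import path and page setup depend on
// the surrounding template):
//
//   #import "float.typ": float
//   #float[A full-width note pushed to the bottom of the page, set off by a rule.]
//
// Passing `align: top` instead draws the rule below the note.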
|
https://github.com/Woodman3/modern-ysu-thesis | https://raw.githubusercontent.com/Woodman3/modern-ysu-thesis/main/README.md | markdown | MIT License | # 燕山大学本科论文 modern-ysu-thesis
本模板在[modern-nju-thesis](https://github.com/nju-lug/modern-nju-thesis)的基础上修改而来
> [!WARNING]
>
> 本模板正处于积极开发阶段,存在一些格式问题,适合尝鲜 Typst 特性
>
> 本模板是民间模板,**可能不被学校认可**,正式使用过程中请做好随时将内容迁移至 Word 或 LaTeX 的准备
# QuickStart
clone 本项目后,照着 template\thesis.typ 下写就行
## 致谢
- 感谢 [modern-nju-thesis](https://github.com/nju-lug/modern-nju-thesis) Typst 中文论文模板。
## License
This project is licensed under the MIT License.
|
https://github.com/Myriad-Dreamin/tinymist | https://raw.githubusercontent.com/Myriad-Dreamin/tinymist/main/syntaxes/textmate/tests/unit/errror_tolerance/paren.typ | typst | Apache License 2.0 |
#((show: body => 2) * body)
--- |
https://github.com/kunalchandan/resume | https://raw.githubusercontent.com/kunalchandan/resume/main/portfolio.typ | typst | #set page(
margin: (
x : 2.5em,
y : 2em,
),
height: 20cm
)
#import "conf.typ": page_heading, experience, accent_1, accent_10, heading_font_size, main_font_size
#set text(font: ("Jost"), weight: "light", size: main_font_size + 3pt,)
#set list(marker: ([--], [-]))
#show heading: it => {
if it.level == 1 {
text(
weight: "medium",
size : heading_font_size + 2pt,
fill : accent_1,
it
)
}
else if it.level == 2 {
// Since smallcaps aren't implemented yet for fonts without scmp, use upper
// smallcaps(it)
text(
weight: "regular",
size: heading_font_size + 2pt,
spacing: 100%,
upper(it)
)
}
else if it.level == 3 {
text(
weight: "medium",
size: heading_font_size + 2pt,
it
)
} else {
it
}
}
#show strong: set text(weight: "extralight", fill: accent_10)
#page_heading(
name : (
first : "Kunal's",
last : "Portfolio",
email : "<EMAIL>",
phone : "814-807-7652",
github : "kunalchandan",
linkedin : "kunal-chandan",
caption : "University of Waterloo",
subcaption : "B.A.Sc Honours Electrical & Computer Engineering",
website : "chandan.one"
)
)
#let software = experience(
description : (
[KiCAD],
[LTSpice/PySpice],
[Cadence],
[LayoutEditor],
[Quartus Prime],
[Linux],
)
)
#let awards = experience(
description : (
[Baylis Medical Capstone Design Award],
[NSERC Undergraduate Student Research Award],
)
)
#let interests = experience(
description : (
[Cycling],
[Rock Climbing],
[Juggling],
)
)
#let certifications = experience(
description : (
[2023 - Ignition Core Certified],
[2022 - #link("https://qnfcf.uwaterloo.ca/", "QNFCF Cleanroom Certification")],
[2022 - #link("https://uwaterloo.ca/giga-to-nanoelectronics-centre/lab-equipment", "G2N Cleanroom Certification")],
)
)
#let Summary_Quals = experience(
description : (
    [Multidisciplinary electrical engineering generalist specialising in software development at scale: data engineering with *Python* and performance-critical development in *C++*],
    [Hands-on electrical engineering experience, including clean-room fabrication and electrical lab work],
[Strong electrical engineering foundation through coursework in semiconductor device physics, RF devices, control systems, and IC design]
)
)
#let risc_v_core = experience(
title : "Pipelined Risc-V Core",
description : (
[Designed 5-stage pipelined *RISC-V* 32-bit core in *Verilog* using only synthesizable constructs],
[Core synthesized on FPGA and successfully ran programs. Testbenches used to ensure cycle accuracy],
)
)
#let analog_filters = experience(
title : "Realizable Analog Filters",
website : "https://chandan.one/posts/filter/",
description : (
[Generated optimal schematics and realizable parts for analog filter given cutoff frequency and roll-off],
[Used *sympy* for circuit analysis and *pandas* as a parts database backend],
)
)
#let go_sequencer = experience(
title : "Multiple DNA sequence Aligner",
website : "https://chandan.one/posts/GoLang-Refresher/",
description : (
    [Performs multiple sequence alignment on DNA or amino-acid sequences; *dynamic programming* and *graph theory* are used to generate the optimal alignment],
    [Used Go threads for parallelism; parallelization improved performance 8x],
)
)
#let compiler = experience(
title : "Compiler For Novel Language",
website : "https://github.com/kunalchandan/RajLang/",
description : (
    [Written in *C++* to support basic arithmetic, arrays, maps, and functions as first-class members],
[Used *CMake* to manage project and dependencies, *Catch* for unit and end-to-end testing],
[Used *Boost* to manage graph datastructures and vizualization of Abstract Syntax Tree (AST)],
[Targetting *LLVM IR* to allow for cross-platform compatability],
)
)
#let mandelbrot_gen = experience(
title : "GPU Mandelbrot Generator",
website : "https://chandan.one/posts/CUDA/",
description : (
[Fractal generator written in *C++* using *CUDA*],
    [Parallelized code ran *56,160%* faster than the single-threaded CPU program],
)
)
#let hearing_aid = experience(
title : "Beamforming Hearing Aid",
website : "https://chandan.one/posts/mic-array/",
description : (
    [Designed a 4-channel microphone-array PCB in *KiCAD*; the PCB performs active analog bandpass filtering, differential amplification, and multichannel *ADC* readout over *SPI* to a Raspberry Pi],
    [The R-Pi compresses the audio and serves it over a *Flask* server for further digital filtering and beamforming],
    [Used *PyTorch* to create a quantized voice-isolation model, minimizing latency while maintaining performance],
[Used *multiprocessing*, *asyncio*, and *websockets* to maximize throughput and performance],
)
)
#let ray_tracing = experience(
title : "Ray Tracing Engine",
website : "https://github.com/kunalchandan/ToyTracer/",
description : (
[Implemented 3D recursive path-tracing for arbitrary materials on basic geometric shapes],
[Used *nalgebra* for arbitrary rotations & positions of camera & objects],
[Parallel processing of ray-tracing using *rayon* yielding *\~10X* performance speed-up],
)
)
#let education = experience(
title : "University of Waterloo \nB.A.Sc Electrical & Computer Engineering 23'",
description : (
[Electronic devices, Semiconductor physics, Analog/Digital integrated circuits],
[Analog/Digial/Multivariable control systems],
[Radio frequency and microwave circuits]
)
)
// #box(height: 1.8cm,
// columns(3, gutter: 5pt)[
// = Awards
// #awards
// = Certifications
// #certifications
// = Interests
// #interests
// ]
// )
#grid(
columns : (2fr, 2fr, 1fr),
rows: (auto),
[
= Awards
#awards
],
[
= Certifications
#certifications
],
[
= Interests
#interests
]
)
#box(height: 12cm,
columns(2, gutter: 10pt)[
= Hardware Projects
#hearing_aid
#risc_v_core
#analog_filters
#colbreak()
= Software Projects
#ray_tracing
#go_sequencer
#compiler
#mandelbrot_gen
]
)
// #let cell = rect.with(
// // inset: 8pt,
// // fill: rgb("888888"),
// width: 100%,
// // radius: 6pt
// )
// #let mini_column(body) = style(styles => {
// let size = measure(body, styles)
// [
// #body
// // #size.width
// // #size.height
// ]
// // cell(height : 88%)[#body #size.width]
// })
// #grid(
// columns: (18%, 82%),
// // columns: (12%, 44%, 44%),
// rows: (auto,),
// gutter: 3pt,
// mini_column[
// = Software
// #software
// = Languages
// #languages
// = Lab Skills
// #lab_tools
// = Interests
// #interests
// = Award
// #awards
// ],
// mini_column[
// = Summary of Qualifications
// #Summary_Quals
// = Experience
// #uw_wong
// #groq_inc
// #uw_yash
// #huawei
// // ],
// // mini_column[
// // = Experience
// #mappedin
// #robarts
// #oicr
// = Projects
// #hearing_aid
// #risc_v_core
// #ray_tracing
// // = Education
// // #education
// ],
// )
|
|
https://github.com/mumblingdrunkard/mscs-thesis | https://raw.githubusercontent.com/mumblingdrunkard/mscs-thesis/master/src/computer-architecture-fundamentals/scaling-up.typ | typst | == Scaling Up <sec:scaling-up>
There are two main ways to increase the performance of a processor to do more work in the same amount of time:
1) Increase the clock frequency, or
2) increase the number of instructions performed each clock cycle... somehow.
This section provides a short overview of the history of performance improvements and the mechanisms by which the performance improvements have been achieved.
=== The Easy Option: Increasing Frequency
For a while, two key phenomena dominated the improvements in processor performance: Moore's Law, and Dennard Scaling.
Moore's Law is the observation that the number of transistors in a circuit is roughly doubled every year (later changed to every two years) because of improvements in technology that enable smaller and cheaper transistors.
As everything shrinks, the physical distances get shorter, and transmitting information between components uses less energy.
The water analogy holds here.
If the wires between components in the chip are tubes, and the tubes have to be filled up and drained every time a bit is changed, shorter, narrower tubes require less total volume to flow.
The tubes have a lower capacity.
The electrical equivalent is _capacitance_, denoted by the symbol $C$.
The important capacitances in a circuit are those of the transistors and of the wires.
For the water analogy: transistors have buckets that must be filled and drained above and below a given level to reliably switch on or off.
Capacitance in a circuit means that after applying a voltage level at one end, it takes some time before the other end reaches that same voltage level.
Transistors also have an internal _transition time_, the time it takes for the output to reliably change after changing an input.
This time is proportional to the capacitance.
The maximum frequency of the circuit is governed by delays, which are in turn dominated by this transition time.
The frequency of the transistor cannot exceed $1/d_max$ where $d_max$ is the maximum delay from one register to the next.
A basic equation describes the power consumption of processors:
$
P = alpha f C V^2 + P_"static"
$
Dynamic power consumption (power consumed due to signals turning on and off within the circuit) is equal to the frequency of switching $f$, multiplied by the capacitance $C$ of the circuit, multiplied by the square of the applied voltage $V$.
This is scaled by the activity factor $alpha$.
There is also some constant $P_"static"$ which was negligible for a while.
Due to physical properties, a reduction in transistor sizes causes an approximately equal reduction in capacitance and voltage.
With an equal reduction in voltage, the transition time still decreases in proportion to the transistor shrink.
Because the transition time decreases, the delay decreases, and the frequency can be scaled up and performance, thus increasing performance.
With these different factors, it can be shown that as technology improves and transistors shrink, the _power density_ stays approximately constant.
That is, each square millimetre of circuit has approximately the same power output, no matter the technology.
Conversely: the same circuit can be implemented in less area, using less power, all while running faster.
This is Dennard Scaling: the observation that power density stays the same across improvements in technology.
To get a faster and more power-efficient processor all one had to do was wait.
These two "laws" were responsible for much of the performance improvements until Dennard Scaling started breaking down around two decades ago.
The number of transistors per area has still been increasing steadily---albeit at a slower pace---but the frequency increases have been much, much smaller.
The reason for this is attributed to _static power_---power used just because the circuit is turned on without any computation going on---becoming a larger factor as transistors shrink.
Static power becomes an increasingly large part of power consumption as it becomes more difficult to electrically insulate components within the circuit when they become too small, leading to _leakage_ where electrons can cross through material that is intended to be non-conductive.
A current flows within the circuit simply because there is an electron supply and not because any computation is being performed.
This is problematic because more power means more heat.
When the processor becomes too hot, the materials within it start breaking down.
To meet performance goals, companies started turning up frequencies more than Dennard Scaling allowed for while staying within the same power budgets.
To increase the frequency beyond the "original" max frequency, the voltage must be tuned up to decrease the delays by saturating capacitances faster.
This causes the power output to scale with the cube of the frequency increase, which has necessitated more powerful cooling to keep up with the increased heat output.
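The arithmetic behind the cubic growth, under the simplifying assumption that the achievable frequency scales roughly linearly with the supply voltage near the operating point: raising the frequency by a factor $s$ then also requires raising the voltage by roughly $s$, so the dynamic term becomes
$
alpha (s f) C (s V)^2 = s^3 dot alpha f C V^2 .
$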
=== Scaling Horizontally
The second option for increasing work per unit of time is to scale horizontally by adding more execution units.
As the number of transistors per area keeps increasing, the extra area can be spent on more execution units.
This category of scaling can be split into two further groups:
==== Adding Cores
Most high-performance computers are systems that run a wide variety of programs at the same time.
By adding more cores to the processor, multiple applications can be executed in parallel.
This does not directly speed up programs that are not explicitly written to use many cores.
However, in a system with many tasks, and only a single core, the system must divide time on the processor between each of the tasks.
By adding more cores, more time becomes available to each task.
==== Superscalar Processors
Though adding cores helps for overall system performance, and for workloads written with multiple cores in mind, it cannot speed up programs that are written for a single core.
Some programs can be rewritten to take advantage of multiple cores.
Others require so much fine-grained synchronisation between the cores that the overhead of synchronisation negates any benefit.
However, these programs are still likely to have a lot of available ILP, even if they are too serial to be split across many cores.
This is where _superscalar processing_ helps.
Superscalar processing attempts to exploit available ILP to complete more than one instruction per cycle.
It is possible to construct a superscalar pipelined processor by simply doubling up all of the units shown in @fig:pipelined-cpu.
Two IF stages, two ID/OF stages, and so on, all running in parallel.
This adds some complexity in handling hazards.
The forwarding logic becomes far more complex: instead of 3 bundles of connections running from various pipeline registers, there are 12.
It must also be ensured that two instructions entering at the same time do not depend on each other, or the later instruction must somehow be stalled.
This can be dealt with.
Increasing the number of pipelines to three worsens the problem.
The forwarding logic increases from 12 to 27 wire bundles.
=== Scaling Complexity
Generally, naive scaling like this turns out to be quadratic for integral parts of the circuit.
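One way to see the quadratic growth: with $w$ parallel pipelines there are $3 w$ forwarding sources, and each of the $w$ consuming pipelines needs a connection from every one of them, giving $3 w^2$ bundles in total: 3, 12, and 27 for widths one, two, and three.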
One way to tackle the problem is to use static scheduling with _very long instruction words_ (VLIW) and design the ISA such that instructions cannot depend on results that are generated in other pipelines until they are written back to the register file.
VLIW requires that the programmer (or more likely: the compiler) must find instructions that can safely be executed in parallel and group them together in a packet.
Where a single instruction is usually just called an instruction word, this packet of multiple instructions is called a very long instruction word.
This allows all the forwarding logic and dependency detection to be removed.
This moves complexity over to the programmer (or more likely: the compiler), who has to find suitable groups of instructions and decide where to place them.
However, it has its own set of issues with scaling into the future, where code written with one width in mind cannot benefit from increased execution width.
Modern processors use various techniques to efficiently tackle this scaling without increasing complexity for compilers and programmers.
This is explained further in @sec:high-performance-processor-architecture.
|
|
https://github.com/MattiaOldani/Informatica-Teorica | https://raw.githubusercontent.com/MattiaOldani/Informatica-Teorica/master/capitoli/complessità/20_zona_grigia.typ | typst | #import "@preview/lemmify:0.1.5": *
#let (theorem, lemma, corollary, remark, proposition, example, proof, rules: thm-rules) = default-theorems(
"thm-group",
  lang: "en",
)
#show: thm-rules
#show thm-selector("thm-group", subgroup: "theorem"): it => block(
it,
stroke: red + 1pt,
inset: 1em,
breakable: true,
)
#import "../alias.typ": *
= The "gray zone"
We call the *gray zone* that _cloud_ of important decision problems, with many applications, for which no time-efficient algorithms are known yet, _but_ for which nobody has ever proven that such algorithms cannot exist. Indeed, given a problem $Pi$, being told that no efficient algorithm for its solution exists today does not imply that this is truly the case: such a proof is very hard to obtain.
The decision problems in this zone share one distinctive property: they are *efficiently verifiable*. Given a particular instance, it is easy to determine whether the answer for that problem and that instance should be #text(green)[YES] or #text(red)[NO].
== Examples
=== _CNF-SAT_
The _*CNF-SAT*_ problem asks whether there exists an assignment of Boolean variables that satisfies a logical predicate in conjunctive normal form. Formulas are denoted by $phi(x_1, dots, x_n)$ and are made up of conjunctions $C_1 and dots and C_k$, each of which contains at least one Boolean variable $x_i$.
Formally, given a _CNF_ $phi(x_1, dots, x_n)$, we want to answer the question $ exists wstato(x) in {0,1}^n bar.v phi(wstato(x)) = 1? $
A possible solution algorithm is the exhaustive one: $ P equiv & "for" wstato(x) in {0,1}^n "do" \ & quad "if" phi(wstato(x)) = 1 "then" \ & quad quad "return" 1 \ & "return" 0. $
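As an illustration only (the clause encoding and the function name are invented for this sketch, they are not part of the course material), the exhaustive procedure can be transcribed into a few lines of Python:

```python
from itertools import product

# phi is a CNF given as a list of clauses; each clause is a list of literals,
# where the literal k stands for x_k and -k stands for "not x_k" (1-based).
def brute_force_sat(phi, n):
    for x in product((0, 1), repeat=n):          # all 2^n assignments
        if all(any(x[abs(l) - 1] == (1 if l > 0 else 0) for l in clause)
               for clause in phi):               # polynomial-time check of phi(x)
            return 1
    return 0

# (x_1 or not x_2) and (x_2 or x_3)
print(brute_force_sat([[1, -2], [2, 3]], 3))     # -> 1
```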
Note that there are $2^n$ possible assignments (permutations with repetition), while checking the satisfiability of a single assignment is feasible in polynomial time $n^k$. Consequently, this algorithm is inefficient, since exploring the whole tree of possible assignments (_subproblems_) would require exponential time.
*Beware of the converse*. Having an enormous number of configurations to test does not mean that a problem only admits inefficient algorithms: there are graph problems (_reachability_) that could test an enormous number of configurations but are in fact solved efficiently with other techniques.
=== Hamiltonian circuits
Given an undirected graph $G = (V,E)$, we want to know whether $G$ contains a Hamiltonian circuit or not.
Let us recall a couple of notions about graphs:
- *path*: a sequence of vertices $V_1, dots, V_k$ such that $forall 1 lt.eq i lt k quad (V_i, V_(i+1)) in E$;
- *circuit*: a path $V_1, dots, V_k$ such that $V_1 = V_k$, that is, a path that starts and ends at the same vertex;
- *Hamiltonian circuit*: a circuit in which every vertex of $G$ is visited exactly once.
An algorithm for this problem is the following: $ P equiv & "for" (V_i_1, dots, V_i_n, V_i_1) in op("Perm")(V) "do" \ & quad "if" op("IS_HC") (V_i_1, dots, V_i_n, V_i_1) = 1 "then" \ & quad quad "return" 1 \ & "return" 0, $ in which we essentially generate all possible permutations of vertices that start and end with the same vertex and then efficiently check whether they form a Hamiltonian circuit or not.
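The same brute-force strategy, again purely as an illustrative sketch with invented names, can be written as:

```python
from itertools import permutations

def has_hamiltonian_circuit(vertices, edges):
    E = {frozenset(e) for e in edges}
    for perm in permutations(vertices):          # n! candidate orderings
        cycle = perm + (perm[0],)                # close the circuit
        # polynomial-time check that consecutive vertices are adjacent
        if all(frozenset((cycle[i], cycle[i + 1])) in E
               for i in range(len(cycle) - 1)):
            return 1
    return 0

print(has_hamiltonian_circuit([1, 2, 3, 4], [(1, 2), (2, 3), (3, 4), (4, 1)]))  # -> 1
```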
Let us compute the time complexity of this algorithm:
- the number of permutations, and hence the number of iterations of the loop, is $n!$;
- the check on each permutation can be implemented efficiently in polynomial time.
=== Eulerian circuits
Given an undirected graph $G = (V,E)$, we want to know whether $G$ contains an Eulerian circuit or not.
Recall that an Eulerian circuit is a circuit in which every edge of $G$ is visited exactly once. It may look similar to the previous problem, but it is not!
#theorem(numbering: none, name: "Euler 1736")[
  A connected graph $G$ contains an Eulerian circuit if and only if every one of its vertices has even degree, that is $ forall v in V quad "DEGREE"(v) = 2k bar.v k in NN. $
]
Thanks to this theorem, the problem can be solved in linear time, hence efficiently. Unfortunately, no similar theorem exists for Hamiltonian circuits.
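For contrast, the even-degree condition of the theorem can be checked in linear time; a minimal sketch, assuming the graph is given as an adjacency list and is connected:

```python
def has_eulerian_circuit(adj):
    # adj maps each vertex to the list of its neighbours; connectivity is assumed
    return all(len(neighbours) % 2 == 0 for neighbours in adj.values())

square = {1: [2, 4], 2: [1, 3], 3: [2, 4], 4: [3, 1]}
print(has_eulerian_circuit(square))  # -> True
```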
== The EXPTIME class
We now define the class $ exptime = union.big_(k gt.eq 0) dtime(2^n^k) $ of problems with *exponential* time complexity. Obviously $ P subset.eq exptime, $ because every polynomial can be "_bounded from above_" by an exponential. By diagonalization it has actually been proven that $ P subset exptime $ by exploiting an *NDTM* (_Non-Deterministic Turing Machine_) with a timeout.
|
|
https://github.com/floriandejonckheere/utu-thesis | https://raw.githubusercontent.com/floriandejonckheere/utu-thesis/master/thesis/chapters/07-proposed-solution/05-decomposition.typ | typst | #import "@preview/acrostiche:0.3.1": *
#import "/helpers.typ": *
== Decomposition
Monolith decomposition is the process of identifying microservice candidates in a monolith application.
The goal of this process is to split the monolith into smaller, more manageable software components which can be deployed independently @dehghani_2018.
Traditionally, monolith decomposition is a manual process that requires a deep understanding of the software architecture and business requirements.
However, the burden of manual decomposition can be alleviated by using automated tools and algorithms.
The knowledge of software architects should be leveraged where possible to guide the decomposition process, without imposing the requirement of a deep understanding of the software architecture.
For example, #cite_full(<li_etal_2023>) proposed a method that utilizes expert knowledge; however, it requires the recommendations to be written in a domain-specific language, which increases the burden on the software architect.
MOSAIK implements an automated identification of microservice candidates in a monolith application using a clustering algorithm.
The decomposition process can be fine-tuned by assigning an importance to the different types of coupling strategies.
This way, the software architect can decide which coupling strategies are most relevant to the decomposition process.
Clustering algorithms group similar elements together based on one or multiple criteria.
Generally these algorithms work iteratively either top-down or bottom-up.
Top-down algorithms start by assigning all elements to one big cluster, and then progressively split it into smaller clusters until a stopping criterion is met.
Examples of top-down clustering algorithms are the Girvan-Newman algorithm @girvan_newman_2002 and hierarchical divisive clustering.
Bottom-up algorithms on the other hand, start by assigning each element to its own cluster, then merge similar clusters together in succession, until a stopping criterion is met.
Examples of bottom-up clustering algorithms are hierarchical agglomerative clustering and the Louvain algorithm @blondel_etal_2008.
==== Selection of algorithm
In @slr_algorithms, we performed an analysis of the state of the art in clustering algorithms used for microservice candidate identification.
We considered the following criteria when selecting the most suitable algorithm for our task:
- *Automation*: the algorithm should not require architectural knowledge up-front (e.g. number of clusters)
- *Complexity*: the algorithm should be computationally efficient
The first criterion disqualifies algorithms that require specifying the number of clusters up-front, such as Spectral Clustering, K-Means, and Hierarchical Agglomerative Clustering.
Search-based algorithms (e.g. genetic, linear optimization) were considered as well, due to their inherent ability to optimize multiple objectives @carvalho_etal_2020.
However, they require a lot of computing resources, and proper fine-tuning of parameters such as population size, mutation rate, and crossover rate, which makes them less suitable.
Affinity Propagation is an algorithm that does not require specifying the number of clusters up-front, but it is computationally expensive as well @frey_dueck_2007.
We found that the Louvain @blondel_etal_2008 and Leiden @traag_etal_2019 algorithms are the most suitable for this task, as they are designed for optimizing modularity in networks.
The algorithms are iterative and hierarchical, which makes them fast and efficient.
Similarly, in #cite(<rahiminejad_etal_2019>, form: "year") #cite_full(<rahiminejad_etal_2019>) performed a topological and functional comparison of community detection algorithms in biological networks.
They analyzed six algorithms based on certain criteria such as appropriate community size (not too small or too large), and performance speed.
The authors found that the Louvain algorithm @blondel_etal_2008 performed best in terms of quality and speed.
==== The Louvain/Leiden algorithm
The Louvain algorithm, introduced by #cite_full(<blondel_etal_2008>), is an algorithm for extracting non-overlapping communities in large networks.
The algorithm uses a greedy optimization technique to maximize the modularity of the network.
Modularity is a measure of the strength of division of a network.
Networks with high modularity have dense connections between the internal vertices of a community, and sparse connections between vertices of different communities.
The range of the metric is between -0.5 (non-modular clustering) and 1 (fully modular clustering).
Optimizing the modularity theoretically results in the best possible clustering of the network, though for numerical computing reasons, the algorithm uses heuristics to approach the optimal solution.
The modularity of a network is defined as follows in @modularity @hairol_anuar_etal_2021.
$ Q = 1/(2m) sum_(i=1)^N sum_(j=1)^N [ A_("ij") - (k_i k_j)/(2m) ] delta (c_i, c_j) $ <modularity>
Where:
- $A$ is the adjacency matrix
- $k_i$ and $k_j$ are the degrees of the vertices $i$ and $j$ respectively
- $m$ is the number of edges in the network
- $N$ is the total number of vertices in the network
- $c_i$ and $c_j$ are the communities to which vertices $i$ and $j$ belong
- $delta (c_i, c_j)$ is 1 if $c_i$ and $c_j$ are in the same cluster, and 0 otherwise
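To make the formula concrete, the following minimal sketch computes $Q$ for a hand-made two-block partition of a small example graph. It assumes a recent version of the `networkx` library (2.8 or later) and is only an illustration, not part of the MOSAIK implementation:

```python
import networkx as nx
from networkx.algorithms.community import modularity

G = nx.karate_club_graph()                              # small stand-in for a coupling graph
communities = [set(range(0, 17)), set(range(17, 34))]   # arbitrary two-block partition
print(modularity(G, communities))                       # Q in [-0.5, 1]; higher = denser clusters
```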
#grid(
columns: (50%, 50%),
gutter: 1em,
[
#figure(
table(
columns: (auto),
inset: 5pt,
stroke: (x: none),
align: (left),
[*@louvain_algorithm*: Louvain algorithm],
text(size: 10pt)[
_graph_ $arrow.l$ original network \
\
*loop* \
#h(1em) *for each* _vertex_ *in* _graph_ \
#h(2em) Put _vertex_ in its own community \
\
#h(1em) #text(green.darken(40%), "// Phase 1: local modularity optimization") \
#h(1em) *for each* _neighbour_ *in* _vertex_._neighbours_ \
#h(2em) Move _vertex_ to community of _neighbour_ \
#h(2em) *if* modularity gain \
#h(3em) *break* \
\
#h(1em) #text(green.darken(40%), "// Phase 2: community aggregation") \
#h(1em) *for each* ( _community_ : _graph_ ) \
#h(2em) Reduce _community_ to a single vertex \
\
#h(1em) *if* modularity no longer increases \
#h(2em) *break* \
]
),
kind: "algorithm",
supplement: "Algorithm",
caption: [Louvain algorithm pseudocode],
) <louvain_algorithm>
],
[
@louvain_algorithm represents a pseudocode implementation of the Louvain algorithm.
The algorithm operates in two phases.
In the first phase, the algorithm optimizes the modularity locally by moving each vertex into the community of the neighbour that yields the best modularity gain.
This step is repeated for each vertex until a local maximum is reached.
Then, the algorithm aggregates each community into a single vertex, while preserving the network structure.
The algorithm can then be applied iteratively to the new network, until the modularity cannot be further increased.
]
)
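For illustration, running the Louvain algorithm on a small graph takes only a few lines with `networkx` (2.8 or later). This sketch is not the MOSAIK implementation, but it shows the kind of call the decomposition step builds on:

```python
import networkx as nx
from networkx.algorithms.community import louvain_communities, modularity

G = nx.karate_club_graph()                      # placeholder for the class-coupling graph
communities = louvain_communities(G, seed=42)   # list of sets of vertices
print(len(communities), "communities, Q =", modularity(G, communities))
```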
#pagebreak()
A visualization of the intermediate steps of the Louvain algorithm is shown in @louvain.
#figure(
grid(
columns: (auto, auto),
rows: (auto, auto),
gutter: 1em,
[#include("/figures/07-proposed-solution/louvain-1.typ")],
[#include("/figures/07-proposed-solution/louvain-2.typ")],
[#include("/figures/07-proposed-solution/louvain-3.typ")],
[#include("/figures/07-proposed-solution/louvain-4.typ")],
),
caption: [Visualization of the Louvain algorithm @blondel_etal_2008]
) <louvain>
#pagebreak()
A major disadvantage of the Louvain algorithm is that it can only detect non-overlapping communities @blondel_etal_2008.
This means that a software component can only belong to one microservice, which is not in line with the principle of reuse in software engineering.
The algorithm has also been proven to generate small and disconnected communities @traag_etal_2019, which is not desirable in the context of microservices @fortunato_barthelemy_2007.
In #cite(<traag_etal_2019>, form: "year"), #cite_full(<traag_etal_2019>) introduced the Leiden algorithm, an improvement of the Louvain algorithm that addresses the disconnected community problem.
Similarly to the Louvain algorithm, the Leiden algorithm optimizes the quality of the network using the Constant Potts Model @traag_2011:
$ cal(H)(G,cal(P)) = sum_(C in cal(P)) |E(C, C)| - gamma binom(||C||, 2) $ <constant_potts_model>
#grid(
columns: (50%, 50%),
gutter: 1em,
[
The Leiden algorithm operates in three phases.
The first and last phases equal those of the Louvain algorithm (i.e., local modularity optimization and community aggregation).
In the second phase, the algorithm refines the partition within each community.
The refinement uses a probability distribution when reassigning vertices, which helps the algorithm avoid getting stuck in a local optimum.
The Leiden algorithm has been shown to outperform the Louvain algorithm in terms of quality and speed @traag_etal_2019.
],
[
#figure(
table(
columns: (auto),
inset: 5pt,
stroke: (x: none),
align: (left),
[*@leiden_algorithm*: Leiden algorithm (refinement)],
text(size: 12pt)[
#text(green.darken(40%), "// Phase 2: partition refinement") \
*for each* ( _community_ : _graph_ ) \
#h(1em) _partition_ $arrow.l$ _community_ \
#h(1em) *for each* ( _vertex_ : _partition_ ) \
#h(2em) *if* _vertex_ is a singleton \
#h(3em) assign _vertex_ to new \
#h(3em) community using \
#h(3em) probability distribution _P_ \
]
),
kind: "algorithm",
supplement: "Algorithm",
caption: [Leiden algorithm (refinement)],
) <leiden_algorithm>
]
)
A pseudocode implementation of the refinement step in the Leiden algorithm is shown in @leiden_algorithm.
Although the Leiden algorithm is more performant than the Louvain algorithm, it is more complex to implement due to the refinement phase.
Because of this added complexity, we opted to use the Louvain algorithm as the default clustering algorithm for our solution.
However, the tool allows easy integration of additional algorithms such as the Leiden algorithm.
|
|
https://github.com/coljac/typst-dnd5e | https://raw.githubusercontent.com/coljac/typst-dnd5e/main/lib.typ | typst | MIT License | #let darkred = rgb("#540808")
#let darkyellow = rgb("#fcba03")
#let dnd = smallcaps("Dungeons & Dragons")
#let dndmodule(title: "",
author: "",
subtitle: "",
cover: none,
font_size: 12pt,
paper: "a4",
logo: none,
fancy_author: false,
body) = {
set document(author: author, title: title)
// set heading(numbering: "1.1")
show heading: it => text(
size: 1.5em,
fill: darkred,
weight: "regular",
// style: "italic",
smallcaps(it.body)
)
show heading.where(
level: 2
): it => text(
size: 1.5em,
fill: darkred,
weight: "regular",
)[
#box(width: 100%, inset: (bottom: 4pt), stroke: (bottom: 1pt + darkyellow))[#smallcaps(it.body)]
]
// Page settings
set page(paper,
flipped: false,
margin: (left: 15mm, right: 15mm, top: 30mm, bottom: 30mm),
numbering: "1",
number-align: start,
columns: 2,
background: image("img/background.jpg", width: 110%),
footer: locate(loc => {
if loc.page() > 1 {
place(left+bottom, image("img/footer.svg", width: 100%))
align(center)[#loc.position().page]
}})
)
if subtitle.len() > 0 {
subtitle = subtitle + "\n"
}
// FRONT PAGE
/* page(background: image(cover, height: 100%), margin: (top: 10mm, bottom: 5mm), */
page(background: cover, margin: (top: 10mm, bottom: 5mm),
columns: 1)[
#place(
top + center,
box(fill: rgb("#00000066"), inset: 10%, text(fill: white, size: 60pt, weight: 800, upper(title)))
)
#if subtitle.len() > 0 {
place(
bottom + center,
dy: -0.2cm,
box(width: 80%, fill: rgb("#00000066"), inset: (left:10pt, right:10pt, top:10pt, bottom: 10pt), text(fill: white, size:20pt)[#subtitle #if not fancy_author {"by " + author}]
))}
#if logo != none {
place(dx: 91%, dy: 100%-2.5cm,
logo // image("img/DMsGuildLogo.jpg", width: 13%)
)
}
#if fancy_author {
place(dx: -10%, dy: 73%, image("img/fire_splash.svg", width: 60%))
place(dx: -10% + 0.7cm, dy: 73% + 0.7cm)[#text(size: 18pt, fill: white, weight: 700)[by #author]]
}
]
set text(size: font_size, lang: "en", fill: black)
body
}
#let dndtab(name, columns: (1fr, 4fr), ..contents) = [
*#smallcaps(text(size: 1.3em)[#name])*
#v(-1em)
#table(
columns: columns,
align: (col, row) =>
if col == 0 { center }
else { left },
fill: (col, row) => if calc.odd(row+1) { rgb("#aaaaaa00") } else { rgb("#aaffaa33") },
inset: 10pt,
stroke: none,
// align: horizon,
..contents
)
]
#let marginset(where) = {
if where == top {
(top: -10pt)
} else {
(bottom: -10pt)
}
}
#let pagewithfig(where, figure, contents) = [
#set page(columns: 1, margin: marginset(where))
#pagebreak()
#place(where+center, float: true, figure)
#block[#columns(2)[#contents]]
]
#let breakoutbox(title, contents) = [#place(auto, float: true)[
#box(inset: 10pt, width: 100%, stroke: (top: 2pt, bottom: 2pt), fill: rgb("#ddeedd"))[
#if title.len() > 0 {
align(left, smallcaps[*#title*])
}
#align(left)[#contents]
]
]]
#let bonus(i) = {
  // D&D 5e ability modifier: floor((score - 10) / 2), prefixed with "+" when non-negative
  let b = ""
  if i >= 10 {
    b = "+"
  }
  b + str(calc.floor((i - 10) / 2))
}
#let stat-to-str(a) = {
(str(a) + " (" + bonus(int(a)) + ")")
}
#let stats-table(stats) = {
let content = ()
for k in stats.keys() {
content.push([#text(fill: darkred, weight: 700, k)])
}
for k in stats.values() {
content.push([#text(fill: black, stat-to-str(k))])
}
table(stroke: none, columns: (1fr, 1fr, 1fr, 1fr, 1fr, 1fr), inset: 0pt, row-gutter: 5pt, align: center, ..content)
}
#let statbox(stats) = [
#box(inset: 12pt, fill: white, stroke: 1pt, width: 100%)[
#show par: set block(spacing: .6em)
#set text(size: 10pt)
#heading(outlined: false, level: 3, stats.name)
_ #stats.description _
#line(stroke: 2pt + darkred, length: 100%)
#text(fill: darkred)[*Armor Class*] #stats.ac\
#text(fill: darkred)[*Hit Points*] #stats.hp\
#text(fill: darkred)[*Speed*] #stats.speed\
#line(stroke: 2pt + darkred, length: 100%)
#stats-table(stats.stats)
#line(stroke: 2pt + darkred, length: 100%)
#for skill in stats.skillblock {
[#text(fill: darkred)[*#skill.at(0)*] #skill.at(1)\ ]
}
#line(stroke: 2pt + darkred, length: 100%)
#for trait in stats.traits {
[ _*#trait.at(0).*_ #trait.at(1)]
}
#let sections = ("Actions", "Reactions", "Limited Usage", "Equipment", "Legendary Actions")
#for section in sections {
if section in stats.keys() {
block[
#show par: set block(spacing: 1em)
#text(size: 1.3em, fill: darkred)[#box(width:100%, inset: (bottom: 3pt), stroke: (bottom: 1pt+darkyellow))[#smallcaps(section)]]
#for action in stats.at(section) {
[_*#action.at(0).*_ #action.at(1) \ ]
}
]
}
}
]
]
#let spell(spl) = [
#show par: set block(spacing: .6em)
#heading(outlined: false, level: 3, spl.name)
_#spl.spell_type _
#v(0.5em)
#for prop in spl.properties {
[*#prop.at(0):* #prop.at(1) \ ]
}
#v(0.5em)
#spl.description
]
#let trademarks = text(size: 0.9em, style: "italic")[
#dnd D&D, Wizards of the Coast, Forgotten Realms, Ravenloft, Eberron, the dragon ampersand, Ravnica and all other Wizards of the Coast product names, and their respective logos are trademarks of Wizards of the Coast in the USA and other countries.
This work contains material that is copyright Wizards of the Coast and/or other authors. Such material is used with permission under the Community Content Agreement for Dungeon Masters Guild.
All other original material in this work is copyright 2023 by the author and published under the Community Content Agreement for Dungeon Masters Guild.
]
|
https://github.com/jrihon/multi-bibs | https://raw.githubusercontent.com/jrihon/multi-bibs/main/chapters/03_chapter/discussion.typ | typst | MIT License | #import "../../lib/multi-bib.typ": *
#import "bib_03_chapter.typ": biblio
== Conclusion
#lorem(20)
|
https://github.com/daskol/typst-telegram-bot | https://raw.githubusercontent.com/daskol/typst-telegram-bot/main/README.md | markdown | MIT License | # Typst Telegram Bot
*Render math expression with typst markup language in Telegram*
## Overview
Try [@TypstBot][1] in Telegram or deploy it as follows. First, run the simple HTTP API
in front of `typst`. It uses `typst` to render `*.typ` files to `*.png`.
```shell
typst-telegram serve api \
--root-dir data \
--endpoint http://localhost:8080 \
--interface 127.0.0.1
```
Finally, one can run the Telegram bot itself as follows, with the environment variable
`TELEGRAM_BOT_TOKEN` set.
```shell
typst-telegram serve bot --endpoint http://localhost:8080
```
[1]: https://t.me/TypstBot
## Deployment
Currently, deployment is based on the Docker Compose plugin, but it requires some
preparation. We need to create the `data` directory and assign its ownership properly.
```shell
mkdir data
chown -R nobody:nobody data
```
Finally, one can run services as follows.
```shell
docker compose up -d
```
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/supercharged-dhbw/1.0.0/confidentiality-statement.typ | typst | Apache License 2.0 | #let confidentiality-statement(authors, title, university, university-location, date) = {
v(2em)
text(size: 20pt, weight: "bold", "Confidentiality Statement")
v(1em)
text("The Thesis on hand")
v(1em)
align(center,
text(weight: "bold", title)
)
v(1em)
let insitution
if (authors.map(author => author.company.name).dedup().len() == 1) {
insitution = "insitution"
} else {
insitution = "insitutions"
}
let companies = authors.map(author => author.company.name).dedup().join(", ", last: " and ")
par(justify: true, [
contains internal or confidential data of #companies. It is intended solely for inspection by the assigned examiner, the head of the mobile computer science department and, if necessary, the Audit Committee at the #university #university-location.
The content of this thesis may not be made available, either in its entirety or in excerpts, to persons outside of the examination process and the evaluation process, unless otherwise authorized by the training #insitution (#companies).
])
v(3em)
text([#authors.map(author => author.company.city).dedup().join(", ", last: " and "), #date.display(
"[day].[month].[year]"
)])
for author in authors {
v(5em)
line(length: 40%)
author.name
}
} |
https://github.com/jgm/typst-hs | https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/regression/issue20.typ | typst | Other | #let a = "
This is a
multiline string
"
#a.len()
|
https://github.com/GYPpro/Java-coures-report | https://raw.githubusercontent.com/GYPpro/Java-coures-report/main/.VSCodeCounter/2023-12-14_20-23-42/results.md | markdown | # Summary
Date : 2023-12-14 20:23:42
Directory d:\\Desktop\\Document\\Coding\\JAVA\\Rep\\Java-coures-report
Total : 35 files, 2227 codes, 87 comments, 418 blanks, all 2732 lines
Summary / [Details](details.md) / [Diff Summary](diff.md) / [Diff Details](diff-details.md)
## Languages
| language | files | code | comment | blank | total |
| :--- | ---: | ---: | ---: | ---: | ---: |
| Java | 33 | 2,048 | 85 | 377 | 2,510 |
| Typst | 1 | 178 | 2 | 39 | 219 |
| Markdown | 1 | 1 | 0 | 2 | 3 |
## Directories
| path | files | code | comment | blank | total |
| :--- | ---: | ---: | ---: | ---: | ---: |
| . | 35 | 2,227 | 87 | 418 | 2,732 |
| . (Files) | 3 | 235 | 9 | 56 | 300 |
| rubbish | 1 | 73 | 0 | 15 | 88 |
| sis1 | 1 | 26 | 0 | 3 | 29 |
| sis2 | 2 | 192 | 15 | 22 | 229 |
| sis3 | 2 | 63 | 5 | 11 | 79 |
| sis4 | 1 | 34 | 0 | 3 | 37 |
| sis5 | 5 | 501 | 31 | 81 | 613 |
| sis6 | 2 | 124 | 8 | 19 | 151 |
| sis7 | 3 | 261 | 1 | 35 | 297 |
| sis8 | 10 | 435 | 18 | 120 | 573 |
| sis9 | 5 | 283 | 0 | 53 | 336 |
Summary / [Details](details.md) / [Diff Summary](diff.md) / [Diff Details](diff-details.md) |
|
https://github.com/hongjr03/shiroa-page | https://raw.githubusercontent.com/hongjr03/shiroa-page/main/24spring.typ | typst | #import "/book.typ": book-page
#show: book-page.with(title: "2024 Spring Semester")
= 2024 Spring Semester
|
|
https://github.com/feiyangyy/Learning | https://raw.githubusercontent.com/feiyangyy/Learning/main/README.md | markdown | # Learning
These are mainly study-related notes. For now there is only higher algebra; more will be added step by step as my studies progress.
## Linear Algebra (Higher Algebra)
As an engineering student I was actually never taught higher algebra, but the domestic "linear algebra" courses are far too "abstract", and linear algebra itself has so many concepts, theorems and corollaries that it is hard to master without a proper guiding thread. Here I would like to recommend Professor Qiu Weisheng's higher algebra video course and textbook: he made me see linear algebra in a completely new light (in a way that suits Chinese students much better), and I believe he can help many more people.
Everything under linear_algebra is higher-algebra notes; they have not yet been organized into chapters.
Course videos: [Higher Algebra](https://www.bilibili.com/video/BV1jR4y1M78W/?spm_id_from=333.337.search-card.all.click)
Textbook: [Higher Algebra, 2nd Edition, by Qiu Weisheng](https://book.douban.com/subject/34778837/)
## About Typst
We really live in a wonderful time. Thanks to the appearance of Typst, I can focus on writing instead of fiddling with all kinds of environments and compilers. All the study notes in this repository are written in Typst.
As a beginner, I have noticed at least the following benefits of using Typst:
1. Moderately complex formulas can be written with a fairly low learning cost
2. Excellent real-time preview in VSCode, with jump-to-source support
3. One-click PDF generation, and even the default layout looks clean and natural
I hope more people will get to know and use Typst. |
|
https://github.com/hrutvikyadav/typst | https://raw.githubusercontent.com/hrutvikyadav/typst/main/TODO.md | markdown |
# See later
- [ ] how to use excalidraw with typst
|
|
https://github.com/lucifer1004/leetcode.typ | https://raw.githubusercontent.com/lucifer1004/leetcode.typ/main/solutions/s0006.typ | typst | #import "../helpers.typ": *
#let zigzag-conversion-ref(s, numRows) = {
if numRows == 1 {
return s
}
let s = s.clusters()
let n = s.len()
let ret = ()
let cycleLen = 2 * numRows - 2
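  // one zigzag period covers a full column plus the diagonal back up, i.e. 2 * numRows - 2 characters;
  // row i therefore sees characters at index j + i and, for middle rows, at j + cycleLen - i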
for i in range(numRows) {
for j in range(0, n - i, step: cycleLen) {
ret.push(s.at(j + i))
if i != 0 and i != numRows - 1 and j + cycleLen - i < n {
ret.push(s.at(j + cycleLen - i))
}
}
}
ret.join()
}
|
|
https://github.com/bryceberger/typst-lsp-sem-tok | https://raw.githubusercontent.com/bryceberger/typst-lsp-sem-tok/main/readme.md | markdown | Simple language server for `Typst` that only provides semantic tokens.
|
|
https://github.com/SillyFreak/typst-scrutinize | https://raw.githubusercontent.com/SillyFreak/typst-scrutinize/main/CHANGELOG.md | markdown | MIT License | # [unreleased](https://github.com/SillyFreak/typst-scrutinize/releases/tag/)
## Added
## Removed
## Changed
## Migration Guide from v0.1.X
---
# [v0.3.0](https://github.com/SillyFreak/typst-scrutinize/releases/tag/v0.3.0)
Scrutinize 0.3.0 is a major breaking release and adds compatibility with Typst 0.12 (this version was released when 0.12.0-rc1 was available).
## Added
- the README now contains images of an example exam
- the task kinds `free-form.lines()`, `free-form.grid()` and `gap.gap()` were added
- `free-form` and `gap` tasks have additional configuration options:
- `stretch` resizes the response space relative to the size of the sample solution
- `placeholder` adds content to show in the answer space in the unsolved exam. This is useful for tasks where something existing needs to be completed.
- tasks can now be fetched in a "scope" of the document, allowing e.g. multiple independent exams
## Changed
- the module structure was changed
- the `question` module was renamed to `task` to be shorter and more general
- `questions` was renamed to `task-kinds` to match, and to be more descriptive
- from `questions`, the new `solution` module was extracted: `solution` provides facilities for working with the sample solution boolean state, and different task kinds utilize this for displaying different kinds of information
- `task-kinds` doesn't directly hold the task functions; it holds submodules that group them
- the `q()` function (now `t()`) does not wrap a whole task but instead attaches to the preceding heading
- tasks can now have subtasks, by using nested headings
---
# [v0.2.0](https://github.com/SillyFreak/typst-scrutinize/releases/tag/v0.2.0)
Scrutinize 0.2.0 updates it to Typst 0.11.0, using context to simplify the API and --input to more easily specify if a sample solution is to be generated. Some documentation and metadata errors in the 0.1.0 submission were also corrected.
## Added
- specify solution state via `--input solution=true`
## Changed
- functions that formerly took callback parameters to give access to state now depend on context being provided and simply return a value
---
# [v0.1.0](https://github.com/SillyFreak/typst-scrutinize/releases/tag/v0.1.0)
Initial Release
|
https://github.com/npujol/chuli-cv | https://raw.githubusercontent.com/npujol/chuli-cv/main/modules/skills.typ | typst | MIT License | #import "styles.typ": *
#let render-skills(skills: ()) = {
for skill in skills {
box(
rect(
stroke: skills-style.stroke,
radius: skills-style.radius,
skill
)
)
h(skills-style.margins.between-skill-tags)
}
} |
https://github.com/polarkac/MTG-Stories | https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/032%20-%20Ixalan/007_The%20Race%2C%20Part%202.typ | typst | #import "@local/mtgstory:0.2.0": conf
#show: doc => conf(
"The Race, Part 2",
set_name: "Ixalan",
story_date: datetime(day: 18, month: 10, year: 2017),
author: "<NAME> & <NAME>",
doc
)
= VRASKA
The river was getting too narrow for comfort. Vraska looked over the edge and saw the riverbed less than a body length below.
Two massive rocks stood like gate pillars on either side of the water ahead. Their ship would fit, but just barely.
Her blisters stung.
She eased off the left oar and began steering toward the riverbank.
Jace had given up on maintaining their invisibility several hours back. As night fell, glowing insects and other, stranger lights that Vraska could not identify lit the jungle anew. The banks on either side were too steep to bring the boat ashore. Were it not for the massive dinosaurs that undoubtedly lurked in the woods, she would have found the atmosphere quite lovely.
#figure(image("007_The Race, Part 2/01.jpg", width: 100%), caption: [Swamp | Art by Christine Choi], supplement: none, numbering: none)
"We'll sleep in the boat," Vraska said. She let go of the oars and hissed as she poked at one of her blisters.
The thaumatic compass sat on the plank between the two Planeswalkers. Jace picked it up and looked where it was pointing. "That thing would be more useful if it told us how far we had to go," said Vraska as she stretched one arm out, then the other. She laced her fingers together and sighed with relief.
Jace didn't answer.
He looked up, and the magic in his eyes lit up the contours of his face. A large draft horse materialized above them, glowing a delicate blue against the night sky, and ran upward into the canopy.
The spectral horse would serve as a beacon for Malcolm.
#emph[I hope the rest of the crew arrives soon.]
The air was thick and windless. It smelled of growing things. Sap, rot, things dying and eating and growing on top of all the other dying and eating things. Vraska remembered her crew would sing on windless nights like these when they were trapped at sea. She loved those communal moments most. She and her tribe, enemies of all but each other.
"#emph[A castle grows in Old Below] ," she sang.
Jace looked at her like she had grown a second head. Vraska smiled and finished the verse.
"#emph[Its windows shine with an ancient glow,Some wander its maze, a mess of decay] —"
Vraska paused. Jace was listening carefully.
"You want me to keep going?" she asked with a tired smile. Jace smiled.
She sat up and kept her voice down. Perhaps music would deter whatever dinosaurs might be listening.
"#emph[. . .] #emph[ and the Kingdom of Rot will rise one day.] "
Jace made a tired little noise of approval. "Cheery song."
"The Golgari have little to be cheery about." Vraska sat back with her eyes closed.
Jace's voice was slow with sleep. "Breeches taught me a song."
"The one about figs?"
"It's a rude song. Very rude. He's a rude little goblin."
Jace went quiet after that, and a moment later he was lost to sleep. Vraska wondered if he could do that on command.
Small winged creatures chirped overhead, and night birds sang deep in the jungle.
She opened a golden eye and gazed back at Jace. At the second-most-dangerous telepath in the Multiverse.
#emph[He could break my mind as easily as I can sing a song.]
And yet . . . he would not. He would never. Not when he listened like he had (like #emph[no one ] had).
Vraska knew in that moment that, memory or no, this was a man she could trust—and one who would trust her in return. She did not #emph[need ] another person to be complete, nor did she #emph[need ] validation of who she was. And if he weren't interested, that was fine—she had a history book at home to finish. But if he were interested, Vraska imagined he would make her tea if she was upset. He would #emph[listen] when she needed to be heard, cheer her on to her own victories. All in all, not a bad prospect. Perhaps she'd ask him on a date when all this was over. She hadn't been on one of those in a while. For now, though, Vraska was happy as things were. A simple, straightforward race with a good friend at her side—#emph[that ] was what she needed.
Vraska couldn't wait to petrify whoever had stolen his memories.
The glow of the plants around and the stars above made their cold little boat feel warm in the shadow of the jungle, and as Vraska closed her eyes, she felt the cool breeze of invisibility cover her once more.
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
= JACE
Jace slept soundly after his watch. The quiet and the open air were welcome changes from the months he had spent sleeping in a hammock surrounded by the rest of the crew.
Vraska and Jace abandoned their boat the next morning. They rowed to the shore and left the ship on the riverbanks.
Masses of rock and forest floor seemed to jut out at odd angles, and any semblance of a path was lost in the noise and chaos of the jungle in daylight. Vraska got out her sword as a makeshift machete to clear the way.
Eventually, the two came to a wide, clear path. Vraska put her sword away in relief.
"About time. Sword blisters fall in about the same place as oar blisters," she groaned.
Jace's brows furrowed. "We may not want to walk here."
He pointed up at how the clearing ran through the canopy. "This path was probably made by dinosaurs."
Vraska sighed. "So all of this path was worn by dinosaurs crossing it?"
"No, it was cut by dinosaur loggers," Jace explained further in perfect, sarcasm-less deadpan. Vraska snorted a laugh.
Jace shook his head gravely. "Don't insult the noble trade of dinosaur forestry."
Vraska's laugh was interrupted by an odd smell in the air.
A thick cloud of dark smoke suddenly flooded the grove around them.
The smoke was cloying, an inky mist with the vague scent of myrrh that enveloped the trees, obscuring what little light broke through the canopy above and turning day suddenly into night.
Jace shouted in surprise, then reached out to sense what he could with his mind.
Vraska was standing in the center of the path, grappling with a barely-visible foe. The mist was too thick for vision—he reached out to the enemy's mind, sensed the spell that was casting the darkness, and pinched it off.
The dark smoke dissipated, leaving a conquistador standing exposed. The vampire was snarling, chin covered in crusted blood, and her gold and black armor shone. The sigil of a rose was embossed on her chest plate, and the points of her helmet loomed jagged and sharp over the gorgon. The crust of dried salt on her armor led Jace to believe this was one of the survivors of the other shipwreck.
Jace held up a hand and created the illusion of a massive, disorienting storm.
Thick rain poured from the canopy above, the vibrant green of the path turned dark, and a clap of thunder sounded overhead.
Vraska appeared unfazed, but the vampire was startled. She jumped, slightly, but came to just in time to block a strike from Vraska's sword with her armored shoulder. The vampire left her sword sheathed, moved in close, and attacked in a frenzy of kicks and blows. Vraska tried to swing her sword in response, but was interrupted by a sharp jab to her jaw. She began to draw the magic necessary to petrify the vampire.
Jace again threw out his hand, reaching for the vampire's mind, but the chaos of the tussle was too much—he was too unpracticed—and a gauntlet landed a blow to his forehead. He fell back to the ground, concentration shattered.
The illusory rainstorm vanished, and the dappled sunlight flickered back into view.
Jace groggily watched as the vampire reached down to the forest floor and felt around, snagging the thaumatic compass from the ground at Jace's feet, and ran into the thick of the jungle.
Vraska swore and struggled to her feet, a hand over her own eyes and hissing in pain. She blinked away her own magic and growled in frustration.
She kicked a tree.
Jace shut his eyes and concentrated.
"We can tail her."
He opened his eyes and looked up, sending another massive horse charging into the air as a beacon for the crew.
Vraska was still fuming. "That damn vampire must have learned what I did to that other captain. We shouldn't have left the crew alive."
Jace sighed. "Objectively, you're not wrong."
Vraska kicked the tree again.
"I can find her, and we can retrieve it. And then you can kick all the trees you want." Jace said with determination.
The gorgon took a deep breath, took a moment, and nodded. She looked at Jace with a slight crease between her brows.
"Are you sure you can track her?"
"Absolutely certain."
Jace closed his eyes and concentrated.
He listened for the vampire's mind.
What he heard instead was a pair of furious internal monologues.
#emph[Tishana is too far ahead, how does that elemental move so quickly? Urge left, dodge vine, there—up ahead—Brazen Coalition man standing back to us—is that the green-skinned pirate?!]
#emph[—Slow and foolish, typical Sun Empire sloppiness. Green-skinned woman ahead, she is rumored to possess the compass. Follow the illusion, summoning a serpent to fight them off . . .—]
His eyes snapped open in surprise, and in one fluid motion, Jace whipped around with his arms crossed in front of him.
An immense, illusory, flying snake crashed into his arms and split on either side of Jace's psychic defense.
The source of the illusion was a merfolk standing precariously on the back of a massive elemental.
He looked to the source of the other mental voice, a woman wearing steel armor plated with the same feather pattern as the dinosaur she rode. A half-circle blade hung at her side, and her long braid flicked in the air as she barreled toward him.
#figure(image("007_The Race, Part 2/02.jpg", width: 100%), caption: [Slash of Talons | Art by Magali Villeneuve], supplement: none, numbering: none)
Jace's thought process leapt from idea to conclusion. He held up a hand at the oncoming human, a tingle ran down his neck, and the woman pulled back sharply on her dinosaur's reins. The beast skid to a halt. The woman atop it looked frantically to either side.
"Where did they go?!"
The merfolk's fins fluttered. "It is an illusion!"
She held out a hand, and vines whipped up from the forest floor to wrap themselves around Jace's legs.
He fell to the ground in a heap, and the invisibility he had projected vanished.
Vraska stepped out and stood in front of him. She called out to the knight and merfolk. "Wait!"
"Why are you chasing us?" she asked.
Jace gave himself permission to sweep the surface of the merfolk's mind.
"The merfolk knows about the compass."
The merfolk's fins fanned in surprise and anger.
Vraska's lip curled. "Who are you?"
Jace stood, and the vines around his feet receded. He took a place by Vraska's side and stared down their opponents.
Tishana's elemental readied itself to attack. The merfolk laid a reassuring hand on its side. "My name is Tishana, an elder of the River Heralds and protector of Orazca. One of our own heard a fruitful rumor about you, pirate."
Jace silently berated himself. That merfolk in the corner at High and Dry had overheard their conversation after all.
The knight next to the merfolk straightened her shoulders. "I am Huatli of the Sun Empire, Warrior Poet and vanquisher of interlopers."
Jace couldn't help but notice Huatli's eye twitch at the words "warrior poet."
Tishana glared at Vraska. "None may possess the city or what lies within it. Hand over the compass or die where you stand."
"If you insist," Vraska purred, her eyes beginning to shine with magical intent.
Jace held out a hand to block her gaze.
"We don't have it," he blurted.
Vraska huffed a noise of frustration and gently moved his hand away from her eyes. She crossed her arms in impatience.
The merfolk must have heard him, but her face did not betray her thoughts. Instead, she tilted her head to one side as if she were listening.
Curious, Jace dipped back into the surface of the merfolk's mind. Through some unseen connection she was sensing an intruder move through the jungle ahead. Her tether to the trees and soil underfoot was delicate, and every step of the intruder left a trail through the rainforest. Experiencing it first-hand was elating—Jace had not known that such power was possible.
The merfolk looked to Jace. "A vampire is near. Did she take the device from you and abscond?"
The knight on top of the dinosaur had a subtle amber haze about her, and her dinosaur rumbled a deep growl. Jace began to hear the movement of other dinosaurs around them.
He centered his weight and balled his fists. "The vampire took the compass from us."
Something snapped its jaws in the jungle behind them. Both Vraska and Jace jumped from the noise.
The knight smiled and turned her dinosaur to the side. She flashed a winning smile. "Thank you for your cooperation."
The merfolk swiftly climbed atop her elemental, and the two women bounded into the jungle.
As soon as they left, Vraska snapped her head to Jace.
"Can you track the vampire?"
Jace nodded, listened briefly for the vampire's mind.
He smiled.
"I can track more than just that."
Vraska nodded, and the two of them took off into the thick of the forest. As Jace ran, he sent another signal to the rest of their crew—and the illusory draft horse charged along the same path as its caster below.
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
= HUATLI
Huatli placed a hand on her mount as they ran and sent a short charge of magic through their connection.
A dinosaur perceives through scent what a human sees with their eyes, and years of training had taught Huatli how best to communicate with her mount.
#emph[Find. Blood. Decay. Vampire.]
The dinosaur sniffed the air, lowered her head for the hunt, and increased her speed.
Leaves whipped past, and Huatli allowed her eyes to adjust as the branches above began to give way to wider trees with a thinner canopy. Smaller creatures dodged as she passed, and Huatli could hear birds and dinosaurs in the canopy above screeching warnings to each other as she and her predator moved below.
"This may take some time," Huatli said.
It took nine hours.
#figure(image("007_The Race, Part 2/03.jpg", width: 100%), caption: [Unclaimed Territory | Art by Dimitar], supplement: none, numbering: none)
#figure(image("007_The Race, Part 2/04.jpg", width: 100%), caption: [Island | Art by Raoul Vitale], supplement: none, numbering: none)
They passed down steep hillsides and through empty vales, and at one point, they waded their mounts through the shallows of a lake. Every time they got close, the vampire would pull farther ahead, and every time they stopped for breath, they marveled at the tenacity of their foe.
"She's fast for a dead woman, isn't she?" Huatli panted, massaging a cramp in her thigh while her dinosaur drank thirstily from the lake.
Tishana seemed unimpressed. "The intricacies of the universe care not for how quickly the tapestry is made, only the ultimate connecting of its fibers."
For the sixth time that day, Huatli rolled her eyes.
#figure(image("007_The Race, Part 2/05.jpg", width: 100%), caption: [Forest | Art by Raoul Vitale], supplement: none, numbering: none)
#figure(image("007_The Race, Part 2/06.jpg", width: 100%), caption: [Sunpetal Grove | Art by Dimitar], supplement: none, numbering: none)
The merfolk and the knight eventually emerged on the other side of the lake.
Huatli felt the dinosaur's glee—their prey was almost within reach. Sure enough, she soon saw a gold-clad figure ahead, leaning against a tree and panting with exhaustion.
"Let me have her, Tishana!" Huatli yelled. The merfolk slowed her elemental's jog and held back.
The dinosaur drew itself closer to the ground for the attack as they approached. The vampire turned her head to see what was approaching, but she didn't have time to respond as the dinosaur opened its jaws and seized her around the waist.
The vampire screamed in surprise as Huatli's dinosaur tossed her aside into the trunk of a massive tree.
Huatli dismounted and walked toward the vampire.
Her foe was taller than she, and an angry streak of blood stained her collar. What lace poked out from her armor was damp with perspiration, and she had the look of a child who refused to wear anything other than their favorite outfit, no matter how inconvenient it may be.
"What you lack in blood you make up for in sweat," Huatli said, landing a kick square in the vampire's chest. The vampire stumbled back into the tree with a breathless grunt. She panted and pulled at the collar of her armor.
Huatli grinned. "What, were there no jungles in Torrezon? Are you uncomfortable?"
Her eyes lit with an amber glow, and her dinosaur rumbled a low growl.
#emph[Seize] , Huatli instructed. The dinosaur jolted forward and grabbed the vampire once more with her jaws.
The bite was not hard enough to pierce the armor, but it was sufficient to hoist the vampire off the ground. The vampire was flailing in protest, trying to draw her sword while batting and clawing at the dinosaur's thick hide.
"Shake," Huatli said aloud.
The dinosaur shook the vampire up and down, and the conquistador wailed in response.
An odd-looking compass flung out of her pocket and hit the ground.
Huatli kneeled over and picked it up. It was beautiful and elaborate, humming with an energy she could feel through the palm of her hand.
#emph[Release] , Huatli instructed.
The vampire, drenched in spit, hit the ground with a slimy thud. Huatli sensed for the nearest meat-eating predator and summoned it with a burst of magic and an invitation—#emph[Feast!] She felt as the raptor bounded toward her in the jungle. Huatli quickly climbed up on her saddle, and her mount took off into the jungle.
The greatest warriors of the Sun Empire never killed, but they would never let a hungry beast go without a meal.
Huatli trotted toward Tishana with a grin on her face. "Run, before the vampire can catch up! I got the compass!"
The merfolk smiled in response. Her teeth were like little knives in a well-organized row. "Wonderful!"
Tishana took the compass and looked it over, turning it over in her hands and investigating it as carefully as one would a sacred text.
She narrowed her eyes at the object, then gave Huatli a sly look.
The compass began to emit a pulsing amber light straight ahead.
The fins on the side of Tishana's face fluttered, and she closed her eyes.
Huatli closed her mouth and waited. She knew the River Herald was sensing something she couldn't see. After a moment, the merfolk's eyes fluttered open with an astonished wonder.
"The end of our peregrination is near."
Huatli was too excited to roll her eyes this time. "Really?"
"It is part of the land around it, yet separate, to keep it hidden. It does not move, but the way #emph[to] it is enchanted to change . . ." Tishana closed her eyes again and pointed. Her finger was parallel to the line of the compass. "It is a half day's travel that way."
Huatli nodded resolutely. "Then we must not wait!"
Tishana paused.
Her mount shifted ever so slightly away from Huatli. Her eyes darted to the compass.
Huatli's mood turned defensive. "Tishana, we had a deal we would go together."
"Yes," said the merfolk, "we did."
Huatli lunged for the compass, but just as she grabbed it she was interrupted by the slap of a large flap of canvas covering her face and knocking her off her mount.
Huatli hit the ground, her body entirely wrapped up in a massive sheet of cloth. She tried to wiggle her way free, but the canvas around her grew tighter. Beyond the fabric, she could hear her dinosaur screech and cry before being suddenly silenced. The silence was broken by the cheers of about a dozen people.
The Brazen Coalition.
A familiar female voice laughed. "Let her out, Amelia."
The fabric lifted Huatli back to her feet and spun her around until she was free, stumbling with dizziness from the release.
A pirate helmsmage stood with her hands at the ready, and the canvas—did she drag that sail all the way from the beach?—bound itself around Huatli's hands.
Huatli gasped. Her clawfoot was in front of her, crouching for an attack, jaws wide open . . . and entirely encased in stone.
The green-skinned pirate from before ran her hand along the side of the newly petrified statue. She knelt to Huatli's level and smiled.
"I'll be taking that compass back now." The tendrils and vines of the woman's hair writhed in smug pleasure. She took the dropped compass from Huatli's feet.
"How did you catch up to us?!" Huatli spat.
The green woman tutted and shook her head. "The vampire you were tracking followed the compass in a straight line. Across this terrain I find that is not terribly effective. It's much easier to find shortcuts with an eye in the sky and a telepath on the ground."
The siren behind her preened, and the man in blue very politely bowed his head with a smile.
"Any other questions?" asked the captain.
Huatli concentrated her fury and channeled as much energy as she could into a spell. Her eyes lit with an amber glow, and from behind her a herd of clawfoots screeched in the jungle. She would never be without a mount in these jungles.
As the dinosaurs made their way toward her, the pirates fled in the other direction. Huatli wrestled her way out of the ropes and looked for Tishana. Damn merfolk! Where did that traitor go?!
Her answer came with a faraway rumble.
Huatli didn't want to wait to find out what it was.
#figure(image("007_The Race, Part 2/07.jpg", width: 100%), caption: [Grasping Current | Art by Yongjae Choi], supplement: none, numbering: none)
Behind her she saw Tishana, standing with her arms outstretched, and the trees moaned with bent wood and the smash of water as she summoned a flood to careen through the jungle.
Huatli had just enough time to order the dinosaurs away again, and she sighed in relief when Tishana's summoned river rushed past her and toward her fleeing enemies.
The pirates screamed and scattered, and Huatli could have sworn she saw the green-skinned woman and the man in blue escape.
"You're on your own, Warrior Poet," Tishana said dramatically. "I must stop Kumena on my own."
Huatli rolled her eyes once more just as Tishana vanished into the thick of the jungle.
Fine! If she wants to break our bond, then that is on her! Huatli let out a colorful curse.
She once again began a spell to summon a new mount. She needed to follow the scent of the green-skinned woman. Huatli's merfolk guide may have left, but she was close enough to her goal that she didn't need Tishana any longer.
A voice caused her to gasp in surprise.
"PLANESWALKER! STOP!"
Angrath stood as tall as a tree and as wide as a snubhorn. His head was that of a horned beast, and his body rippled with barely contained power. Red-hot chains were draped over his shoulders, and he huffed in exhaustion.
Angrath.
All of this had started when the pirate had attacked her before. All of it came from whatever #emph[that] pirate did that made her see what she did. Huatli grimaced and ran in the same direction the pirates had fled.
Angrath pursued her.
"WAIT! I WANT TO TALK TO YOU!"
"I DON'T WANT TO HEAR IT!" Huatli yelled back.
Huatli looked to her right. Angrath was close behind.
She quickened her pace, but a chain whacked and wrapped around her ankle, yanking Huatli to the forest floor.
She masked her fear with a brave face, drew up a hand, and began charging a spell to summon as many dinosaurs and beasts as she could.
"Stop!" Angrath said.
He walked forward and kneeled, laying his chains cold and dark on the forest floor.
Huatli's heart was racing. She was more terrified than she ever had been. What was this killer playing at?
"You're like me," he said.
"I'll never be like you!" Huatli yelled defiantly, dramatically.
"No, idiot, not like that," Angrath replied, his eyes stern with impatience. "I will not harm you, fellow Planeswalker." Angrath stood, looking down at her.
Huatli was about to demand answers, but Angrath spoke calmly and resolutely. "Whatever prevents us from leaving this plane is locked in that city. We can help each other escape to different worlds if we find it."
A small glimmer of wonder burst through Huatli's confusion.
Angrath continued, ". . . And all we have to do is kill everyone who tries to steal Orazca from our grasp."
Huatli's hope vanished, and a sick malaise filled her gut.
#emph[Spectacular] , she thought to herself, #emph[the murder monster wants to be my friend.]
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
= VRASKA
The thaumatic compass began to vibrate in Vraska's hand.
Her heart leapt as she ran, Jace at her side and her crew at her back.
The merfolk's flood had been a smart distraction, but the crew of #emph[The] #emph[Belligerent] was not so easily washed away.
Malcolm flew up and ahead and returned with a gasp. "It's on the hills ahead!"
"Keep running!" Vraska yelled to her crew. They were so close, so incredibly close.
The trees grew differently in this part of Ixalan. Vraska and her crew had passed through a mountain range and ran now through a maze of fog and plants. Occasionally, they would run past a tree with beautiful yellow leaves, and in the rocks, beside them, veins of precious gold glittered from under the lichen and moss.
The land itself seemed eager to betray the secrets it kept.
The crew of #emph[The] #emph[Belligerent] came to a clearing, and all of them stopped in their tracks. Shining as golden beacons above the green, the spires of Orazca pierced the sky.
#figure(image("007_The Race, Part 2/08.jpg", width: 100%), caption: [Spires of Orazca | Art by Yeong-Hao Han], supplement: none, numbering: none)
The points commanded the horizon. Their masses were hidden beneath an endless barrier of trees, its bulk so immense that Vraska wondered if the hills themselves were the buried city, coated with an impenetrable tangle of jungle.
She tucked away the thaumatic compass, which pulsed and glowed to mirror the immensity of magic that surrounded them now.
"It is home to more than just the Immortal Sun. Whatever enchantment keeps us here is in there, too," she heard from behind her.
Vraska turned. Jace had caught up to her while the rest of the crew took a break before the final leg of their journey.
She nodded. "I haven't quite figured out what the Immortal Sun actually does yet. There are too many rumors to raise one theory over another."
"It may quite literally be the key to us leaving."
"It may," Vraska said. "It may also give eternal life without need for the ingestion of blood. It may make the Sun Empire undefeatable. It may be a wellspring of unimaginable power too precarious for any person to control."
"I think it's something that's not supposed to be here," Jace said. "Something introduced to this world."
Jace put a hand to his chin in thought. "It might also just be a hunk of rock. And not do anything. Maybe <NAME> is a rock collector?"
"I honestly wouldn't put it past him," Vraska shrugged. "He strikes me as a man with weird hobbies."
Jace shrugged as Amelia called to him from nearby. He went over to the rest of the crew and began chatting.
He looked so different without his hood. Vraska had never seen him without it before she rescued him from the island.
She distantly wondered if his hair was as soft as it looked.
"Are you coming, Vraska?"
"Just catching my breath. Assemble the crew."
Jace called out to the rest of them and Vraska quickly rearranged her expression into something more commanding.
As Vraska approached the crew of #emph[The] #emph[Belligerent] , the ground beneath her lurched to one side.
Her crew cried out in surprise. Malcolm took to the air, and Breeches crawled up onto Amelia's shoulder. Several crewmates had started looking around frantically for something to hold on to, but there was no escape from the jarring movement. The clearing began to shake more violently, and a long crack appeared in the rock beneath them.
"Look!" Amelia was pointing to the spires in the distance.
They were beginning to rise, higher and higher, into the air. The city itself was emerging from the jungle with every jostle of the earthquake. Vines snapped, trees were violently ripped from the ground, flocks of sunwings rose into the sky, and more and more of the city came into view.
#figure(image("007_The Race, Part 2/09.jpg", width: 100%), caption: [Art by <NAME>], supplement: none, numbering: none)
Malcolm landed next to Vraska, a look of panic in his eyes.
Vraska grabbed his shoulder. "Did it do this because we approached?"
"Someone must have arrived at the city first."
He pointed to the thaumatic compass in Vraska's hand. Sure enough, all its points were glowing with a fierceness she had never seen before.
The bellow of some giant beast sounded over the shaking of the earth.
Vraska froze, the primal noise sending a jolt of terror through her heart. Her dread only intensified when she heard a similar sound of equal volume . . . then another . . . and another.
Something had awoken.
Water began to creep through the clearing, and Vraska looked to the source. A crevasse had opened nearby, and water diverted from the river poured down into the massive, newly-formed canyon at the base of the city.
The earth shook beneath Vraska's feet, and the Golden City of Orazca rose further and further from the ground.
Now that it was free of centuries of overgrowth, she had a clear view. It was incredible.
The city itself had opened like the petals of a flower. True to its name, the structure itself was a perfect, untouched gold, ornamented with turquoise, amber, and jade. Its ramps and walkways led over churning rivers and waterfalls, and high above were strange symbols and motifs carved with care.
Vraska was filled with excitement paired with a bold desire to face and conquer whatever it was that had awoken in the distance. She called to the rest of the crew to follow her, but as she began to walk forward, another earthquake took hold and she fell to the ground.
"Vraska!"
She turned her head and gasped. The edge of the clearing they stood at had been cleaved in two, and Jace was clinging to a shifting boulder, trying not to fall over the edge.
The other pirates moved aside as water from the nearby river began to eke its way closer. The water increased in volume, and soon a torrent began threatening to wash away what remained of the ledge.
Vraska waded into the river as far as she could, then swam with the current toward Jace's position. She spat out river water and reached for his outstretched hand.
As soon as her fingers grazed his, the ground jerked sideways once more, and Jace lost his grip.
"JACE!"
Vraska watched as Jace tumbled down the side, eyes wide with terror and hands outstretched in desperation.
Vraska screamed in fury and grief, for she could not see the bottom of the waterfall.
#figure(image("007_The Race, Part 2/10.jpg", width: 100%), caption: [Art by <NAME>], supplement: none, numbering: none)
She lurched forward to try and spot his descent, and the boulder underneath her gave way.
Vraska fell, mist stinging her arms as she flailed, her hands desperately searching for a grip.
She didn't have time to scream—only enough time to rearrange her body so her feet hit the surface of the water first.
Vraska was swallowed up by the pool at the bottom.
She grasped and grabbed at the water around, trying to pull her way to the surface.
Churning water squeezed her sides, and the slamming of the waterfall dared to push her further under, but Vraska did not die so easily. Not when the object of her quest was so close.
She felt her fingers break the surface and kicked herself upward, desperate for air. She emerged, sucking in air and spitting out river water. Her feet stung through her shoes with the impact, and she could sense a bruise starting to form on her legs as she kicked. Vast walls of stone and gold had burst through the earth on either side of the water, and the risen city of Orazca towered high overhead.
Suddenly, a searing stabbing slicing pain through her temples—she cried out—and an image appeared in her mind with a flash.
#figure(image("007_The Race, Part 2/11.jpg", width: 100%), caption: [Island | Art by Richard Wright], supplement: none, numbering: none)
The image vanished and Vraska gasped in surprise and pain.
Panic flooded her veins once more and she desperately kicked toward shore, craning her neck to get her bearings as she swam. Vraska was still in Ixalan, but the image in her mind was of Ravnica.
#emph[What was that?!]
She was alarmed and confused, desperately paddling her way to where the brand-new river met the newly exposed stone of the city.
Vraska spotted Jace. He was clinging to a rock near the riverbank, blood gushing from a wound in his head and his eyes alight with magic. His face was flush with confusion and pain, a faraway look in his eyes.
#emph[Did he see it too—?!]
"Jace!" she howled, swimming closer, pulling herself and her heavy soggy clothes through the murky water, paddling to stay out of the swift current brought by the waterfall. "Jace, your head—AHH!"
#figure(image("007_The Race, Part 2/12.jpg", width: 100%), caption: [Seal of the Guildpact | Art by <NAME>], supplement: none, numbering: none)
Vraska gasped.
She wore a blue cloak and hood, and lay on the central dais of the Forum of Azor. Niv-Mizzet, the parun of the Izzet, was looking down at her, and she could make out the faces of the maze-runners from every guild of Ravnica. #emph[This is a memory] , Vraska realized. The memory was colored with meaning, belonging, responsibility. It was the day Jace became the Living Guildpact.
Suddenly, the image dissipated, vanished, and Vraska was swimming in the river once again.
#emph[He is remembering everything] , Vraska realized with dread.
Jace's memory was returning all at once, returning as a flood and spilling over the sides, and he would soon remember everything about what Vraska was. He would soon remember their grudge, remember her guild, remember his #emph[job] , and then none of what had happened over the last few months would matter. He would remember he was the Guildpact and she was an assassin and their friendship would certainly dissolve.
Vraska choked on river water as she frantically swam toward Jace on the shore. He was bleeding, broken, eyes alight and lost in the agony of remembering.
#emph[It's over] , Vraska lamented with a heavy heart as she waded through the shallows toward the mind mage. A pang of headache warned her another memory was about to flood her perception, and she shut her eyes tight to brace herself as Jace's past slipped through his control and rammed into her mind.
https://github.com/skylee03/skylee-homework-typst | https://raw.githubusercontent.com/skylee03/skylee-homework-typst/main/example.typ | typst | MIT License | #import "skylee-hw.typ": *
#show: skylee-hw.with(
institute: [Pokfulam University],
course: [TPST1001 Introduction to Typst],
session: [Autumn 2024],
title: [Assignment 1],
author: [#smallcaps("Chan") <NAME>],
)
#problem[
*#lorem(2) [17 Marks].*
#lorem(50)
$ 1 + 1 = 2. $
#lorem(10)
#parts[
(7 marks) #lorem(15)
][
(4 marks) #lorem(20)
][
(3 marks) #lorem(35)
][
(3 marks) #lorem(10)
]
#solution[
#parts[
#lorem(60)
][
#lorem(80)
][
#lorem(50)
][
#lorem(35)
]
]
]
#problem[
*#lorem(4) [12 Marks].*
#lorem(30)
#parts[
(7 marks) #lorem(25)
#solution[#lorem(45)]
][
(5 marks) #lorem(30)
#solution[#lorem(55)]
]
] |
https://github.com/yasemitee/Teoria-Informazione-Trasmissione | https://raw.githubusercontent.com/yasemitee/Teoria-Informazione-Trasmissione/main/appunti.typ | typst | // Setup
#import "template.typ": project
#show: project.with(
title: "Teoria dell'informazione e della trasmissione"
)
#pagebreak()
// Appunti
// Lezione 06/10/2023
#include "2023-10-06.typ"
#pagebreak()
// Lezione 10/10/2023
#include "2023-10-10.typ"
#pagebreak()
// Lezione 13/10/2023
#include "2023-10-13.typ"
#pagebreak()
// Lezione 20/10/2023
#include "2023-10-20.typ"
#pagebreak()
// Lezione 24/10/2023
#include "2023-10-24.typ"
#pagebreak()
//Lezione 31/10/2023
#include "2023-10-31.typ"
//pagebrak non messo perché la lezione dopo è un continuo
//Lezione 03/11/2023
#include "2023-11-03.typ"
#pagebreak()
//Lezione 11/07/2023
#include "2023-11-07.typ"
//Lezione 10/11/2023
#include "2023-11-10.typ"
#pagebreak()
//Lezione 14/11/2023
#include "2023-11-14.typ"
//Lezione 17/11/2023
#include "2023-11-17.typ"
#pagebreak()
//Lezione 24/11/2023
#include "2023-11-24.typ" |
|
https://github.com/ufodauge/master_thesis | https://raw.githubusercontent.com/ufodauge/master_thesis/main/src/template/components/cover-section/index.typ | typst | MIT License | #import "@preview/oxifmt:0.2.0" : strfmt
#import "../common/page.typ" : Page
#import "paper-code.typ" : PaperCode
#import "../../utils/date.typ" : toJapaneseCalendar
#import "../../utils/parse-student-number.typ": parseStudentNumber
#let CoverSection(
student-number: "00MM000",
title : "タイトル",
mentor : "◯◯ ◯◯",
mentor-post : "教授",
date : datetime.today(),
author : "◯◯ ◯◯",
laboratry : "◯◯研究室",
) = Page[
// Paper code
#align(right, [
#PaperCode(
parseStudentNumber(student-number)
)
  ])
// Main text
#align(center,
[
#set text(size: 16.5pt)
#v(1.6em)
#title
#v(2.5em)
指導教員 #h(2.3em) #mentor#mentor-post
#v(2.48em)
#toJapaneseCalendar(date) 提出
#v(1.8em)
理工学研究科 数理電子情報系専攻 \
情報システム工学コース
#v(3.9em)
#student-number \
#author
#v(3.4em)
埼玉大学 理工学研究科・工学部 \
#laboratry \
埼玉県さいたま市桜区下大久保255
]
)
]
|
https://github.com/katamyra/Notes | https://raw.githubusercontent.com/katamyra/Notes/main/Compiled%20School%20Notes/CS3001/Modules/ProfessionalEthics.typ | typst | #import "../../../template.typ": *
= Professional Ethics
#definition[
*Professional ethics* refers to the moral principles that govern the behavior of individuals in a professional context.
]
*How are Professional Ethics Different?*
- Customers don't know the domain, so they rely on the honesty, knowledge, and expertise of the professional
- Professionals' products affect lots of people
- People affected may be connected indirectly
- Didn't have a chance to choose the professional
- Special responsibility not just to the customer, but to the public
#let se = "Software Engineers"
== ACM Code of Ethics
+ *Public* - #se shall act consistently with the public interest
+ *Client and Employer* - #se shall act in a manner that is in the best interests of their client and employer
+ *Product* - #se shall ensure that their products meet the highest professional standards possible
+ *Judgement* - #se shall maintain integrity and independence in their professional judgement
+ *Management* - leaders shall subscribe to and promote an ethical approach to software development and maintenance
+ *Profession* - #se shall advance the integrity and reputation of the profession consistent with public interest
+ *Colleagues* - #se shall be fair to and supportive of their Colleagues
+ *Self* - #se shall participate in lifelong learning regarding the practice of their profession and shall promote an ethical approach to the profession
#definition[
*Exclusive Responsibility*: "I am responsible for paying this bill"
*Not exclusive*: "My boss is responsible for this, and so am I"
Moral responsibility is typically not exclusive
]
When the public can trust the competence and integrity of the members of a profession, every one of its members benefits. Thus, professionals have a stake in ensuring that fellow members of the profession are capable and act properly.
Especially because software engineers don't have formal certification and licensing, it is important that they act professionally and take professional responsibility to develop their ethical decision-making skills.
|
https://github.com/EpicEricEE/typst-marge | https://raw.githubusercontent.com/EpicEricEE/typst-marge/main/assets/example.typ | typst | MIT License | #import "/src/lib.typ": sidenote
#set text(size: 14pt)
#set par(justify: true)
#set page(
width: 12cm,
height: 8.05cm,
margin: 1em,
background: pad(0.5pt, box(
width: 100%,
height: 100%,
radius: 4pt,
fill: white,
stroke: white.darken(10%),
)),
)
#set page(margin: (right: 5cm))
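// Preconfigure sidenote for this example: numbered markers ("1") and 1em of padding.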
#let sidenote = sidenote.with(numbering: "1", padding: 1em)
The Simpsons is an iconic animated series that began in 1989
#sidenote[The show holds the record for the most episodes of any
American sitcom.]. The show features the Simpson family: Homer,
Marge, Bart, Lisa, and Maggie.
Bart is the rebellious son who often gets into trouble, and Lisa
is the intelligent and talented daughter #sidenote[Lisa is known
for her saxophone playing and academic achievements.]. Baby
Maggie, though silent, has had moments of surprising brilliance
#sidenote[Maggie once shot Mr. Burns in a dramatic plot twist.].
|
https://github.com/rabotaem-incorporated/calculus-notes-2course | https://raw.githubusercontent.com/rabotaem-incorporated/calculus-notes-2course/master/sections/05-complex-functions/07-conformal-maps.typ | typst | #import "../../utils/core.typ": *
== Конформные отображения
#let tOmega = $tilde(Omega)$
#ticket[Конформные отображения. Сохранение углов. Теорема о голоморфном образе области.]
#def[
Пусть $Omega$ и $tOmega$ --- две области. $f$ называется _конформным отображением_ $Omega$ на $tOmega$, если $f in H(Omega)$ и $f$ --- биекция.
]
#th[
$f in H(Omega)$, $f'(a) != 0$. Тогда $f$ сохраняет углы между кривыми, проходящими через $a$. Под "углом между кривыми" имеется в виду угол между касательными в точке пересечения.
]
#proof[
Пусть $gamma : [alpha, beta] --> CC$, и $gamma(alpha) = a$.
$
(f(gamma(z)))' = f' (gamma(z)) dot gamma'(z),\
(f compose gamma)'(alpha) = f'(a) dot gamma'(alpha).
$
Вектор $gamma'(alpha)$ повернулся на $arg f'(a)$. А аргумент производной --- это и есть направление касательной в точке. Значит обе касательные повернуться одинаково, и угол сохранится.
]
#th[
Пусть $f in H(Omega)$, $f equiv.not const$. $Omega$ --- область, значит $f(Omega)$ --- область.
]
#proof[
Линейная связность сохраняется: как была кривая между точками на области, так она в кривую и переходит.
Проверим, что $f(Omega)$ открыто. Берем $b in f(Omega)$, то есть $b = f(a)$. Существует $eps > 0$ такой, что $abs(f(z) - b) != 0$ на $abs(z - a) = eps$, так как иначе на каждой окружности найдется точка, в которой значение равно $b$, и по теореме о единственности, $f(z) equiv b$, а это не так по условию. Итак, найдется $eps > 0$ такой, что $abs(f(z) - b) > 0$ на $abs(z - a) = eps$.
Положим $r := min_(abs(z - a) = eps) abs(f(z) - b)$. Рассмотрим кружочек вокруг $b$ радиусом $r/2$, и поинтересуемся количеством решений уравнения $f(z) = w$, $space w in B_(r/2)(b)$ на $|z - a| < eps$. Можно расписать
$
(f(z) - w) = (f(z) - b) + (b - w).
$
Первое слагаемое имеет хотя бы один корень $z = a$. А еще $abs(f(z) - b) >= r > abs(b - w)$ на $abs(z - a) = eps$, тогда по теореме Руше $Nn_(f - w) = Nn_(f - b) >= 1$. Значит любая точка в окрестности $b$ радиуса $r/2$ лежит в образе. Вот и открытость.
]
#ticket[Однолистные функции. Необходимое условие однолистности (в том числе и в окрестности $oo$). Теорема Римана о конформных отображениях (без доказательства). Обобщение теоремы Лиувилля.]
#def[
$f$ --- _однолистная_ в $Omega$, если $f in H(Omega)$ и инъективная.
]
#notice[
1. Если $f$ --- конформное отображение $Omega$ на $f(Omega)$, то $f$ --- однолистная в $Omega$.
2. Если $f$ --- однолистная в $Omega$, то $f$ --- конформное отображение $Omega$ на $f(Omega)$.
]
#th[
Если $f$ однолистная, то производная нигде не $0$: $f'(z) != 0 space forall z in Omega$.
]
#proof[
От противного. Пусть $f'(a) = 0$ и $f(a) = b$. Проделаем все тоже рассуждение из доказательства предыдущей теоремы для точки $a$. Тогда количество нулей внутри какой-то окрестности $a$ функций $f - b$ и $f - w$ равны и
$
Nn_(f - w) = Nn_(f - b) >= 2,
$
так как $a$ --- кратный корень ($f'(a) = 0$). Тогда возможно 2 случая:
- $f$ в двух точках принимает значение $w$, но тогда она не однолистная (не инъективная).
- $f(z) = w$ имеет кратный корень. Но в такой точке $z_w$ обнуляется производная: $f'(z_w) = 0$.
И производная обнуляется в каждом круге, на границе которого не достигается значение $b$. Значит производная --- тождественный нуль, и $f equiv const$, а это не инъекция.
]
#follow[
1. Конформное отображение сохраняет углы между кривыми.
Это усиленная версия теоремы о сохранении углов, не требующая того, что производная в точке не $0$.
2. Если $f(z) = c_0 + c_1/z + c_2/z^2 + ...$ однолистна в окрестности $oo$, то $c_1 != 0$.
3. Если $a$ --- изолированная (неустранимая) особая точка $f$, и $f$ однолистна в проколотой окрестности $a$, то $a$ --- полюс первого порядка.
4. Если $f in H(overline(CC) without {z_0})$ однолистна, то $f$ --- дробно линейная.
]
#proof[
1. Из конформности следует однолистность, значит $f' != 0$ нигде, значит, она сохраняет углы.
2. Пусть $g(z) := f(1/z)$ однолистна в проколотой окрестности $0$. Тогда $g(z) = c_0$ не более чем в одной точке. Уменьшим окрестность так, что эта точка в нее не попадет. Доопределим
$
g(z) = cases(c_0\, &z = 0, f(1/z)\, space &z != 0).
$
Такая функция однолистна (и голоморфна) в окрестности $0$, значит $g'(0) = c_1 != 0$.
3. Пусть $a$ --- существенная особая точка. Тогда по теореме Сохоцкого, замыкание множества значений в окрестности --- все $overline(CC)$. Возьмем $z_0$ близко к $a$, и $f(z_0) = b$. Тогда можно рассмотреть небольшую окрестность $a$ радиуса $2r$, в которой не лежит $z_0$. А еще $f(0 < abs(z - a) < r)$ --- открытое множество (образ области --- область). С другой стороны, $f(abs(z - z_0) < r)$ --- тоже открытое. Из однолистности (инъективности) следует, что
$ f(abs(z - z_0) < r) sect f(0 < abs(z - a) < r) = nothing ==> f(0 < abs(z - a) < r) subset CC without f(abs(z - z_0) < r). $
Справа написано замкнутое множество, значит и замыкание лежит в нем:
$
CC = Cl f(0 < abs(z - a) < r) subset CC without f(abs(z - z_0) < r) subset CC without {b}.
$
Противоречие.
Значит $a$ --- полюс. Рассмотрим $g(z) = 1/f(z)$ --- голоморфную функцию в окрестности $a$. Тогда $g(z)$ --- однолистна в окрестности $a$, следовательно $g'(a) != 0$. Значит $а$ --- нуль первого порядка $g$, и $a$ --- полюс первого порядка $f$.
4.
- Пусть $z_0 != oo$. Если $f in H(overline(CC))$, то $f equiv const$. Если $z_0$ --- особая, то по предыдущему следствию это полюс первого порядка. Тогда рассмотрим $g(z) = f(z) - c/(z - z_0)$. Мы вычли из $f$ главную часть ее ряда Лорана, значит остаток $g$ --- голоморфная на $overline(CC)$, и $g equiv const$.
- Пусть $z_0 = oo$. Это полюс первого порядка, по предыдущему следствию (даже для бесконечности это верно --- можно подставить $1/z$ и рассмотреть окрестность $0$). Тогда аналогично $g(z) = f(z) - c z in H(overline(CC))$, и $g equiv const$. Значит $f(z) = const + c z$. В этом случае $f$ даже линейна.
]
#th(name: "Римана о конформных отображениях")[
Пусть $Omega$ и $tilde(Omega)$ --- односвязные области, и их границы составляют более чем одну точку. Пусть $z_0 in Omega$, $tilde(z)_0 in tilde(Omega)$, $alpha in RR$. Тогда существует единственное конформное отображение $f: Omega --> tilde(Omega)$ такое, что $f(z_0) = tilde(z)_0$, и $arg f'(z_0) = alpha$.
]
#notice[
Неодноточечность границы нужна. Не существует конформного отображения $CC$ в $DD = {abs(z) < 1}$, так как $f: CC --> DD$ голоморфна, и значит по теореме Лиувилля $f equiv const$. Значит нет однолистности.
]
#notice[
Односвязность тоже нужна. Доказывать не будем.
]
#follow[
Пусть $f in H(CC)$ и $f$ не принимает значения на неодноточечной кривой $gamma$. Тогда $f$ --- константа.
]
#proof[
Рассмотрим $CC without gamma$ и по теореме Римана отобразим ее в единичный круг $DD$ преобразованием $g$. Рассмотрим $g compose f$. Это отображение голоморфно на $CC$, и образ ограничен. Значит по теореме Лиувилля $g(f(z)) equiv const$. А $g$ --- биекция, значит $f equiv g^(-1) (const) = const$.
]
#th(name: "<NAME>")[
Если $f in H(CC)$ и $f$ не константа, то $f$ принимает все значения, кроме, возможно, одного, на $CC$.
]
#proof[
Не-а. Без доказательства.
]
#example[
Пусть $e^z in H(CC)$ и не обращается в $0$, поэтому одна точка и правда может не приниматься.
]
#follow[
Если $f: CC --> overline(CC)$ мероморфная, и $f$ не константа, то $f$ принимает все значения, кроме, возможно, не более двух.
]
#proof[
Пусть $f$ не принимает значения $a$, $b$ и $c$, и пусть $a != oo$. Рассмотрим
$
g(z) := 1/(f(z) - a) in H(CC).
$
Голомфорность следует из того, что $f(z) != a$. А еще $g != 1/(b - a)$ и $g != 1/(c - a)$. По малой теореме Пикара, это значит, что $g$ --- константа. Значит $f$ --- тоже.
]
#example[
$tg$ мероморфен в $CC$, и $tg != plus.minus i$. Поэтому две точки действительно могут не приниматься.
]
|
https://github.com/binhtran432k/ungrammar-docs | https://raw.githubusercontent.com/binhtran432k/ungrammar-docs/main/contents/literature-review/bdd.typ | typst | #import "/components/glossary.typ": gls
== Behavior-Driven Development (BDD) <sec-bdd>
#gls("bdd", mode:"full") is a collaborative approach to software development
that focuses on defining and delivering software features based on the desired
behavior as understood by stakeholders. Gherkin (@sec-gherkin), a #gls("dsl"),
is commonly used to describe these behaviors in a human-readable format. We
will explore the key principles, benefits, and challenges of #gls("bdd") with
Gherkin.
#gls("bdd") with Gherkin offers a valuable approach to software development by
focusing on behavior, collaboration, and testability. By effectively leveraging
#gls("bdd") and Gherkin, we can improve their ability to deliver
high-quality software that meets the needs of our users @bib-bdd.
=== Key Principles of BDD
- *Collaboration*: #gls("bdd") emphasizes collaboration among stakeholders,
including business analysts, developers, and testers, to ensure a shared
understanding of the system's requirements.
- *Focus on Behavior*: #gls("bdd") shifts the focus from technical
implementation details to the observable behavior of the system from the
user's perspective.
- *#gls("tdd", mode:"full")*: #gls("bdd") often incorporates #gls("tdd")
(@sec-tdd) practices, where tests are written before the corresponding code,
driving development towards the desired behavior.
- *Living Documentation*: Gherkin scenarios can serve as living documentation,
providing a clear and up-to-date record of the system's requirements.
=== Benefits of Using BDD with Gherkin
- *Improved Communication*: #gls("bdd") and Gherkin provide a common language
for stakeholders, facilitating effective communication and reducing
misunderstandings.
- *Enhanced Collaboration*: By focusing on shared understanding of behavior,
#gls("bdd") fosters collaboration among team members.
- *Early Validation*: Gherkin scenarios can be used to validate requirements
early in the development process, preventing costly rework later.
- *Testability*: Gherkin's structured format makes it well-suited for automated
testing, ensuring that the system meets the specified requirements.
- *Living Documentation*: Gherkin scenarios can serve as living documentation,
providing a clear and up-to-date reference for the system's behavior.
=== Gherkin in Practice
Gherkin has been successfully adopted in various development methodologies and
projects, including Agile, DevOps, and #gls("tdd") (@sec-tdd). It is commonly
used to:
- Define user stories and acceptance criteria.
- Create executable specifications for automated testing.
- Facilitate communication between stakeholders.
- Improve the overall quality and reliability of software systems.
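To make the scenario format concrete, the following is a small illustrative Gherkin feature. It is an added sketch: the feature, steps, and values are hypothetical and not taken from any particular project.

```gherkin
Feature: Cash withdrawal
  Scenario: Successful withdrawal within the available balance
    Given an account with a balance of 100 euros
    When the account holder withdraws 40 euros
    Then the remaining balance is 60 euros
    And the withdrawal is listed in the transaction history
```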
|
https://github.com/LDemetrios/Conspects-4sem | https://raw.githubusercontent.com/LDemetrios/Conspects-4sem/master/typst/sources/header.typ | typst | #import "/typst/lib/externation.typ": *
#import "/typst/styles/theme.typ": *
//#show : theme-show-rule
#import "@preview/codelst:2.0.0": *
#import "@preview/tablex:0.0.8": *
#do-not-render()
#let nobreak(body) = block(breakable: false, body)
#let centbox(body) = align(center)[
#box[
#align(left)[
#body
]
]
]
// author: gaiajack
#let labeled-box(lbl, body) = block(above: 2em, stroke: 0.5pt + foreground, width: 100%, inset: 14pt)[
#set text(font: "Noto Sans")
#place(
top + left,
dy: -.8em - 14pt, // Account for inset of block
dx: 6pt - 14pt,
block(fill: background, inset: 2pt)[*#lbl*],
)
#body
]
#let marked(fill: lucid(230), stroke: (foreground + 0.25em), body) = {
let stroke = if type(stroke) == length {
foreground + stroke
} else if type(stroke) == color {
stroke + 0.25em
} else {
stroke
}
rect(fill: fill, stroke: (left: stroke), width: 100%, body)
}
#let quote(pref: none, author: none, text) = {
[#pref]
marked(fill: lucid(235), stroke: foreground + 3pt)[
#text
#if author != none {
align(right)[--- _ #author _]
}
]
}
#let dx = $upright(d)x$
#let dy = $upright(d)y$
#let dz = $upright(d)z$
#let dw = $upright(d)w$
#let du = $upright(d)u$
#let dv = $upright(d)v$
#let dp = $upright(d)p$
#let dt = $upright(d)t$
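// Draws a slanted fraction of `a` over `b` by hand on a small canvas: numerator anchored
// above-left, a diagonal stroke across, denominator below-right. Assumes `cetz` is brought
// into scope by one of the wildcard imports above.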
#let slfrac(a, b) = box(baseline: 50% - 0.3em)[
#cetz.canvas({
import cetz.draw : *
content((0, 0), a, anchor: "bottom-right")
line((.5em, .5em), (-.2em, -1em), stroke: 1pt)
content((.35em, -.4em), b, anchor: "top-left")
})
]
#let cyrsmallcaps(body) = [
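  // Emulates small caps for Cyrillic (and Latin) text, which many fonts lack:
  // lowercase letters are uppercased and shrunk to 0.7em.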
#show regex("[а-яёa-z]") : it => text(size: .7em, upper(it))
#body
]
#let all-math-display = rest => [
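  // Show rule that promotes inline equations to display style; equations that are
  // already display-sized are left unchanged.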
#show math.equation: it => {
if it.body.fields().at("size", default: none) != "display" {
math.display(it)
} else {
it
}
}
#rest
]
#let smallcaps-headings(..level-descriptions) = (body) => {
let descr = level-descriptions.pos()
show heading : (it) => [
#set text(size: descr.at(it.level - 1).at(0))
#set align(descr.at(it.level - 1).at(1))
#cyrsmallcaps(it)
]
body
}
#let TODO(x) = rect(width: 100%, height: 5em, fill: red, stroke: 3pt + foreground)[
#set align(center + horizon)
#text(size: 1.5em, "TODO!")\ #x
]
#let to-code(data) = {
if type(data) == str {
data
} else if type(data) == content {
if data.func() == raw {
data.text
} else {
assert(false)
}
  } else if data == none {
""
} else {
assert(false)
}
}
#let offset(off, ..args, body) = pad(left: off, ..args, body)
#let codefragmentraw(start: 1, fragment) = {
sourcecode(frame: it=>it, numbers-start: start, fragment)
}
#let codefragment(start: 1, fragment, lang: none) ={
codefragmentraw(start: start, raw(fragment, lang: lang))
}
#let codeblock(body) = offset(2em, y: .5em, codefragmentraw(body))
#let full-externation-log(
files,
commands,
foreground: black,
error: rgb("#770000"),
hide-files: (),
hide-commands: (),
) = {
for file in files.keys() {
if not hide-files.contains(file) {
labeled-box(file, codefragment(files.at(file), lang: file.split(".").last()))
}
}
exec(
files,
commands,
(result) => {
let x = for i in range(calc.min(commands.len(), result.len())) {
if not hide-commands.contains(i) {
({
` $ `
let command = commands.at(i).map(arg => {
if arg.contains(regex("[^a-zA-Z0-9\-/.]")) {
"'" + arg.replace("'", "'\''") + "'"
} else { arg }
})
raw(command.join(" "), lang: "bash")
[\ ]
for line in result.at(i).output {
let clr = if (line.color == "output") { foreground } else { error }
text(fill: clr, raw(line.line))
[\ ]
}
`Process finished with exit code `
raw(str(result.at(i).code))
},)
}
}
x.join([\ #line(length: 50%, stroke: .25pt + maroon) ])
},
)
}
#let extract(file, what) = {
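  // Pulls the fragment of `file` matching `what` (via `search-fragments`) and renders it
  // with line numbers starting at the fragment's original position in the source file.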
let ext = file.split(".").last()
let file = read(file)
let fragments = search-fragments(what, file)
// assert(fragments.len() == 1)
let fragment = fragments.at(0)
let pos = file.position(fragment)
assert(pos != 0)
let lines-before = file.slice(0, pos).matches(regex("\r\n|\r|\n")).len()
codefragment(start: lines-before + 1, fragment, lang: ext)
}
#let general-style = (body) => [
#show : theme-show-rule
#show math.ast: math.dot
#show: all-math-display
#show: smallcaps-headings(
(1.8em, center),
(1.6em, center),
(1.4em, center),
(1.4em, left),
(1.4em, left),
)
#set par(justify: true)
#set raw(align: left)
#body
]
|
https://github.com/fredguth/abnt-typst | https://raw.githubusercontent.com/fredguth/abnt-typst/main/cap5.typ | typst | = O poder da comunicação em um mundo em crise
O estudo da fotografia de <NAME> obriga a compreensão de diversos aspectos que envolvem a imagem. O nosso contato com a fotografia foi, inicialmente, na rede social Facebook, mas em seguida foi vista em outros *sites* de redes sociais, nas capas dos jornais e de revistas, assim como na televisão e mesmo no rádio, que deu destaque ao fato. De maneira geral, a mídia destacou, a respeito de <NAME>, o aspecto simbólico de sua morte, muito em função da divulgação das imagens fotográficas.
Além da fotografia -- que durante muitos dias esteve entre os assuntos mais procurados nas buscas no Google[^59] --, as manifestações artísticas que se seguiram reforçaram a relevância do tema na internet e foram destaques em jornais impressos e *sites* de redes sociais. Em consequência, o interesse dos internautas pela crise migratória cresceu exponencialmente, o que transformou a morte de pessoas que tentavam atravessar o mar em busca de refúgio em outros países em um dos assuntos de maior interesse do ano de 2015 na internet.
// ![][52]
// []{=_Toc507740910 .anchor}**Figura 25 -- Destaques do Google em 2015 -- Principais Perguntas**
// Fonte: \< https://trends.google.com/trends/story/US_cu_QzMZM1EBAAALgM_en\>
// ![][53]
// []{=_Toc507740911 .anchor}**Figura 26 -- Destaques do Google em 2015 -- Principais Perguntas Globais sobre Aylan Kurdi**
// Fonte: \< https://trends.google.com/trends/story/US_cu_QzMZM1EBAAALgM_en\>
// ![][54]
// []{=_Toc507740912 .anchor}**Figura 27 -- Destaques do Google em 2015 -- Principais Perguntas sobre Refugiados no Brasil**
// Fonte: \< https://trends.google.com/trends/story/US_cu_QzMZM1EBAAALgM_en\>
// ![][55]
// []{=_Ref494896997 .anchor}**Figura 28 -- Destaques do Google em 2015 -- Interesse Global de Pesquisa por "Migrante" X "Refugiado"**
// Fonte: \< https://trends.google.com/trends/story/US_cu_QzMZM1EBAAALgM_en\>
Na Figura 28, há dois picos de interesse, ocorridos em 6 de setembro e 15 de novembro. No começo de setembro, a morte de <NAME> chamou a atenção do mundo. Em 15 de novembro, o interesse foi despertado pelas consequências do atentado assumido pelo Estado Islâmico (grupo extremista que atua em regiões do Iraque e da Síria) em Paris, entre as 21 horas e 16 minutos do dia 13 de novembro e a primeira hora da madrugada do dia 14 de novembro. Nesse atentado, morreram 129 pessoas e 350 ficaram feridas. Os ataques ocorreram de forma coordenada, em seis pontos diferentes da capital francesa: quatro restaurantes, a casa de espetáculos Bataclan (onde a tragédia foi maior) e as imediações de um estádio de futebol.
O atentado em Paris complicou o desafio político mais sensível da Europa nos últimos tempos: a acolhida de refugiados. Um passaporte sírio encontrado perto do corpo de um dos extremistas morto no atentado reforçou a rejeição de parte dos europeus pelos refugiados. A imprensa destacou esse aspecto do fato e, de novo, o tema voltou a ser destaque na internet.
Esses dados merecem atenção porque revelam comportamentos característicos da sociedade em rede (CASTELLS, 2015) e a forma de atuação de indivíduos e grupos na sociedade nesse começo de século XXI. Mais que isso, "medem" o poder da comunicação (CASTELLS, 2015) na sociedade em rede.
Estamos tratando de objetos interligados que só podem ser compreendidos na sua relação: a guerra, a crise de refugiados, a morte, a fotografia, a internet, as redes sociais, a mídia tradicional, as artes, o século XXI e seu contexto social, político, econômico, cultural e tecnológico, bem como os indivíduos, os grupos, os movimentos sociais, a comunicação. Daí por que é importante não olhar separadamente para esses objetos, mas procurar as suas conexões: por que há refugiados? Por que a foto de <NAME> alcançou tanto destaque? Como as artes e os artistas participaram desse acontecimento? A internet desempenha que papel nesse contexto? E a mídia tradicional? Quem são e como se relacionam indivíduos e grupos na sociedade em rede? Quais são e como se organizam os movimentos sociais na era digital? Que poder realmente tem a comunicação nesse momento histórico?
Certamente, essas questões compõem um quadro complexo cuja apreensão é tarefa que extrapola os objetivos estabelecidos para este trabalho. Portanto, o que se pretende a seguir é compreender alguns desses objetos em relação uns com os outros, destacando aqueles mais relacionados à comunicação, sem que a ordem em que aparecem assuma qualquer sentido valorativo.
== A internet
> *\[\...\] é o tecido de comunicação de nossas vidas, para o trabalho, para a conexão pessoal, para a formação de redes sociais, para informação, para diversão, para serviços públicos, para a política e até para a religião.*
(CASTELLS, 2015, p. 111)
Ao contrário do que se pode pensar, a internet não é uma tecnologia nova: existe desde 1969. Mas foi só a partir dos anos 1990 que seu uso se popularizou e recentemente esse uso foi impulsionado pela difusão de novas gerações de comunicação sem fio, sobretudo *smartphones*.
Segundo Castells (2015, p. 37), a rápida difusão da internet a partir de meados dos anos 1990 "resultou da interação entre o novo paradigma tecnológico, centrado nas tecnologias da informação e comunicação, e algumas outras grandes mudanças socioculturais. Uma primeira dimensão dessa mudança é a que passou a ser classificada como a ascensão da sociedade Eu-centrada \[\...\]".
Por sociedade Eu-centrada -- ou individuação --, o autor compreende o declínio das formas tradicionais de comunidade em termos de espaço, trabalho, família etc. e a construção de relações sociais que originaram uma forma de comunidade fundada em interesses, valores e projetos individuais. Esse processo é baseado na transformação do espaço (o surgimento das grandes cidades), do tempo (a compressão do tempo), do trabalho (a ascensão dos empreendimentos em rede), da cultura (a mudança da comunicação de massa baseada em mídia de massa para a intercomunicação individual baseada na internet), na crise da família patriarcal e na globalização. Esse conjunto de mudanças inclui, ainda, o desenvolvimento tecnológico: as tecnologias de rede são o suporte para essa nova estrutura social e para essa nova cultura.
De início, a internet disponibilizava conteúdos em páginas que quase não interagiam com os internautas: eles acessavam, liam e viam o que era de interesse e, no máximo, se manifestavam via e-mail. Esse estágio foi logo substituído pela chamada Web 2.0, mais aberta à colaboração dos usuários, que se tornaram protagonistas de compartilhamento de conteúdos. Foi então que surgiram serviços de relacionamentos sociais, páginas de vídeos, wikis, blogs e outros serviços com um traço em comum: a participação efetiva do usuário nos dois sentidos do tráfego de informação. Já adentramos a era da Web 3.0, focada em tecnologias que permitem personalizar a experiência do usuário e nos aproximarmos do mundo da inteligência artificial, onde a máquina passará a aprender com as ações do usuário.
Hoje, conforme Castells (2015),
> \[\...\] a atividade mais importante na internet ocorre através das redes sociais \[\...\] para todos os tipos de atividades, não só para relações de amizade ou bate-papo, mas para distribuição de marketing, e-commerce, educação, criatividade cultural, mídia e entretenimento, aplicativos de saúde e ativismo sociopolítico. (CASTELLS, 2015, p. 40)
Daí por que foi possível que artistas do mundo inteiro, em poucas horas, fizessem circular mais de cem ilustrações que reproduziam a imagem de <NAME>, num esforço claro de demonstrar sua indignação com a tragédia que abatia milhões de pessoas que fugiam de conflitos em seu país de origem e, ao mesmo tempo, de não deixar que o mundo se esquecesse muito rapidamente da imagem que se tornou símbolo dessa tragédia.
== Movimentos sociais na sociedade em rede
> *O legado dos movimentos sociais em rede terá sido afirmar a possibilidade de reaprender a conviver. Na verdadeira democracia.*
(CASTELLS, 2013, p. 177)
A *hashtag* =kiyiyavuraninsanlik parece ter nascido na Turquia, considerando o título que recebeu, e certamente não foi por acaso, uma vez que Aylan foi encontrado morto naquele país. Nem por isso constitui-se como movimento local, apenas. Em poucas horas, tornou-se um movimento global, graças à conexão que hoje permite a relação entre indivíduos e grupos que não se conhecem, falam diferentes línguas, usam o inglês como língua franca na rede, mas se inspiram uns nos outros e se comprometem com um debate global na internet. Esses indivíduos e grupos demonstram consciência sobre problemas humanitários e exibem cultura cosmopolita.
Em seu livro *Redes de indignação e esperança -- Movimentos sociais na era da internet*, Castells (2013) analisa vários movimentos que eclodiram no mundo, como a Primavera Árabe, os Indignados na Espanha, os movimentos Occupy nos Estados Unidos, Islândia, Tunísia e Egito. Embora em contextos diferentes, esses movimentos apresentaram características comuns que também estiveram presentes na *hashtag* =kiyiyavuraninsanlik. São elas: conexão e comunicação horizontais; ocupação do espaço público urbano; criação de tempo e de espaço próprios; ausência de lideranças e de programas; caráter ao mesmo tempo local e global. E tudo isso propiciado pelo modelo da internet.
Esses movimentos são espontâneos em sua origem, em geral desencadeados por uma faísca de indignação relacionada a um evento específico ou pela reprovação às ações dos governantes. Em todos os casos, mostra Castells (2013; 2015), se originaram de um "chamado" à ação, cuja origem é irrelevante, porque o que importa é a disseminação de uma mensagem que desafia o poder de autoridades políticas e da mídia.
Não é novo que indivíduos e grupos atendam a despersonificados "chamados". Já evocamos aqui a cruzada das crianças, relato do século XIII em que crianças de várias regiões da Europa, também atendendo a um "chamado", partem em direção a Jerusalém com o propósito de libertar a Terra Santa. Uma cruzada contemporânea, digital, parece ter sido empreendida por artistas que reproduziram a cena da morte de Aylan que chocou o mundo. A Figura 29 e a Figura 30 são exemplos da ação que a foto de Demir propôs tão logo foi publicada. São também parte do movimento artístico que nasceu nas redes sociais e que guardam muitos pontos em comum com os movimentos sociais de que Castells se ocupa.
// ![][57]
// []{=_Ref504841480 .anchor}**Figura 29 -- Aylan Kurdi em Mural na China**
// Fonte: \<http://www.scmp.com/news/world/middle-east/article/2061484/photo-dead-boy-beach-aylan-kurdi-boosted-donations-syrian\>
// ![][58]
// []{=_Ref504841489 .anchor}**Figura 30 -- Manifestantes Prestam Homenagem a Aylan em Marrocos**
// Fonte: \<http://www.express.co.uk/news/world/603682/beach-Aylan-Kurdi-Morocco-tribute-Syria-refugee-crisis-Turkey-David-Cameron-migrants\>
Em acordo com a lógica das redes de internet, esses movimentos são virais, não só pela rapidez da difusão da mensagem, mas também pelo efeito imitador, que resulta em movimentos brotando em toda parte. A escultura de areia da Figura 31 cumpre muito bem o papel de algo viral não só porque foi feita em menos de 3 dias, mas também por ter sido obra de um conhecido artista indiano, conectado, que se apressou a criar e a divulgar o resultado do seu trabalho, atendendo ao chamado das redes*.*
// ![][59]
// []{=_Ref504841773 .anchor}**Figura 31 -- Escultura de Areia na Praia de Puri, na Índia, Obra do Artista Sudarsan Pattnaik**
// Fonte: \<http://www.mirror.co.uk/news/world-news/drowned-syrian-boy-aylan-kurdi-6385962\>
Os movimentos sociais em rede são autorreflexivos, não são violentos, pelo menos inicialmente, mas frequentemente se envolvem em desobediência civil. Visam modificar os valores da sociedade e são políticos em um sentido fundamental. "O que esses movimentos sociais em rede estão propondo em sua prática é a nova utopia no coração da cultura da sociedade em rede: a utopia da autonomia do sujeito em face das instituições da sociedade" (CASTELLS, 2015, p. 52).
Por fim, carregam as marcas de seu tempo e de seu espaço: são amplamente realizados por jovens adultos que lidam com tranquilidade com as tecnologias digitais e não poderiam existir sem a internet e as redes horizontais de comunicação.
== O poder na sociedade em rede
> *Envolvendo-se na produção cultural da mídia de massa e desenvolvendo redes autônomas de comunicação horizontal, os cidadãos da Era da Informação se tornarão capazes de inventar novos programas para suas vidas com os materiais de seu sofrimento, medos, sonhos e* esperanças.
(CASTELLS, 2015, p. 485)
O controle da comunicação e da informação confere poder. Assim, o poder depende do controle da comunicação tanto quanto o contrapoder depende do rompimento desse controle. Nessa afirmação se resume a teoria de Castells (2015) de que o processo de formação e exercício das relações de poder se transforma no contexto organizacional e tecnológico que se origina com o surgimento de redes digitais globais de comunicação.
Isto é: nos últimos 40 anos, a autonomia dos sujeitos comunicantes em relação às corporações de comunicação -- os usuários passaram a ser emissores e receptores de mensagens -- resultou em um deslocamento do poder político, antes detido pelo Estado (poder macro) e por organizações da sociedade (poder micro), em benefício dos indivíduos que hoje se comunicam de maneira muito mais livre, seja na produção, seja na transmissão de mensagens, não sem dificuldade e não sem censura, mas com um grau muito maior de liberdade se consideradas mensagens submetidas ao controle da mídia tradicional e de censores de governo, que legitimam as formas existentes de relações de poder.
Isso porque, para o autor (2015, p. 21), "\[\...\] a forma mais fundamental de poder está na capacidade de moldar a mente humana. A maneira como sentimos e pensamos determina a maneira como agimos, tanto individual quanto coletivamente". Portanto, compreender a forma como produzimos e compartilhamos símbolos na nossa era pode significar compreender as relações de poder na sociedade em rede.
Até recentemente, a comunicação de massa se caracterizava por uma relação de um para muitos, ou seja, um sistema vertical, unidirecional e centralizado no emissor -- a televisão é o melhor exemplo desse modelo e continua a ser o principal meio de comunicação de massa nesse começo de século (CASTELLS, 2015). Nesse modelo, as possibilidades de ação (e reação) do receptor são pequenas, apesar de já se terem expandido em consequência das TIC.
O advento da autocomunicação de massa transformou esse modelo em um sistema em que todos, potencialmente, são emissores e receptores, as redes de comunicação são horizontais e interativas e têm como base a internet e a comunicação sem fio. Definitivamente, esse modelo ampliou a possibilidade de ação dos usuários.
Se concordarmos com o autor que o poder depende da capacidade de produzir símbolos e moldar a mente humana -- mais até do que a subordinação de grupos por intimidação ou violência --, reconheceremos que, quanto maior for a autonomia dada aos usuários pelas tecnologias de comunicação, maiores serão as chances de que novos valores e novos interesses circulem e atinjam a mente pública.
O movimento veiculado via redes sociais -- *hashtag* =kiyiyavuraninsanlik --, analisado sob a perspectiva da teoria do poder da comunicação de Castells, é um exemplo de como os usuários das redes sociais e da comunicação sem fio exerceram um contrapoder diante das políticas da União Europeia para refugiados.
Em julho de 2014, a Anistia Internacional publicou um relatório -- *O custo humano da "Fortaleza Europa": violações de direitos humanos de migrantes e refugiados nas fronteiras europeias*[^60] -- em que denunciava o fechamento das fronteiras da União Europeia, numa atitude clara de violação de direitos humanos de migrantes e refugiados por meio de práticas de controle de fronteiras. Segundo o relatório, a União Europeia gastou quase dois bilhões de euros na proteção de suas fronteiras externas entre 2007 e 2013. Além disso, financiou países vizinhos, como Turquia, Marrocos e Líbia, na criação de zonas-tampão em volta da Europa, num esforço para parar migrantes e refugiados antes de estes chegarem às fronteiras europeias. Aqueles que, mesmo assim, chegavam à Europa, se arriscavam a ser devolvidos imediatamente aos seus países de origem, o que constitui prática ilegal porque nega às pessoas o direito de requerer asilo.
Diante dos obstáculos cada vez maiores para chegar à Europa por terra, refugiados e migrantes se lançam por vias marítimas e todos os anos milhares de pessoas morrem tentando chegar à costa europeia.
Foi assim que <NAME> entrou para as estatísticas de refugiados mortos durante a tentativa de chegar à Europa, fugindo da guerra na Síria. E teria permanecido apenas como mais um, sem nome ou identificação, como certamente a maioria dos que têm o mesmo destino, se dependesse de estudos, relatórios, declarações, protocolos, assembleias, conselhos, organizações. Aylan continuaria no anonimato não fossem as fotografias de Demir e suas reproduções, as capas dos jornais, a internet, as redes sociais, mas, sobretudo, os usuários que se sentiram indignados e, num exercício de liberdade, se manifestaram, desafiando autoridades e fazendo ver ao mundo o que estava se passando.
Questionada sobre problemas éticos que envolveriam a distribuição de fotografia tão chocante, a fotógrafa informou, em entrevista à imprensa:
> Naquele momento, quando vi <NAME>, eu fiquei petrificada \[\...\]. A única coisa que eu poderia fazer era tornar seu clamor ouvido. \[\...\] eu pensei que poderia fazer isso ao acionar minha câmera e fazer sua foto \[\...\]. Eu testemunhei muitos incidentes com imigrantes nesta região, suas mortes, seus dramas. Espero que isso agora mude. Fiquei chocada, me senti mal por eles. A melhor coisa a fazer era tornar sua tragédia conhecida.[^61]
Revoltados com o descaso das autoridades diante da situação dramática de migrantes e refugiados, os artistas reproduziram a fotografia de Demir, destacando aspectos da política da União Europeia para refugiados ou o constrangimento imposto às Nações Unidas diante de uma situação que já se prolongava, sem que medidas emergenciais de proteção aos direitos dos migrantes e refugiados fossem tomadas.
// ![][60]
// []{=_Toc507740917 .anchor}**Figura 32 -- Rafat Alkhateeb -- o cartunista jordaniano**
// Fonte: \<https://www.counterpunch.org/2015/09/04/regime-change-refugees-on-the-shores-of-europe/\>
// ![][66]
// []{=_Toc507740918 .anchor}**Figura 33 -- Nações Unidas**
// Fonte: \<http://www.independent.com.mt/articles/2015-09-04/world-news/21-cartoons-about-dead-child-on-beach-who-is-haunting-and-frustrating-the-world-6736141577\>
Em 24 horas, o mundo inteiro conhecia a tragédia de Aylan Kurdi. A mídia tradicional e, principalmente, a internet fizeram circular o acontecimento, e as autoridades políticas responderam às críticas.
> "Nenhuma pessoa decente, sobretudo se é pai, pode deixar de se mexer com essas imagens", diz o ministro espanhol dos Negócios Estrangeiros, <NAME>lo. "No que não podemos cair", acrescenta, parafraseando o Papa, "é na globalização da indiferença". Margallo, porém, não é um espectador, mas um líder político e, como tal, reconhece que a dureza do drama o obriga a "procurar as soluções mais imediatas e contundentes possível".[^62]
>
> A socialista <NAME>, presidente da subcomissão de direitos humanos do Parlamento Europeu, foi mais enfática. "É a imagem do fracasso da Europa, do mundo desenvolvido", concluiu. "A imagem é a ponta do iceberg e, à medida que se aproxima da Europa, tomamos consciência do problema, mas na Turquia ou na Líbia enfrentam essa situação há anos", disse Valenciano, para quem a Europa não responde ao "primeiro êxodo do século XXI".[^63] [^64]
As imagens de Aylan são a síntese do drama dos refugiados e isso só foi possível porque o modelo de comunicação dos tempos atuais assim o permite. Somos uma sociedade que depende fundamentalmente da comunicação mediada para interagir com ambientes sociais e naturais. E é essa comunicação que veicula as mensagens que vão moldar a mente dos indivíduos. Nesse sentido, afirma Castells (2015, p. 2): "Se a batalha fundamental pela definição das normas da sociedade e a aplicação dessas normas no cotidiano gira em torno da formação da mente humana, a comunicação é essencial para essa batalha."
Mas não a comunicação de massa tradicional. O autor está se referindo a um universo caracterizado pela prevalência das redes horizontais de comunicação, de onde emergem múltiplas mensagens e múltiplos sentidos, construídos por atores que podem concordar sobre o sentido e discordar sobre a construção desse sentido, mas que são independentes em relação à estratégia de estabelecimento de agendas institucionais.
Já nos aproximando do momento de concluir este estudo, parece oportuno contrapor as opiniões dos autores citados no capítulo 3 -- Slovic et al. (2017), Bozdağ e Smets (2017) e Pusseti (2017) -- à visão já tão consolidada do sociólogo espanhol <NAME> sobre a sociedade em rede. A crescente desconfiança no poder da imagem, bem como na nossa capacidade de responder pelos graves problemas humanitários da atualidade, observada na sequência dos autores estudados no capítulo anterior, certamente não se harmoniza com a teoria do poder da comunicação desenvolvida por Castells e, neste capítulo, sintetizada. E é Castells, em entrevista ao jornal *<NAME>*, em 11 de maio de 2015, que vai indicar o porquê dessa desarmonia, ao mesmo tempo em que aponta possibilidades de um mundo mais justo:
> O essencial é que agora todo o planeta está conectado. Existem sete bilhões de números de telefones celulares no mundo e 50% da população adulta do planeta tem um smartphone. O percentual será de 75% em 2020. Consequentemente, a rede é uma realidade generalizada para a vida cotidiana, as empresas, o trabalho, a cultura, a política e os meios de comunicação. Entramos plenamente numa sociedade digital (não o futuro, mas o presente) e teremos que reexaminar tudo o que sabíamos sobre a sociedade industrial, porque estamos em outro contexto.[^65]
É possível que ainda estejamos muito voltados ao passado recente para enxergar no atual contexto características e tendências capazes de produzir melhores resultados para a humanidade. Temas como direitos humanos, meio ambiente, igualdade entre minorias têm ganhado mais espaço nos últimos tempos. Segundo Castells, ainda nessa entrevista, "\[\...\] a comunicação em rede oferece enormes possibilidade de incrementar a participação cidadã \[\...\]". É certo que isso não garante o despontar de um mundo mais justo, mais em paz, assim como a tecnologia não trará essa transformação por si só. Estão dadas, contudo, as ferramentas para a mudança.
|
https://github.com/EunTilofy/NumComputationalMethods | https://raw.githubusercontent.com/EunTilofy/NumComputationalMethods/main/Chapter3/Chapter3-1.typ | typst | #import "../template.typ": *
#show: project.with(
course: "Computing Method",
title: "Computing Method - Chapter3",
date: "2024.4.1",
authors: "<NAME>, 3210106357",
has_cover: false
)
// #show: rest => columns(2, rest)
*Problems:2,7,9*
#HWProb(name: "2")[
Given the data
#tablex(
columns: 10,
// auto-hlines: false,
// auto-vlines: false,
[$x$], [-1.00], [-0.75], [-0.50], [-0.25], [0], [0.25], [0.50], [0.75], [1.00],
[$y$], -0.2209, 0.3295, 0.8826, 1.4392, 2.0003, 2.5645, 3.1334, 3.7061, 4.2836
)
Fit these data with polynomials of degree one, two, and three using the least-squares principle, and write out the normal equations.
]
#solution[
#set enum(numbering: "(1)")
  Let $xm_1 = [x_1, dots.c, x_9]^T, xm_2 = [x_1^2, dots.c, x_9^2]^T,
  xm_3 = [x_1^3, dots.c, x_9^3]^T, vm = [1, dots.c, 1]^T,
  ym = [y_1, dots.c, y_9]^T in bb(R)^9$.
  + $vb("Linear fit")$ \
    Suppose the fitting polynomial is $y = a x + b$;
    then the normal equations are:
$
cases(
xm_1^T (ym - a xm_1 - b vm) = vb("0"),
vm^T (ym - a xm_1 - b vm) = vb("0").
)
arrow.double
cases(
xm_1^T xm_1a + xm_1^T vm b = xm_1^T ym,
vm^T xm_1 a + vm^T vm b = vm^T ym
)
$
    Solving this system gives $[a, b] = [2.25164667, 2.01314444]$,
    so $y^\*_1 = 2.25165 x + 2.01314$.
  + $vb("Quadratic fit")$ \
    Suppose the fitting polynomial is $y = a x^2 + b x + c$;
    then the normal equations are:
$
cases(
xm_1^T (ym - a xm_2 - b xm_1 - c vm) = vb("0"),
xm_2^T (ym - a xm_2 - b xm_1 - c vm) = vb("0"),
vm^T (ym - a xm_2 - b xm_1 - c vm) = vb("0").
)
arrow.double
cases(
xm_1^T xm_2 a + xm_1^T xm_1 b + xm_1^T vm c = xm_1^T ym,
xm_2^T xm_2 a + xm_2^T xm_1 b + xm_2^T vm c = xm_2^T ym,
vm^T xm_2 a + vm^T xm_1 b + vm^T vm c = vm^T ym,
)
$
解得:$[a, b, c] = [0.03130563, 2.25164667, 2.00010043]$,
所以 $y_2^\* = 3.13056e-2 x^2 + 2.25165 x + 2.00010$。
+ $vb("三次拟合")$ \
设拟合多项式为 $y = a x^3 + b x^2 + c x + d$,
那么正规方程组为:
$
cases(
xm_1^T (ym - a xm_3 - b xm_2 - c xm_1- d vm) = vb("0"),
xm_2^T (ym - a xm_3 - b xm_2 - c xm_1 - d vm) = vb("0"),
xm_3^T (ym - a xm_3 - b xm_2 - c xm_1 - d vm) = vb("0"),
vm^T (ym - a xm_3 - b xm_2 - c xm_1 - d vm) = vb("0").
)
arrow.double
cases(
xm_1^T xm_3 a + xm_1^T xm_2 b + xm_1^T xm_1 c + xm_1^T vm d = xm_1^T ym,
xm_2^T xm_3 a + xm_2^T xm_2 b + xm_2^T xm_1 c + xm_2^T vm d = xm_2^T ym,
xm_3^T xm_3 a + xm_3^T xm_2 b + xm_3^T xm_1 c + xm_3^T vm d = xm_3^T ym,
vm^T xm_3 a + vm^T xm_2 b + vm^T xm_1 c + vm^T vm d = vm^T ym,
)
$
解得:$[a, b, c, d] = [2.08484848e-3, 3.13056277e-02, 2.25010909, 2.00010043]$,
所以 $y^\*_3 = 2.08485e-3 x^3+ 3.13056e-2 x^2 + 2.25011 x + 2.00010$。
]
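(补充验证,非原解答内容:以上文 (1) 中的一次拟合为例,把数据代入正规方程组检查所给数值解。由于节点关于原点对称,$sum_i x_i = 0$,两个方程解耦:)
$
sum_i x_i^2 = 3.75, quad sum_i x_i y_i approx 8.4437, quad sum_i y_i approx 18.1183,
$
$
a = 8.4437 / 3.75 approx 2.2516, quad b = 18.1183 / 9 approx 2.0131,
$
与上文解得的 $a, b$ 一致。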
#HWProb(name: "7")[
设 $f(x), phi_1 (x), phi_2(x), dots.c, phi_p (x) in C_([a, b])$,
且 (3.29) 式成立,试求 $phi(x) = alpha_1 phi_1(x) + dots.c + alpha_p phi_p (x)$ 达到
$
min_(alpha_1, dots.c, alpha_p) (f-phi, f - phi)
$
之参数 $alpha_1, alpha_2, dots.c, alpha_p$。
]
#solution[
令 $g(a_1, dots.c a_p) = (f - phi, f - phi)= integral_a^b [f(x) - sum_i^p a_i phi_i (x)]^2 dx$,对 $a_i (i = 1, dots.c p)$ 求偏导得到:
$
(partial g) / (partial a_i) &= - 2 integral_a^b f(x) phi_i (x) dx + 2 sum_(j eq.not i, j= 1)^p a_j integral_a^b phi_i (x) phi_j (x) dx + 2 a_i integral_a^b phi_i^2 (x) dx \
&= -2 integral_a^b f (x) phi_i (x) dx + 2 a_i sigma_i.
$
令 $(partial g) / (partial a_i) = 0, (i = 1, dots.c p)$ 得到,
$
a_i = (integral_a^b f(x) phi_i (x) dx)/(sigma_i), i = 1, dots.c p.
$
]
#HWProb(name: "9")[
试证明如下给出的多项式是正交多项式系。
$
P_0(x) = 1, P_1(x) = x - alpha_0 \
P_(k+1) (x) = (x - alpha_k) P_k (x) - beta_(k-1) P_(k-1) (x), \
alpha_k = ((x P_k, P_k))/((P_k, P_k)), beta_k = ((x P_(k), P_(k+1)))/((P_k, P_k)) = ((P_(k+1), P_(k+1)))/((P_k, P_k)) , k = 0, 1, 2, dots.c
$
]
#Proof[
设 $(f, p) = integral_a^b w(x) f(x) g(x) dx$,
$
((x - alpha_k) P_k, P_k) = (x P_k, P_k) - ((x P_k, P_k))/((P_k, P_k)) (P_k, P_k) = 0,
$
因为 $P_1 = (x - alpha_0) P_0$,所以 $(P_0, P_1) = 0$。
下用数学归纳法证明,设当 $n leq k(k geq 1)$ 时,满足 $P_0, P_1, dots, P_n$ 是正交多项式系,对于 $P_(k+1) (x) = (x - alpha_k) P_k (x) - beta_(k-1) P_(k-1) (x)$,
$
(P_(k+1), P_(k)) &= ((x - alpha_k) P_k, P_k) - beta_(k-1) (P_(k-1), P_k) = 0, \
(P_(k+1), P_(k-1)) &= ((x - alpha_k) P_k, P_(k-1)) - beta_(k-1) (P_(k-1), P_(k-1)) \
&= (x P_k, P_(k-1)) - alpha_k (P_k, P_(k-1)) - ((x P_(k-1), P_(k)))/((P_(k-1), P_(k-1)))(P_(k-1), P_(k-1)) \
&= (x P_k, P_(k-1)) - (P_(k-1), x P_k) \
&= integral_a^b w(x) x P_k P_(k-1) dx - integral_a^b w(x) P_(k-1) x P_(k) dx = 0,
$
对于 $j = 0, dots.c , k-2$,
$
(P_(k+1), P_j) &= ((x - alpha_k) P_k, P_j) - beta_(k-1) (P_(k-1), P_j) \
&= (x P_k, P_j) - alpha_k (P_k, P_j) \
&= (x P_k, P_j) = (P_k, x P_j) \
&= (P_k, P_(j+1) + beta_(j-1) P_(j-1) + alpha_j P_j) \
&= (P_k, P_(j+1)) + beta_(j-1) (P_k, P_(j-1)) + alpha_j (P_k, P_j) \
&= 0
$
所以,$P_0, dots.c , P_(k+1) $ 也是正交多项式系。
因此题中给出的多项式是正交多项式系。
] |
|
https://github.com/next-generation-cartographers/ngc-flyer | https://raw.githubusercontent.com/next-generation-cartographers/ngc-flyer/main/main.typ | typst | #import "@preview/cades:0.3.0": qr-code
#let width = 210mm
#let height = 297mm
#let card_width = width / 2
#let card_height = height / 4
#set page(width: width, height: height, margin: 0mm)
#set text(font: "Source Sans Pro")
#let card() = {
rect(
height: card_height,
width: card_width,
stroke: luma(240),
radius: 2mm,
inset: 10mm,
{
place(
left + horizon,
{
image("Links/ngc-rgb-logotype-black.svg", width: card_width / 3)
v(3em)
qr-code("https://next-generation-cartographers.github.io/", width: 1.25cm)
place(
left + bottom,
dx: 2cm,
box(
width: card_width / 1.5,
)[
A welcoming space for the next generation of cartographers to share work, find
inspiration, and connect with each other.
],
)
},
)
},
)
}
#for c in (0, 1) {
for r in (0, 1, 2, 3) {
place(top + left, dx: c * card_width, dy: r * card_height, card())
}
}
|
|
https://github.com/RhenzoHideki/psd | https://raw.githubusercontent.com/RhenzoHideki/psd/main/trabalhoPSD_DFT/trabalhoPSD.typ | typst | #import "../typst-ifsc/template/article.typ": article
#show: doc => article(
title: "Trabalho de DFT",
subtitle: "Processamento de sinais digitais (PSD029007)",
// Se apenas um autor colocar , no final para indicar que é um array
authors:("<NAME>",),
date: "17 de março de 2024",
doc,
)
= Questão 1
As duas sequências de oitos $x_1 [n]$ e $x_2 [n]$ mostradas na figura a seguir têm DFT's $X_1 [k]$ e $X_2 [k]$, respectivamente. Determine a relação entre $X_1 [k]$ e $X_2[k]$
== Resolução Teórica
- Observando $x_1 [n]$ temos:
$x_1 [n] = 0 delta(n) + a delta(n-1) + b delta(n-2) + c delta(n-3) + d delta(n-4) + e delta(n-5) \ + 0 delta(n-6) +0 delta(n-7) + 0 delta(n-8)$
- Observando $x_2 [n]$ temos:
$x_2 [n] = d delta(n) + e delta(n-1) + 0 delta(n-2) + 0 delta(n-3) + 0 delta(n-4) + a delta(n-5) \ + b delta(n-6) + c delta(n-7) + 0 delta(n-8)$
Observando ambos, podemos desconsiderar o $delta(n-8)$ e considerar a janela de 0 a 7 , sendo assim uma janela de 8 pontos.
Com essas considerações podemos concluir que:
$x[(n-4) mod 8] -> X_2 [k] = e^((-j 2pi)/8 dot 4 k) dot X_1[k] $
== Resolução Matlab
```Matlab
%Questão 1
pkg load signal;
close all;
clear all;
clc;
N= 8;
k=0:N-1;
n=0:N-1
%Cria o vetor de impulso dos sinais
x1 = [0,1,2,3,2,1,0,0];
x2 = [2,1,0,0,0,1,2,3];
%Faz a DFT de x1
X = fft(x1);
%Desloca X1 em 4
Y = exp(j*pi*2*4*k/8).*X;
%Faz a inversa
y = ifft(Y);
%Compara o resultado com x2
subplot(211)
stem(n,x2)
title('x_2 (n)')
subplot(212)
stem(n,y)
title('x_1 [(n-4) mod 8]')
```
#figure(
image("./Figuras/q1.svg",width:100%),
caption: [
Resultados da Questão 1 \ Fonte: Elaborada pelo autor
],
supplement: "Figura"
);
#pagebreak()
= Questão 2
Suponha que temos duas sequências de quatro pontos $x[n]$ e $h[n]$, da seguinte forma:
#align(center)[
$x[n]= cos(pi n / 2)$
$h[n]=2^n$
n = 0,1,2,3.
]
\ a) Calcule a DFT de quatro pontos 𝑋[𝑘].
\ b) Calcule a DFT de quatro pontos 𝐻[𝑘].
\ c) Calcule 𝑦[𝑛] = 𝑥[𝑛]④ℎ[𝑛] (realizando a convolução circular diretamente).
\ d) Calcule 𝑦[𝑛] do item (c) multiplicando as DFT’s de 𝑥[𝑛] e ℎ[𝑛] e realizando uma DFT
inversa.
== Resolução Teórica
#set enum(numbering: "a)")
+
$
\ n = 0 -> cos(0) = 1
\ n = 1 -> cos(pi/2) = 0
\ n = 2 -> cos(pi) = -1
\ n = 3 -> cos(3/2 pi) = 0
"logo:"
x[n] = 1delta[n] - delta[n-2]
\ X[k] = 1 - e ^(-j (2pi)/4 dot 2k)
$
+
$
\ n = 0 -> 2⁰ = 1
\ n = 1 -> 2¹ = 2
\ n = 2 -> 2^2 = 4
\ n = 3 -> 2^3 = 8
"logo:"
h[n] = 1delta[n]+2delta[n-1]+4delta[n-2]+ 8delta[n-3]
\ H[k] = 1+ 2 dot e ^(-j (2pi)/4 dot 1k) + 4 dot e ^(-j (2pi)/4 dot 2k) + 8 dot e ^(-j (2pi)/4 dot 3k)
$
+
$
"temos:"
\ x[n]=[1,0,-1,0]
\ h[n]=[1,2,4,8]
"Relizando convolução circular com h[-n]: "
\ h[0]=[1,8,4,2]
\ h[1]=[2,1,8,4]
\ h[2]=[4,2,1,8]
\ h[3]=[8,4,2,1]
\ "Podemos calcular y[n] agora:"
\ y[0] = 1 dot 1 + 0 dot 8 + (-1) dot 4 + 0 dot 2 = 1 -4 = -3
\ y[1] = 1 dot 2 + 0 dot 1 + (-1) dot 8 + 0 dot 4 = 2-8 = -6
\ y[2] = 1 dot 4 + 0 dot 2 + (-1) dot 1 + 0 dot 8 = 4-1 = 3
\ y[3] = 1 dot 8 + 0 dot 4 + (-1) dot 2 + 0 dot 1 = 8-2 = 6
\ y[n] = -3delta[n] - 6delta[n-1] +3delta[n-2]+6delta[n-3]
$
+
$
\ Y[K] = (1- e ^(-j (2pi)/4 dot 2k))(1+ 2 dot e ^(-j (2pi)/4 dot 1k) + 4 dot e ^(-j (2pi)/4 dot 2k) + 8 dot e ^(-j (2pi)/4 dot 3k) )
\ 1+ 2 dot e ^(-j (2pi)/4 dot 1k) + 4 dot e ^(-j (2pi)/4 dot 2k) + 8 dot e ^(-j (2pi)/4 dot 3k) - e ^(-j (2pi)/4 dot 2k) - 2 dot e ^(-j (2pi)/4 dot 3k) -4 dot e ^(-j (2pi)/4 dot 4k) -8 dot e ^(-j (2pi)/4 dot 5k)
\ 1+ 2 dot e ^(-j (2pi)/4 dot 1k) + 3 dot e ^(-j (2pi)/4 dot 2k) + 6 dot e ^(-j (2pi)/4 dot 3k) -4 dot e ^(-j (2pi)/4 dot 4k) -8 dot e ^(-j (2pi)/4 dot 5k)
\ "temos um deslocamento de 4 e de 5 porém está sendo trabalho em apenas uma faixa de 4"
\ 1+ 2 dot e ^(-j (2pi)/4 dot 1k) + 3 dot e ^(-j (2pi)/4 dot 2k) + 6 dot e ^(-j (2pi)/4 dot 3k) -4 dot e ^(-j (2pi)/4 dot (4 - 4)k) -8 dot e ^(-j (2pi)/4 dot (5 - 4 )k)
\ 1+ 2 dot e ^(-j (2pi)/4 dot 1k) + 3 dot e ^(-j (2pi)/4 dot 2k) + 6 dot e ^(-j (2pi)/4 dot 3k) -4 dot e ^(-j (2pi)/4 dot (0)k) -8 dot e ^(-j (2pi)/4 dot (1)k)
\ Y[k] = -3 - 6 dot e ^(-j (2pi)/4 dot 1k) + 3 dot e ^(-j (2pi)/4 dot 2k) + 6 dot e ^(-j (2pi)/4 dot 3k)
\ y[n] = -3delta[n]-6delta[n-1]+3delta[n-2]+6delta[n-3]
$
== Resolução Matlab
```Matlab
%Questão 2
pkg load signal;
close all;
clear all;
clc;
N= 4;
k=0:N-1;
n=0:N-1;
%criando sinais
x = [1,0,-1,0]
h = [1,2,4,8]
X = fft(x);
H = fft(h);
%conv pela DFT
convDFT = ifft(X.*H)
%conv utilizando cconv
convDIR = cconv(x,h,4)
%plotando os sinais e comparando
subplot(211)
stem(n,convDFT);
title('Convolução pela DFT')
subplot(212)
stem(n,convDIR)
title('Convolução Direta')
```
#figure(
image("./Figuras/q2.svg",width:100%),
caption: [
Resultados da Questão 2 \ Fonte: Elaborada pelo autor
],
supplement: "Figura"
);
#pagebreak()
= Questão 3
Dois sinais de comprimento finito, $x_1 [n]$ e $x_2 [n]$, são esboçados na figura a seguir. Suponha que $x_1 [n]$ e $x_2 [n]$ sejam nulos fora da região mostrada na figura. Seja $x_3 [n]$ a convolução circular de oito pontos de $x_1 [n]$ com $x_2[n]$. Determine $x_3[n]$
== Resolução Teórica
- Os sinais dados tem a seguinte sequências:
$
x_1 [n] = [1,2,1,1,2,1,1,2]
x_2 [n] = [0,1,3,2,0,0,0,0]
$
- Realizando a inversão temporal em $x_2 [n]$:
$
\ x_2 [0] = [0,0,0,0,0,2,3,1]
\ x_2 [1] = [1,0,0,0,0,0,2,3]
\ x_2 [2] = [3,1,0,0,0,0,0,2]
\ x_2 [3] = [2,3,1,0,0,0,0,0]
\ x_2 [4] = [0,2,3,1,0,0,0,0]
\ x_2 [5] = [0,0,2,3,1,0,0,0]
\ x_2 [6] = [0,0,0,2,3,1,0,0]
\ x_2 [7] = [0,0,0,0,2,3,1,0]
$
- Calculando $x_3[n]$:
$
\ x_3 [0] = 1 dot 0 + 2 dot 0 + 1 dot 0 + 1dot 0 + 2 dot 0 + 1 dot 2 + 1dot 3 + 2 dot 1 = 7
\ x_3 [1] = 1 dot 1 + 2 dot 0 + 1 dot 0 + 1dot 0 + 2 dot 0 + 1 dot 0 + 1dot 2 + 2 dot 3 = 9
\ x_3 [2] = 1 dot 3 + 2 dot 1 + 1 dot 0 + 1dot 0 + 2 dot 0 + 1 dot 0 + 1dot 0 + 2 dot 2 = 9
\ x_3 [3] = 1 dot 2 + 2 dot 3 + 1 dot 1 + 1dot 0 + 2 dot 0 + 1 dot 0 + 1dot 0 + 2 dot 0 = 9
\ x_3 [4] = 1 dot 0 + 2 dot 2 + 1 dot 3 + 1dot 1 + 2 dot 0 + 1 dot 0 + 1dot 0 + 2 dot 0 = 8
\ x_3 [5] = 1 dot 0 + 2 dot 0 + 1 dot 2 + 1dot 3 + 2 dot 1 + 1 dot 0 + 1dot 0 + 2 dot 0 = 7
\ x_3 [6] = 1 dot 0 + 2 dot 0 + 1 dot 0 + 1dot 2 + 2 dot 3 + 1 dot 1 + 1dot 0 + 2 dot 0 = 9
\ x_3 [7] = 1 dot 0 + 2 dot 0 + 1 dot 0 + 1dot 0 + 2 dot 2 + 1 dot 3 + 1dot 1 + 2 dot 0 = 8
$
Temos que, $x_3 [2] = 9$
== Resolução Matlab
```Matlab
%questão 3
pkg load signal;
close all;
clear all;
clc;
N= 8;
k=0:N-1;
n=0:N-1;
%criando os sinais
x1 = [1,2,1,1,2,1,1,2]
x2 = [0,1,3,2,0,0,0,0]
%Fazendo a convolução de 8
x3 = cconv(x1,x2,8)
%Plotando o sinal
stem(n,x3)
ylim([0 9]);
%Valor de x3[2] = 9
```
#figure(
image("./Figuras/q3.svg",width:100%),
caption: [
Resultados da Questão 2 \ Fonte: Elaborada pelo autor
],
supplement: "Figura"
);
#pagebreak()
= Questão 4
Na figura é mostrada uma sequência de tempo discreto com seis pontos $x[n]$. Suponha que $x[n] = 0$ fora do intervalo mostrado. O valor de $x[4]$ não é conhecido e é representado como b. Observe que a amostra mostrada como b na figura não está necessariamente na escala. Sejam $X(e^(j omega))$ a TFTD de $x[n]$ e $X_1 [k]$ as amostras de $X(e^(j omega))$ a cada $pi/2$, isto é,
#align(center)[
$X_1 [k] = X(e^(j omega)) |_(omega = (k pi)/2),0<= k <= 3$
]
A sequência com quatro pontos $x_1 [n]$ que resulta da DFT inversa de quatro pontos de $X_1 [k]$ é mostrada a seguir. Com base nessa figura, você pode determinar b de modo único? Caso afirmativo, dê esse valor de b.
Resolução:
sendo $x[n] = delta[n] + 2delta[n-2] + 2delta[n-3] + b delta[n-4] + delta[n-5]$
e a DFT de quatro pontos (amostrando a TFTD em $omega = (2 k pi)/4$):
\ $X[e^(j (2 k pi)/4)] = 1 + 2 e^(j (2 k pi)/4 2) + 2e^(j (2 k pi)/4 3) + b e^(j (2 k pi)/4 4) + e^(j (2 k pi)/4 5) $
\ $X[e^(j (2 k pi)/4)] = 1 + 2 e^(j (2 k pi)/4 2) + 2e^(j (2 k pi)/4 3)+b e^(j (2 k pi)/4 0) + e^(j (2 k pi)/4) $
\ $X[e^(j (2 k pi)/4)] = 1 + 2 e^(j (2 k pi)/4 2) + 2e^(j (2 k pi)/4 3)+b + e^(j (2 k pi)/4) $
Sendo
$x_1 [n] = 4delta[n] + delta[n-1] + 2delta[n-2] + 2 delta[n-3]$
Fazendo a DFT de $x_1 [n]$:
\ $X_1 [e^(j (2 k pi)/4)] = 4 + e^(j (2 k pi)/4) + 2e^(j (2 k pi)/4 2)+ 2e^(j (2 k pi)/4 3) $
Substituindo em $X_1 [k] = X[e^(j (2 k pi)/4)]|_(omega = k pi/2) $
\ $ X_1 [k] = X[e^(j omega)] -> 1 + 2 e^(j (2 k pi)/4 2) + 2e^(j (2 k pi)/4 3)+b + e^(j (2 k pi)/4) = 4 + e^(j (2 k pi)/4) + 2e^(j (2 k pi)/4 2)+ 2e^(j (2 k pi)/4 3)
\ 1 + b = 4
\ b = 4 - 1 = 3
\ b = 3
$
#pagebreak()
= Questão 5
Na figura a seguir são mostradas duas sequências de comprimento finito $x_1 [n]$ e $x_2 [n]$. Qual é o menor N tal que a convolução circular de N pontos de $x_1 [n]$ e $x_2 [n]$ seja igual a uma convolução linear dessas sequências?
== Resolução Teórica
- Dadas as figuras, obtêm-se os seguintes pontos:
$
\ x_1 [n] = [1,-2,-1,3,0,0]
\ x_2 [n] = [0,2,0,0,-1,1]
$
Para que as convoluções produzam o mesmo resultado,
$
\ x_1 [n] = [1,-2,-1,3,0,0] -> N_1 = 4
\ x_2 [n] = [0,2,0,0,-1,1] -> N_2 = 6
\ N = N_1 + N_2 - 1 = 4 + 6 - 1 = 9
$
== Resolução Matlab
```Matlab
%questão 5
pkg load signal;
close all;
clear all;
clc;
N= 9;
k=0:N-1;
n=0:N-1;
%Cria o sinal com tamanho 9 para conv circular
x1 = [1,-2,-1,3,0,0,0,0,0]
x2 = [0,2,0,0,-1,1,0,0,0]
cc = cconv(x1,x2,9);
%Ajusta os sinais para os tamanhos originais
%Faz a convolução linear
x1 = [1,-2,-1,3]
x2 = [0,2,0,0,-1,1]
cl = conv(x1,x2);
%Compara os resultados das convoluções linear e circular
subplot(211)
stem(n,cc);
title('Convolução circular');
subplot(212)
stem(n,cl);
title('Convolução Linear');
```
#figure(
image("./Figuras/q5.svg",width:100%),
caption: [
Resultados da Questão 2 \ Fonte: Elaborada pelo autor
],
supplement: "Figura"
);
#pagebreak()
= Questão 6
Na figura a seguir é mostrada uma sequência $x[n]$ para a qual o valor de $x[3]$ é uma constante desconhecida c.
O valor da amostra com amplitude c não está necessariamente representado na escala. Considere:
$X_1 [k] = X[k] e^(j 2pi/5 3k) $
Sendo $X[k]$ a DFT de cinco pontos de $x[n]$. A sequência $x_1 [n]$ é a DFT inversa de $X_1 [k]$. Qual o valor de c?
multiplicando ambos os lados por $e^(-j 2pi/5 3k)$
$e^(-j 2pi/5 3k) X_1[k] = X[k] $
Sendo $x_1 [n]$
$\ x_1[n] = 2 delta[n] + delta[n-1] + 2delta[n-2] - delta[n-3] + 0delta[n-4]$
Sendo $x[n]$
$ \ x[n] = 2delta[n] - delta[n-1] + 0delta[n-2] + c delta[n-3] + delta[n-4]$
Calculando:
$
\ X_1[k] = X[k] dot e^(j 2pi/5 3k) -> x_1[n] = x[(n-2) mod 5]
\ x_1[n] = 2 delta[(n-2) mod 5] - delta[(n-3) mod 5] + 0delta[(n-4) mod 5] + c delta[(n-5) mod 5] + delta[(n-6) mod 5]
\ x_1[n] = c delta[n] + delta[n-1] + 2 delta[n-2] - delta[n-3] + 0delta[n-4]
\ "comparando com" x_1[n] = 2 delta[n] + delta[n-1] + 2delta[n-2] - delta[n-3] + 0delta[n-4]
\ c = 2
$
#pagebreak()
= Questão 7
Suponha que tenhamos uma sequência de 1025 pontos de dados (1 a mais do que $N = 2^(10)$). Em vez de descartar o valor final, vamos preencher a sequência com zeros até que seu comprimento seja $N = 2^(11)$, de modo que possamos usar um algoritmo FFT de raiz 2.
+ Quantas multiplicações complexas são necessárias para se computar a DFT usando um algoritmo de FFT raiz 2?
+ Quantas multiplicações complexas seriam necessárias para se computar diretamente a DFT de 1025?
== Resolução Teórica
Com o preenchimento de zeros, o comprimento usado pela FFT é $N = 2^(11) = 2048$; o cálculo direto usa os $1025$ pontos originais.
+
$
(N/2)log_2(N) -> (2048/2)log_2(2048) = 1024 dot 11 = 11264
$
+
$
N^2 -> 1025^2 = 1050625
$
== Resolução Matlab
```Matlab
%questao 7
pkg load signal;
close all;
clear all;
clc;
N = 2^11;     % comprimento apos preencher com zeros
M = 2^10 + 1; % numero de pontos originais
valor1 = (N/2)*log2(N)  % FFT raiz 2: 11264
valor2 = M^2            % DFT direta: 1050625
```
= Questão 8
Considere a sequência de comprimento finito real $x[n]$ mostrada na Figura a seguir
+ Esboce a sequência de comprimento finito $y[n]$ cuja DFT de seis pontos seja
\ $ Y[k] = W_6 ^(5k) X[k] $
\ sendo $X[k]$ a DFT de seis pontos de x[n]
+ Esboce a sequência de comprimento finito $w[n]$ cuja DFT de seis pontos seja
\ $ W[k] = Im{X[k]} $
+ Esboce a sequência de comprimento finito $q[n]$ cuja DFT de três pontos seja
\ $ Q[k] = X[2k +1], k = 0,1,2 $
== Resolução Teórica
+
$
\ x[n] = [4,3,2,1,0,0]
\ Y[k] = W_6 ^(5k) X[k]
\ y[n] = x[(n-5) mod 6]
\ y[n] = 4 delta[(n-5) mod 6] + 3 delta[(n-6) mod 6] + 2 delta[(n-7) mod 6] \ + 1 delta[(n-8) mod 6] + 0 delta[(n-9) mod 6] + 0 delta[(n-10) mod 6]
\ y[n] = 4 delta[n-5] + 3 delta[n] + 2 delta[n-1] + 1 delta[n-2] + 0 delta[n-3] + 0 delta[n-4]
$
+
$
\ W[k] = Im{X[k]}
\ W[k] = Im{4 e^(j 2pi/6 0k) + 3 e^(j 2pi/6 1k) + 2 e^(j 2pi/6 2k) + e^(j 2pi/6 3k)}
\ W[k] = Im{4 + 3cos(2pi/6 1k) + 3j sin(2pi/6 1k) + 2cos(2pi/6 2k) + 2j sin(2pi/6 2k) + 1 cos(2pi/6 3k) + j sin(2pi/6 3k) }
\ W[k] = Im{3j sin(2pi/6 1k) + 2j sin(2pi/6 2k) + j sin(2pi/6 3k) }
\ W[k] = 3 sin(2pi/6 1k) + 2 sin(2pi/6 2k) + sin(2pi/6 3k)
$
+
$
\ Q[k] = X[2k + 1], k=0,1,2;
\ X[k] = 4 + 3 e^(j (2 k pi)/6 1) + 2e^(j (2 k pi)/6 2) + 1e^(j (2 k pi)/6 3)
\ k = 0 , X[2 dot 0 + 1 ] -> X[1] = 4 + 3 e^(j (2 pi)/6 1) + 2e^(j (2 pi)/6 2) + 1e^(j (2 pi)/6 3)
\ k = 1 , X[2 dot 1 + 1 ] -> X[3] = 4 + 3 e^(j (2 pi)/6 1 dot 3) + 2e^(j (2 pi)/6 2 dot 3) + 1e^(j (2 pi)/6 3 dot 3)
\ X[3] = 4 + 3 e^(j (2 pi)/6 3) + 2e^(j (2 pi)/6 0) + 1e^(j (2 pi)/6 3)
\ k = 2 , X[2 dot 2 + 1 ] -> X[5] = 4 + 3 e^(j (2 pi)/6 1 dot 5) + 2e^(j (2 pi)/6 2 dot 5) + 1e^(j (2 pi)/6 3 dot 5)
\ X[5] = 4 + 3 e^(j (2 pi)/6 5) + 2e^(j (2 pi)/6 4) + 1e^(j (2 pi)/6 3)
$ |
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/tuhi-course-poster-vuw/0.1.0/README.md | markdown | Apache License 2.0 | # tuhi-course-poster-vuw
A Typst template for VUW course posters. To get started:
```typst
typst init @preview/tuhi-course-poster-vuw:0.1.0
```
And edit the `main.typ` example.

## Contributing
PRs are welcome! And if you encounter any bugs or have any requests/ideas, feel free to open an issue.
|
https://github.com/GuilloteauQ/typst-slides-unibas | https://raw.githubusercontent.com/GuilloteauQ/typst-slides-unibas/main/main.typ | typst | #import "theme.typ": *
#show: unibas-theme.with()
#let pres_title = "Longevity of Artifacts in Leading Parallel and Distributed Systems Conferences: a Review of the State of the practice in 2023"
#let short_title = "Longevity of Artifacts: a Review of the State of the practice in 2023"
//My awesome topic which has a very very long title"
#let presenter = "<NAME>"
#let authors = (
(name: [<NAME>], affiliation: [University of Basel, Switzerland], is_presenter: true),
(name: [<NAME>], affiliation: [University of Basel, Switzerland], is_presenter: false),
(name: [<NAME>], affiliation: [Univ. Toulouse, CNRS, IRIT], is_presenter: false),
(name: [<NAME>], affiliation: [Univ. Grenoble Alpes, INRIA, CNRS, LIG], is_presenter: false),
(name: [<NAME>], affiliation: [Univ. Grenoble Alpes, INRIA, CNRS, LIG], is_presenter: false),
)
#let dslide(raw-font-size: 20pt, ..args) = {
show raw: set text(font: "Inconsolata", weight: "semibold", size: raw-font-size)
slide(presenter: presenter, presentation-title: short_title, ..args)
}
#title-slide(title: pres_title, authors: authors)
#dslide(title: "A hilarious slide")[
You didn't expect that!
]
#new-section-slide("Intro")
#dslide(title: "A second slide")[
This is a second slide
]
#dslide(title: "A slide with enum")[
- foo
- bar
- plop
- plap
- baz
]
#new-section-slide("Conclusion")
#dslide(title: "This is the end")[
The end
]
|
|
https://github.com/BeiyanYunyi/resume | https://raw.githubusercontent.com/BeiyanYunyi/resume/main/modules_zh/skills.typ | typst | #import "../brilliant-CV/template.typ": *
#cvSection("技能与兴趣")
#cvSkill(
type: [前端框架],
info: [React 18 #hBar() Vue 3 #hBar() Solid #hBar() Svelte #hBar() Flutter],
)
#cvSkill(
type: [服务端渲染框架],
info: [Next.js 14 #hBar() Nuxt 3 #hBar() Waku #hBar() Astro],
)
#cvSkill(
type: [后端框架],
info: [Express #hBar() Fastify #hBar() Gin (Go) #hBar() Actix (Rust)],
)
#cvSkill(type: [语言], info: [JavaScript #hBar() TypeScript #hBar() Go #hBar() Rust #hBar() Dart #hBar() Python #hBar() C #hBar() FORTRAN #hBar() SQL])
#cvSkill(
type: [数据库],
info: [PostgreSQL #hBar() MySQL #hBar() SQLite #hBar() MongoDB #hBar() Redis],
)
#cvSkill(
type: [ORM],
info: [Mongoose #hBar() Typegoose #hBar() Prisma #hBar() Drizzle #hBar() Mikro-ORM #hBar() SeaORM (Rust) #hBar() gorm (Go)],
)
#cvSkill(type: [个人兴趣], info: [游泳 #hBar() 烹饪 #hBar() 阅读]) |
|
https://github.com/eduardz1/UniTO-typst-template | https://raw.githubusercontent.com/eduardz1/UniTO-typst-template/main/template/chapters/conclusions.typ | typst | MIT License | = Conclusions
A good conclusions chapter really helps to tie up the whole thesis.
#pagebreak(weak: true, to: "odd")
|
https://github.com/imtsuki/resume | https://raw.githubusercontent.com/imtsuki/resume/master/README.md | markdown | MIT License | # My Resume
   
This is my resume based on [this template](https://github.com/skyzh/typst-cv-template).
## Build
```bash
make
```
## Download
- [English version (pdf)](./resume.pdf)
- [Typst Source](./resume.typ)
|
https://github.com/typst/templates | https://raw.githubusercontent.com/typst/templates/main/badformer/lib.typ | typst | MIT No Attribution | #import "@preview/cetz:0.1.2"
// Settings.
#let pxw = 1000pt
#let pxh = 700pt
#let fov = 90deg
#let near = 0.01
#let far = 100
#let move-speed = 1
#let rot-speed = 10deg
#let jump-power = 1.3
// Geometry of a cube.
#let cube = (
vertices: (
(-0.5, 0, 0.5),
( 0.5, 0, 0.5),
( 0.5, -1, 0.5),
(-0.5, -1, 0.5),
(-0.5, 0, -0.5),
( 0.5, 0, -0.5),
( 0.5, -1, -0.5),
(-0.5, -1, -0.5),
),
faces: (
(0, 1, 2, 3, 0),
(4, 5, 6, 7, 4),
(1, 5),
(0, 4),
(2, 6),
(3, 7),
),
)
// Other geometry.
#let other = (
vertices: (
(-5, -3, 0),
(0, 0, 0),
(1, 3, 0),
(4, -3, 0),
(-1, -2, 0),
(-3, 2, 0),
(-3.5, 0.5, 0),
(1.5, 4, 0),
),
faces: (
(0, 1, 2, 3, 4, 0),
(3, 1, 5, 6),
(2, 7),
),
)
// All platforms.
#let world = (
(pos: (0, 0, -4), size: (8, 0.5, 12), color: green),
(pos: (-2.5, 0, -20), size: (3, 10, 10), color: blue),
(pos: (2.5, 0, -20), size: (3, 10, 10), color: blue),
(pos: (10, 0, -30), size: (5, 5, 5), color: eastern),
(pos: (4, 0, -40), size: (6, 100, 6), color: yellow),
(pos: (4, -5, -50), size: (3, 3, 3), color: aqua),
(pos: (4, -5, -55), size: (3, 3, 3), color: aqua),
(pos: (4, -5, -60), size: (3, 3, 3), color: aqua),
(pos: (4, -3, -70), size: (6, 100, 6), color: yellow),
(pos: (-10, -3, -70), size: (15, 100, 3), color: purple),
(pos: (19, -3, -70), size: (15, 100, 3), color: purple),
(pos: (-16, -3, -85), size: (3, 100, 15), color: green),
(pos: (-10, -3, -90), size: (3, 3, 3), color: aqua),
(pos: (-10, -1, -95), size: (3, 3, 3), color: aqua),
(pos: (-5, 1, -95), size: (3, 3, 3), color: aqua),
(pos: (-5, 3, -90), size: (3, 3, 3), color: aqua),
(pos: (-5, 5, -85), size: (3, 3, 3), color: aqua),
(pos: (-5, 7, -80), size: (3, 3, 3), color: aqua),
(pos: (23, 0, -75), size: (3, 3, 3), color: aqua),
(pos: (15, 2.75, -80), size: (3, 3, 3), color: aqua),
(pos: (10, 5.75, -87), size: (2.25, 2.25, 2.25), color: aqua),
(pos: (3, 8.75, -88), size: (1.5, 1.5, 1.5), color: aqua),
(pos: (3.5, 9, -80), size: (5, 100, 5), color: red),
)
// Creates a matrix that describes a perspective transformation.
#let perspective(aspect, fov, far, near) = {
let t = calc.tan(fov / 2)
let x = 1 / (t * aspect)
let y = -1 / t
let c = -far / (far - near)
let d = (-far * near) / (far - near)
(
x, 0, 0, 0,
0, y, 0, 0,
0, 0, c, d,
0, 0, -1, 0,
)
}
// Creates a matrix that describes a translation.
#let translation(x, y, z) = {
(
1, 0, 0, x,
0, 1, 0, y,
0, 0, 1, z,
0, 0, 0, 1,
)
}
// Creates a matrix that describes a scaling.
#let scaling(x, y, z) = {
(
x, 0, 0, 0,
0, y, 0, 0,
0, 0, z, 0,
0, 0, 0, 1,
)
}
// Creates a matrix that describes a rotation around the Y axis.
#let rotation-y(r) = {
let c = calc.cos(r)
let s = calc.sin(r)
(
c, 0, s, 0,
0, 1, 0, 0,
-s, 0, c, 0,
0, 0, 0, 1,
)
}
// Multiplies a 4x4 matrix and a 3D vector, producing a 4D vector
// with a homogenous coordinate.
#let multiply-mat-vec(m, v) = {
let (
m11, m12, m13, m14,
m21, m22, m23, m24,
m31, m32, m33, m34,
m41, m42, m43, m44,
) = m
let (v1, v2, v3) = v
let x = v1 * m11 + v2 * m12 + v3 * m13 + m14
let y = v1 * m21 + v2 * m22 + v3 * m23 + m24
let z = v1 * m31 + v2 * m32 + v3 * m33 + m34
let w = v1 * m41 + v2 * m42 + v3 * m43 + m44
(x, y, z, w)
}
// Multiplies two 4x4 matrices.
#let multiply-mat-mat(a, b) = {
let (
a11, a12, a13, a14,
a21, a22, a23, a24,
a31, a32, a33, a34,
a41, a42, a43, a44,
) = a
let (
b11, b12, b13, b14,
b21, b22, b23, b24,
b31, b32, b33, b34,
b41, b42, b43, b44,
) = b
let c11 = a11 * b11 + a12 * b21 + a13 * b31 + a14 * b41
let c12 = a11 * b12 + a12 * b22 + a13 * b32 + a14 * b42
let c13 = a11 * b13 + a12 * b23 + a13 * b33 + a14 * b43
let c14 = a11 * b14 + a12 * b24 + a13 * b34 + a14 * b44
let c21 = a21 * b11 + a22 * b21 + a23 * b31 + a24 * b41
let c22 = a21 * b12 + a22 * b22 + a23 * b32 + a24 * b42
let c23 = a21 * b13 + a22 * b23 + a23 * b33 + a24 * b43
let c24 = a21 * b14 + a22 * b24 + a23 * b34 + a24 * b44
let c31 = a31 * b11 + a32 * b21 + a33 * b31 + a34 * b41
let c32 = a31 * b12 + a32 * b22 + a33 * b32 + a34 * b42
let c33 = a31 * b13 + a32 * b23 + a33 * b33 + a34 * b43
let c34 = a31 * b14 + a32 * b24 + a33 * b34 + a34 * b44
let c41 = a41 * b11 + a42 * b21 + a43 * b31 + a44 * b41
let c42 = a41 * b12 + a42 * b22 + a43 * b32 + a44 * b42
let c43 = a41 * b13 + a42 * b23 + a43 * b33 + a44 * b43
let c44 = a41 * b14 + a42 * b24 + a43 * b34 + a44 * b44
(
c11, c12, c13, c14,
c21, c22, c23, c24,
c31, c32, c33, c34,
c41, c42, c43, c44,
)
}
// Turns normalized coordinates into screen coordinates.
#let screenify(x, y) = ((x + 0.5) * pxw, (y + 0.5) * pxh)
// Renders a single object in the level.
#let render-obj(obj, ts) = {
ts = multiply-mat-mat(ts, translation(..obj.pos))
ts = multiply-mat-mat(ts, scaling(..obj.scale))
let lines = ()
let (vertices, faces) = obj.geom
for face in faces {
let len = face.len()
for i in range(len - 1) {
let i2 = calc.rem(i + 1, len)
let v1 = vertices.at(face.at(i))
let v2 = vertices.at(face.at(i2))
let (x1, y1, z1, w1) = multiply-mat-vec(ts, v1)
let (x2, y2, z2, w2) = multiply-mat-vec(ts, v2)
if (w1 >= near or w2 >= near) and w1 <= far and w2 <= far {
// Find center point if one of the points is off-screen.
if w1 <= 0 {
let n = (w2 - near) / (w2 - w1)
x1 = (n * x1) + ((1-n) * x2)
y1 = (n * y1) + ((1-n) * y2)
z1 = (n * z1) + ((1-n) * z2)
w1 = near
} else if w2 <= 0 {
let n = (w1 - near) / (w1 - w2)
x2 = (n * x2) + ((1-n) * x1)
y2 = (n * y2) + ((1-n) * y1)
z2 = (n * z2) + ((1-n) * z1)
w2 = near
}
lines.push((
start: screenify(x1 / w1, y1 / w1),
end: screenify(x2 / w2, y2 / w2),
stroke: {
// Interpolate alpha and thickness between near and far plane.
let alpha = (far - calc.min(w1, w2)) / far
let color = rgb(..obj.color.components(alpha: false), alpha * 100%)
let thickness = 2pt * alpha
color + thickness
},
z: (z1 / w1 + z2 / w2) / 2,
))
}
}
}
lines
}
// Renders all objects into lines.
#let render-lines(ts) = {
for platform in world {
let obj = (
geom: cube,
pos: platform.pos,
scale: platform.size,
color: platform.color,
)
render-obj(obj, ts)
}
render-obj(
(
geom: other,
pos: (0, 10, 50),
scale: (-3, 3, 3),
color: rgb(255, 255, 255),
),
ts,
)
}
// Renders all objects.
#let render-world(ts) = {
for l in render-lines(ts).sorted(key: l => -l.z) {
let _ = l.remove("z")
place(line(..l))
}
}
// Renders a minimap.
#let render-minimap(state) = cetz.canvas(length: 3pt, {
import cetz.draw: *
rect((-25, -5), (30, 100), stroke: gray, fill: black)
for platform in world {
let (x, _, z) = platform.pos
let (sx, _, sz) = platform.size
rect((x - sx/2, -z + sz/2), (x + sx/2, -z - sz/2), fill: platform.color)
}
let xz((x, _, z)) = (x, -z)
arc(
xz(state.pos),
start: 90deg - state.rot - fov / 2,
stop: 90deg - state.rot + fov / 2,
fill: yellow,
anchor: "origin",
radius: 3,
mode: "PIE",
)
circle(xz(state.pos), radius: 3pt, fill: red, name: "player")
})
// Finds the distance to solid ground.
#let sonar((x, y, z)) = {
let alt = 100
for obj in world {
let (ox, oy, oz) = obj.pos
let (sx, _, sz) = obj.size
if (
(ox - sx/2 <= x and x <= ox + sx/2) and
(oz - sz/2 <= z and z <= oz + sz/2) and
(oy <= y + 0.01)
) {
alt = y - oy
}
}
alt
}
// Determines the movement delta.
#let delta((x, y, z), rot, d) = {
let beta = 90deg - rot
let dx = d * calc.cos(beta)
let dz = d * calc.sin(beta)
(x + dx, y, z - dz)
}
// Whether the player is in the winzone.
#let within-winzone((x, y, z)) = (
1.5 < x and x < 3.5 and 8.75 < y and y < 9.25 and -82.5 < z and z < -79.5
)
// Updates the game state.
#let update(state, u) = {
if u == "e" {
state.minimap = not state.minimap
return state
}
if state.dead or state.won {
return state
}
state.steps += 1
if u == "w" {
state.pos = delta(state.pos, state.rot, move-speed)
} else if u == "s" {
state.pos = delta(state.pos, state.rot, -move-speed)
} else if u == "a" {
state.rot -= rot-speed
} else if u == "d" {
state.rot += rot-speed
}
// What is the distance to solid ground?
let ground-distance = sonar(state.pos)
if u == " " and ground-distance < 0.1 {
state.accel = jump-power
}
state.pos.at(1) += calc.max(state.accel, -ground-distance)
state.accel = calc.max(state.accel - 0.3, -1)
if state.pos.at(1) < -10 {
state.dead = true
}
if within-winzone(state.pos) {
state.won = true
}
state
}
// Renders a popup.
#let render-popup(body, subtitle: none) = place(center + horizon, rect(
fill: black,
width: 200pt,
height: 100pt,
stroke: 1pt + white,
{
text(20pt, underline(stroke: 2pt, offset: 4pt, upper(body)))
if subtitle != none {
v(15pt, weak: true)
subtitle
}
}
))
// Renders the game UI.
#let render-ui(state) = block(inset: 10pt, width: 100%, height: 100%, {
place(top + left, {
[Mission status: ]
if state.dead [Failed]
else if state.won [Success]
else [Active]
})
place(top + center, {
[Assignment: Reach the goal]
})
place(top + right, {
if state.minimap {
render-minimap(state)
} else {
[Toggle GPS with E]
}
})
if state.dead {
render-popup[You died!]
} else if state.won {
render-popup(subtitle: [In #state.steps steps])[You win!]
}
})
// Renders the game in a given state.
#let render(state) = {
// Determine the view transformation.
let (x, y, z) = state.pos
let ts = multiply-mat-mat(
perspective(pxw / pxh, fov, far, near),
multiply-mat-mat(
rotation-y(state.rot),
translation(-x, -y - 1, -z),
),
)
// Render world and UI.
render-world(ts)
render-ui(state)
}
// Parses the document's body into an array of strings containing
// only the six updates "w", "a", "s", "d", " ", and "e".
#let parse-updates(source) = {
lower(source
.replace(regex("(//|#).*\n?"), "")
.split("\n")
.join())
.clusters()
.filter(c => c in "wasd e")
}
// Entry point into the game.
#let game(source) = {
set page(fill: black, width: pxw, height: pxh, margin: 0pt)
set text(font: "Cascadia Code", fill: green, 11pt)
// The initial state.
let state = (
pos: (0, 0, 0),
steps: 0,
accel: -1,
rot: 0deg,
minimap: false,
dead: false,
won: false,
)
// Handles game updates.
let updates = parse-updates(source)
for u in updates {
state = update(state, u)
}
render(state)
}
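// Usage sketch (an assumption, not part of the original file): a separate
// document can import the entry point above and pass its move string
// directly, for example:
//
//   #import "lib.typ": game
//   #game("wwww aa ss dd e")
//
// parse-updates keeps only the characters w/a/s/d/space/e, so line breaks
// and comments in the passed source are ignored.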
|
https://github.com/jgm/typst-hs | https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/layout/flow-orphan-00.typ | typst | Other | #set page(height: 100pt)
#lorem(12)
= Introduction
This is the start and it goes on.
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/unichar/0.1.0/ucd/block-0500.typ | typst | Apache License 2.0 | #let data = (
("CYRILLIC CAPITAL LETTER KOMI DE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI DE", "Ll", 0),
("CYRILLIC CAPITAL LETTER KOMI DJE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI DJE", "Ll", 0),
("CYRILLIC CAPITAL LETTER KOMI ZJE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI ZJE", "Ll", 0),
("CYRILLIC CAPITAL LETTER KOMI DZJE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI DZJE", "Ll", 0),
("CYRILLIC CAPITAL LETTER KOMI LJE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI LJE", "Ll", 0),
("CYRILLIC CAPITAL LETTER KOMI NJE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI NJE", "Ll", 0),
("CYRILLIC CAPITAL LETTER KOMI SJE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI SJE", "Ll", 0),
("CYRILLIC CAPITAL LETTER KOMI TJE", "Lu", 0),
("CYRILLIC SMALL LETTER KOMI TJE", "Ll", 0),
("CYRILLIC CAPITAL LETTER REVERSED ZE", "Lu", 0),
("CYRILLIC SMALL LETTER REVERSED ZE", "Ll", 0),
("CYRILLIC CAPITAL LETTER EL WITH HOOK", "Lu", 0),
("CYRILLIC SMALL LETTER EL WITH HOOK", "Ll", 0),
("CYRILLIC CAPITAL LETTER LHA", "Lu", 0),
("CYRILLIC SMALL LETTER LHA", "Ll", 0),
("CYRILLIC CAPITAL LETTER RHA", "Lu", 0),
("CYRILLIC SMALL LETTER RHA", "Ll", 0),
("CYRILLIC CAPITAL LETTER YAE", "Lu", 0),
("CYRILLIC SMALL LETTER YAE", "Ll", 0),
("CYRILLIC CAPITAL LETTER QA", "Lu", 0),
("CYRILLIC SMALL LETTER QA", "Ll", 0),
("CYRILLIC CAPITAL LETTER WE", "Lu", 0),
("CYRILLIC SMALL LETTER WE", "Ll", 0),
("CYRILLIC CAPITAL LETTER ALEUT KA", "Lu", 0),
("CYRILLIC SMALL LETTER ALEUT KA", "Ll", 0),
("CYRILLIC CAPITAL LETTER EL WITH MIDDLE HOOK", "Lu", 0),
("CYRILLIC SMALL LETTER EL WITH MIDDLE HOOK", "Ll", 0),
("CYRILLIC CAPITAL LETTER EN WITH MIDDLE HOOK", "Lu", 0),
("CYRILLIC SMALL LETTER EN WITH MIDDLE HOOK", "Ll", 0),
("CYRILLIC CAPITAL LETTER PE WITH DESCENDER", "Lu", 0),
("CYRILLIC SMALL LETTER PE WITH DESCENDER", "Ll", 0),
("CYRILLIC CAPITAL LETTER SHHA WITH DESCENDER", "Lu", 0),
("CYRILLIC SMALL LETTER SHHA WITH DESCENDER", "Ll", 0),
("CYRILLIC CAPITAL LETTER EN WITH LEFT HOOK", "Lu", 0),
("CYRILLIC SMALL LETTER EN WITH LEFT HOOK", "Ll", 0),
("CYRILLIC CAPITAL LETTER DZZHE", "Lu", 0),
("CYRILLIC SMALL LETTER DZZHE", "Ll", 0),
("CYRILLIC CAPITAL LETTER DCHE", "Lu", 0),
("CYRILLIC SMALL LETTER DCHE", "Ll", 0),
("CYRILLIC CAPITAL LETTER EL WITH DESCENDER", "Lu", 0),
("CYRILLIC SMALL LETTER EL WITH DESCENDER", "Ll", 0),
)
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/lovelace/0.1.0/examples/autobahn.typ | typst | Apache License 2.0 | #import "../lib.typ": *
#set page(width: 40em, height: auto, margin: 1em)
#show: setup-lovelace.with(line-number-supplement: "Zeile")
#let pseudocode = pseudocode.with(indentation-guide-stroke: .5pt)
#let algorithm = algorithm.with(supplement: "Algorithmus")
#algorithm(
caption: [Spurwechsel nach links auf der Autobahn],
pseudocode(
<line:blinken>,
[Links blinken],
[In den linken Außenspiegel schauen],
[*wenn* niemand nähert sich auf der linken Spur, *dann*], ind,
[Spur wechseln], ded,
[Blinker aus],
)
)
Der Schritt in @line:blinken stellt offenbar für viele Verkehrsteilnehmer eine
Herausforderung dar.
|
https://github.com/Jollywatt/typst-fletcher | https://raw.githubusercontent.com/Jollywatt/typst-fletcher/master/tests/edge-shift/test.typ | typst | MIT License | #set page(width: 5cm, height: auto, margin: 1em)
#import "/src/exports.typ" as fletcher: diagram, node, edge
#(3.4pt, 0.1).map(shift => [
Edge shift by #type(shift):
#diagram(
node((0,0), $A$),
edge((0,0), (1,0), "->", shift: +shift),
edge((0,0), (1,0), "<-", shift: -shift),
node((1,0), $B$),
)
#diagram(
node((0,0), $A$),
edge((0,0), (1,0), "->", shift: +shift, bend: 40deg),
edge((0,0), (1,0), "<-", shift: -shift, bend: 40deg),
node((1,0), $B$),
)
#diagram(
node-stroke: 1pt,
node((0,0), $A$),
edge((0,0), (1,0), (1,1), "->", shift: +shift),
edge((0,0), (1,0), (1,1), "->", shift: -shift),
edge((0,0), (1,1), "->", corner: left, shift: +shift),
edge((0,0), (1,1), "->", corner: left, shift: -shift),
node((1,1), $A B C$),
)
]).join(pagebreak())
|
https://github.com/katamyra/Notes | https://raw.githubusercontent.com/katamyra/Notes/main/Compiled%20School%20Notes/CS3001/Sections/Section3.typ | typst | #import "../../../template.typ": *
#set page(
header: align(right)[
<NAME>
]
)
#align(center)[
= Section 3 Guide
]
#set text(
font: "New Computer Modern",
size: 11pt
)
#set heading(
numbering: "1."
)
= Review - Utilitarianism and Deontology
#note[
Act Utilitarianism focuses on performing utilitarian calculus every time we have to make a decision to try to have the biggest net positive through our decisions.
On the other hand, in Rule Utilitarianism we try to define rules that make it so that we can bypass having to perform utilitarian calculus every time.
]
#note[
*Formulation 1:*
Act only in ways where you wouldn't want your actions to become universal law for all. Can everyone, in principle do this without any issue?
*Formulation 2:*
Act in a way such that you treat humanity not just as a means of an end but as individual people that you respect. This emphasizes dignity and respecting others for who they are.
]
= Social Contract Theory
#definition[
The *social contract theory* is a theory that addresses the origin and legitimacy of political authority and governments. It is an agreement between individuals and their government that they give up some of their rights for the protection of their other rights, because humans are believed to have entered into with one another to form a society and create governing structures.
]
The creation of rules in the social contract theory is generally followed by the process where rational individuals would collectively accept the rule as binding because of its benefit to the community.
*Pros:*
- It is framed in the language of rights
- It is based on a solid understanding of human nature, recognizing that people often act out of self interest in the absence of common agreement
- It explains why under certain circumstances civil disobedience is the morally correct decision
*Cons:*
- No one signed the social contract
- Unsure what to do when conflicting rights
- Unjust to those who are struggling to maintain their part of the contract
I think that those other questions are important to ask but I think that people focusing on making their own actions more moral is much more effective in improving society.
= My Quote
#quote[
Just as Jake is confident the judge would agree that stealing is the right thing for Heinz to do, so Amy is confident that, "if Heinz and the druggist had talked it out long enough, they could reach something besides stealing." As he considers the law to "have mistakes," so she sees this drama as a mistake, believing that "the world should just share things more and then people wouldn't have to steal."
]
*Both children recognize the need for agreement but see it as mediated
in different ways — he impersonally through systems of logic and law, she personally through communication in relationship.*
The conclusion underscores the common ground between the two children — the acknowledgment of the necessity for agreement. However, their divergence lies in the methods of mediation. Jake favors an impersonal approach, relying on *systems of logic and law* and reasoning like a utilitarian, whereas Amy advocates for a more personal route, emphasizing the *importance of communication and relationship-building* to address societal issues. Overall, the passage illustrates how individuals may perceive and approach problem-solving through distinct lenses, influenced by their views on established systems and personal connections. *Even when we feel that laws are generally correct, there are a lot of gray areas where some people think the laws themselves are at fault and others think our application of the laws is at fault.* Amy's position, by contrast, is essentially Kantian.
= Ethical Dilemma
You are clinging to a tree with your two children in a tsunami, and you can't hold on any longer to both children. You need to choose a child to let go of; how do you choose? Is it ethical to let go of the older child because they have a better chance of living?
Based on an actual story |
|
https://github.com/jgm/typst-hs | https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/ops-invalid-05.typ | typst | Other | // Error: 3-9 cannot apply 'not' to array
#(not ())
|
https://github.com/justmejulian/typst-documentation-template | https://raw.githubusercontent.com/justmejulian/typst-documentation-template/main/sections/caseStudy.typ | typst | = Case Study / Evaluation
#rect(
width: 100%,
radius: 10%,
stroke: 0.5pt,
fill: yellow,
)[
Note: If you did an evaluation / case study, describe it here.
]
== Design
#rect(
width: 100%,
radius: 10%,
stroke: 0.5pt,
fill: yellow,
)[
Note: Describe the design / methodology of the evaluation and why you did it like that. E.g. what kind of evaluation have you done (e.g. questionnaire, personal interviews, simulation, quantitative analysis of metrics, what kind of participants, what kind of questions, what was the procedure?
]
== Objectives
#rect(
width: 100%,
radius: 10%,
stroke: 0.5pt,
fill: yellow,
)[
Note: Derive concrete objectives / hypotheses for this evaluation from the general ones in the introduction.
]
== Results
#rect(
width: 100%,
radius: 10%,
stroke: 0.5pt,
fill: yellow,
)[
Note: Summarize the most interesting results of your evaluation (without interpretation). Additional results can be put into the appendix.
]
== Findings
#rect(
width: 100%,
radius: 10%,
stroke: 0.5pt,
fill: yellow,
)[
Note: Interpret the results and conclude interesting findings
]
== Discussion
#rect(
width: 100%,
radius: 10%,
stroke: 0.5pt,
fill: yellow,
)[
Note: Discuss the findings in more detail and also review possible disadvantages that you found
]
== Limitations
#rect(
width: 100%,
radius: 10%,
stroke: 0.5pt,
fill: yellow,
)[
Note: Describe limitations and threats to validity of your evaluation, e.g. reliability, generalizability, selection bias, researcher bias
]
|
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/unichar/0.1.0/ucd/block-1950.typ | typst | Apache License 2.0 | #let data = (
("TAI LE LETTER KA", "Lo", 0),
("TAI LE LETTER XA", "Lo", 0),
("TAI LE LETTER NGA", "Lo", 0),
("TAI LE LETTER TSA", "Lo", 0),
("TAI LE LETTER SA", "Lo", 0),
("TAI LE LETTER YA", "Lo", 0),
("TAI LE LETTER TA", "Lo", 0),
("TAI LE LETTER THA", "Lo", 0),
("TAI LE LETTER LA", "Lo", 0),
("TAI LE LETTER PA", "Lo", 0),
("TAI LE LETTER PHA", "Lo", 0),
("TAI LE LETTER MA", "Lo", 0),
("TAI LE LETTER FA", "Lo", 0),
("TAI LE LETTER VA", "Lo", 0),
("TAI LE LETTER HA", "Lo", 0),
("TAI LE LETTER QA", "Lo", 0),
("TAI LE LETTER KHA", "Lo", 0),
("TAI LE LETTER TSHA", "Lo", 0),
("TAI LE LETTER NA", "Lo", 0),
("TAI LE LETTER A", "Lo", 0),
("TAI LE LETTER I", "Lo", 0),
("TAI LE LETTER EE", "Lo", 0),
("TAI LE LETTER EH", "Lo", 0),
("TAI LE LETTER U", "Lo", 0),
("TAI LE LETTER OO", "Lo", 0),
("TAI LE LETTER O", "Lo", 0),
("TAI LE LETTER UE", "Lo", 0),
("TAI LE LETTER E", "Lo", 0),
("TAI LE LETTER AUE", "Lo", 0),
("TAI LE LETTER AI", "Lo", 0),
(),
(),
("TAI LE LETTER TONE-2", "Lo", 0),
("TAI LE LETTER TONE-3", "Lo", 0),
("TAI LE LETTER TONE-4", "Lo", 0),
("TAI LE LETTER TONE-5", "Lo", 0),
("TAI LE LETTER TONE-6", "Lo", 0),
)
|
https://github.com/fenjalien/metro | https://raw.githubusercontent.com/fenjalien/metro/main/tests/unit/power-half-as-sqrt/test.typ | typst | Apache License 2.0 | #import "/src/lib.typ": unit, metro-setup
#set page(width: auto, height: auto)
#unit("Hz tothe(0.5)")
#unit("Hz tothe(0.5)", power-half-as-sqrt: true)
#unit("Hz tothe(2)", power-half-as-sqrt: true) |
https://github.com/TGM-HIT/typst-thesis-workshop | https://raw.githubusercontent.com/TGM-HIT/typst-thesis-workshop/main/slides/theme.typ | typst | #import "@preview/touying:0.5.2": *
#import themes.metropolis: *
#import "@preview/codly:1.0.0": *
#let config = (
lang: sys.inputs.at("lang", default: "de"),
handout: {
let bools = ("true": true, "false": false)
let handout = sys.inputs.at("handout", default: "false")
assert(handout in bools, message: "--input handout=... must be set to true or false if present")
bools.at(handout)
},
)
#let metropolis-theme = metropolis-theme.with(
aspect-ratio: "16-9",
// footer: self => [
// #self.info.author, #self.info.date
// ],
// footer-right: self => {
// {
// set text(size: 0.8em)
// [made with #link("https://typst.app/")[Typst]
// and #link("https://touying-typ.github.io/")[Touying]]
// h(1em)
// }
// [#context utils.slide-counter.display() / #utils.last-slide-number]
// },
footer-right: self => {
{
set text(size: 0.8em)
[#self.info.author, #self.info.date]
h(1em)
}
[#context utils.slide-counter.display() / #utils.last-slide-number]
},
config-common(
handout: config.handout,
),
// config-colors(
// primary: rgb("#cc8833"),
// primary-light: rgb("#d6c6b7"),
// secondary: rgb("#bbbbbb"),
// neutral-lightest: rgb("#191919"),
// neutral-dark: rgb("#bbbbbb"),
// neutral-darkest: rgb("#dddddd"),
// ),
config-methods(
init: (self: none, body) => {
set text(self.colors.neutral-dark, font: "Liberation Sans", lang: config.lang)
// set raw(theme: "assets/Visual Studio.tmTheme")
show raw: set text(0.9em)
show: codly-init.with()
show quote.where(block: true): set text(0.9em)
show quote.where(block: true): set block(spacing: 1em)
show quote.where(block: true): it => {
show: block.with(
inset: (left: 0.6em, y: 0.4em),
stroke: (left: 3pt + gray),
)
[
#it.body
#set align(right)
#set text(0.8em)
-- #it.attribution
]
}
// show quote.attribution: set text(0.8em)
body
},
),
)
#let title-slide = title-slide.with(
extra: {
set text(size: 0.8em)
[made with #link("https://typst.app/")[Typst]
and #link("https://touying-typ.github.io/")[Touying]]
h(1em)
},
)
|
|
https://github.com/polarkac/MTG-Stories | https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/045%20-%20Kamigawa%3A%20Neon%20Dynasty/009_The%20Foes%20Who%20Make%20Us.typ | typst | #import "@local/mtgstory:0.2.0": conf
#show: doc => conf(
"The Foes Who Make Us",
set_name: "Kamigawa: Neon Dynasty",
story_date: datetime(day: 09, month: 02, year: 2022),
author: "<NAME>",
doc
)
EIGANJO. TEMPLE GARDENS
Ten minutes before the attack
"Heiko, #emph[wait] ." Norika puts every ounce of authority she can muster into her ten-year-old voice and, as always, it has zero effect on her cousin.
Ahead, Heiko pushes through the vines on the arbor to see the teahouse down the path. For the past week, pilgrims visiting Eiganjo to petition the emperor have whispered about an apparition in the tea garden. A shimmer in the air that devours birds midflight. It is the beginning of a merge, the pilgrims predict. An opening to the spirit realm.
Heiko, only eight, is not allowed to leave the family compound, and neither of them are permitted to venture all the way to the garden district, but Heiko has a way of talking Norika into adventures. And though Norika understands well her responsibilities as the elder cousin, the eldest daughter of the Yamazaki household, the more Heiko pesters her, the more she has to admit—she wants to see the merge, too.
So now she finds herself, as usual, trying to stop Heiko from going too far.
Heiko reaches a hand down and taps the backs of two fingers against the side of her leg. Their secret signal for#emph[ You need to chill out] . They both have a lot of practice giving, and ignoring, this signal.
Norika does not chill out. "Heiko get back here right"—but she breaks off because there #emph[is] something strange by the teahouse. A glimmer in the air, like the play of light on water. And—is it getting bigger?
A branch cracks, and Norika realizes that Heiko has crept through the arbor. The slash of light stretches broader than her arm span and glows white-hot. Staring up, Heiko gives another signal. The toe of her right foot casually taps the heel of her left#emph[. Just go with it.]
She takes a step toward the merge, one hand outstretched. The slash quivers and widens. Something is shouldering its way through the gap. A figure twice the size of a human, wrapped in ghostly robes. A sword protrudes from its stomach, the hilt buried in its back. It has no face, only a swirl of dark vapor that reaches out toward the girls. Heiko stares, transfixed.
#figure(image("009_The Foes Who Make Us/01.jpg", width: 100%), caption: [Unforgiving One | Art by: <NAME>], supplement: none, numbering: none)
The vapor condenses, spirals, becomes a vortex of shadow. The kami swoops forward, driving the vortex toward Heiko.
Norika sees the kami dive and, without thinking, flings herself forward to knock her cousin out of the path of the tunnel of darkness. She leaps, but never lands. Instead, she is suspended within a beam of pain—no thought, no muscles, no skin, no bone—and then it sweeps past. The pain is yanked from all the spaces between her cells. From now on, she will be filled with gaps, a hundred thousand papercuts that never heal.
Without seeing, she feels herself hit the ground.
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
SOKENZANSHI. CITY CENTER
Ten years after the attack
Wind whistles down the alley, and Heiko swears aloud as the cold penetrates her jacket. She thought she was prepared for winter, but she's realizing now that winter means one thing in the gardens of Eiganjo, and something entirely different in mountainous Sokenzanshi.
Part of her relishes the chill. Serves her right for coming alone to this city where she knows no one and nothing. Serves her right for fleeing her family the moment her eighteenth birthday arrived. But she couldn't have stayed in Eiganjo a minute longer than she had to. Couldn't bear the way her parents spoke her name like a mark of shame. Couldn't bear her relatives blaming her for Norika's injuries.
But worst of all, for a long time, Heiko couldn't bear to meet Norika's eyes. Every time she looked at her cousin, she saw Norika's still body lying in the hospital bed, as she had for nine months after the kami attack. Even though Norika was growing stronger every day, learning to use the neuroprosthesis given to her by the Imperial medics, studying for the Imperial samurai entry exam, doing all the things her family had feared she had lost. By the time Heiko found the courage to seek out her cousin, Norika had grown distant, preoccupied with training and new cadet friends. And Heiko, so used to being in front, found herself left behind.
So now she walks the icy streets of a strange city, without even sufficient wind-repellent tech built into her clothes. And she can only half-relish the cold, because also, she's #emph[ravenous] .
#figure(image("009_The Foes Who Make Us/02.jpg", width: 100%), caption: [Sokenzan, Crucible of Defiance | Art by: <NAME>], supplement: none, numbering: none)
Around a corner, she finds the street crowded with people, bundled in what looks like all their clothes at once. They form a winding, ragged line up to a huge cooking fire set in the middle of the street. Two people ladle rice soup from a cauldron as high as their waists.
The people remind Heiko of the lines of pilgrims seeking an audience with the emperor, back before she vanished. But those pilgrims always looked grim and exhausted, while the mood here is warm despite the chill. People smile to each other in line, and children dart between the legs of the adults.
Heiko skirts the crowd, looking for somewhere to buy a meal. She is almost past the servers at the cauldron when one of them calls to her.
"If you want food, you have to wait in line!"
She is flustered. "What?"
The server shakes their head. "Nothing I hate more than a line cutter."
"Oh no—I don't need—that," she trails off.
The server looks her up and down. "You sure look like you need it."
"I mean—other people need it more. I have money. I'm just here looking for the Uprisers."
The server raises an eyebrow. "Looking for the Uprisers, but too good to eat our cooking?"
Another gust of wind snatches away Heiko's remaining composure. "Oh, I didn't mean—you?"
The server gives a small bow. "You expected all of us to be fighters?"
They have a round face and shrewd eyes. Above their head, the shades of three long-handled teapots orbit lazily in the air. Heiko is trying to formulate a response when three Imperial mechs appear at the other end of the block.
#emph["This is an unauthorized assembly,"] their amplified voices blare.#emph[ "This street must be accessible to vehicles."]
They advance, forcing people out of the soup line and up onto the sidewalks. The server waves their ladle and calls to the crowd, "Stay calm! We'll relocate! Nobody needs to"—
But they are cut off by the mechs, who tower over the soup cauldron, their armor clacking in the wind. The lead mech spits out a glowing ticket from a slot on its chest. #emph["Unauthorized food distribution. To contest this, please report to the Imperial depot at"—]
SPLAT. A snowball hits the mech on the back of its helmet. Slush runs off white scaled armor. The mech spins around, and Heiko realizes that behind it, many of the people in line are no longer bundled in winter rags. Under their lumpy clothes, they've revealed enameled exosuits, customized and handmade, with glowing cracks running through ceramic plates.
The mech steps toward them, spewing tickets. #emph["Unauthorized possession of technological augments. To contest, please report"—]
SPLAT. Another snowball hits it, this time from the opposite direction.
Heiko presses her palm to her face, trying to ease the sting from the ice ball she just threw.
"Um, I would run." The server's voice is quiet; their teapots spin very close to their head. "#emph[Now] ." Heiko runs.
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
She spends what feels like hours ducking between buildings, darting between the warm walls of armory forges until she no longer hears the clatter of the mechs tailing her. Only when she is sure she has lost them does she drag her wet, shivery, ravenous form through the doorway of the first cafe she finds.
The inside is dark with steam and breath. Pipes in the walls hold water heated by the forge, bearing warmth through the rooms. People talk over low tables with light displays rising between them. Glowing menus and news bulletins slide across the walls.
Blinking to adjust her eyes, Heiko moves to the back counter. A server is wrapping kelp rolls and dropping them into a pot. As they work, three long-handled teapots hover above the pot and pour in boiling water one at a time. Heiko gasps, "You again?"
The server's shrewd eyes crinkle. "You must be good at running."
"I've had practice."
The server's face breaks into a smile. "Earlier, I said you looked like you needed a meal, but now you #emph[really] look like you need it."
Heiko doesn't protest. Perched on a counter stool, she inhales a bowl of steaming broth and kelp rolls, gold coins of fat shimmering on the surface. As she eats, she tries to explain herself. "Thank you. I'm sorry. I came without a plan. I just know—I'm supposed to find Risona."
The server laughs. "Everybody wants to find Risona. What makes you special?"
"Nothing, I just"—
"You don't think you're special?"
"No, but"—
"I'm teasing. You're doing great. I'm Chiye." They salute with their ladle. "And if you turn around"—they gesture behind her—"that's Risona."
In the darkest corner of the shop, a figure sits wrapped in a voluminous cloak. When Heiko turns, the figure rises and pushes back her hood. Risona is tall and grave, with red cords wound through her hair and lines around her eyes. When she speaks, her voice is warm but stern. "Throwing that snowball was a stupid thing to do."
With food in her, Heiko has the strength to be indignant. "People always say that like it's going to stop me."
Risona laughs, grimly. "You must be a soft little rich kid, if people only try to stop you with words."
Heiko wants to say something cutting in reply, but a voice in her head whispers, #emph[Wait] . She swallows her retort and decides to try honesty. "My family serves the Imperial Council. But I'm not one of them anymore. I can't be. I need a fresh start." She looks into Risona's eyes. "I'm hoping to find it with people who believe in justice."
She thinks—hopes—she sees Risona's grim expression tempered by a flicker of generosity.
"You have a good arm. You're quick on your feet. No impulse control, but"—
"I have #emph[some] impulse control!"
Risona sighs. "#emph[But] , Chiye here says they'll sponsor you."
Chiye chimes in, "I have a feeling."
Risona leans in close to Heiko. "This isn't an easy life. Every season, the Imperials insist it will get better. But every season, it falls to us to keep each other alive. Is that really what you want?"
Heiko feels a warmth deep in her belly. Maybe it's from the steaming soup. Maybe it's the satisfaction of watching the snowball hit the mech. Maybe it's the heat of Chiye's smile, which she can feel even though her back is turned. She meets Risona's eyes. "Yes."
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
EIGANJO. IMPERIAL COURT
Twelve years after the attack
Norika sits in the medic's chair, breathing deeply as small needles dart over her body and sting her skin. Her arms, legs, and back are covered with intricate turquoise designs. From far away they look like tattoos, but up close they have more dimensionality: augments laid into her skin.
After the unknown kami swept through her, she was paralyzed for weeks, wracked with pain. No one knew how or when she would recover. Then the Imperials offered newly approved neuroprostheses to her family, and over nine months, the prosthetics grew into her like lichen. When the drugs finally wore off and she sat up, she felt euphoric. The pain was no longer a monster that shook her in its teeth but a companion to be tended.
Over the years, she lets the pain teach her about patience, and rest. She learns to value stillness as much as action. She trains when she can, and when she cannot, she reads history, poetry, botany. She learns to care for herself and tries not to think too much about the younger cousin she once tended like her own shadow. When her flare-ups grow too frequent, she returns to the medics to update her augments.
At first, her teachers at the academy don't know what to make of this student who quotes the epics like an old scholar and is absent for weeks on end. She wins them over with her willingness to listen, and to see problems from new angles. After graduation, she is accepted as an aide to the Imperial advisor Naomi. Then the emperor vanishes. As Naomi's influence grows, so do Norika's responsibilities. She becomes chief handler of the merge in Eiganjo, the same one that once disgorged the kami that attacked her. She has held this position for the past four years.
Today, as the medical machines work on her updates, Norika contemplates her most recent conversation with Naomi. A week ago, her mentor summoned her to the strategy room, where a topographical map of the realm was overlaid with holograms of moving trains, flying mechs, flickering merge gates. When Norika entered, Naomi directed her attention to the south of the map. "The bandit problem in Sokenzanshi has gotten out of hand. I need you to take command there."
#figure(image("009_The Foes Who Make Us/03.jpg", width: 100%), caption: [Naomi, Pillar of Order | Art by: <NAME>], supplement: none, numbering: none)
"Sokenzanshi?" Something tightened in Norika's stomach.
"I know, it's far. And cold. And it won't be glamorous work." Naomi paused. "But I think a change is important. It can't be easy, serving at this gate, given your history."
Norika started to protest but broke off. Her mentor was gifted at speaking the truths others tried to avoid. Norika #emph[was] weary of Eiganjo. She was ready for a change.
In the present, the machine beeps to notify her that her updates are complete. Norika sits up. Her body feels invigorated but the tightness in her gut persists. It's not that the job will be cold and unglamorous; Norika welcomes a challenge. But she senses something awaits her in Sokenzanshi, a reckoning she can't yet name.
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
SOKENZANSHI. WAREHOUSE DISTRICT
Twelve years, six months after the attack
It's after midnight, and Heiko and Chiye are robbing a train. Someday, Heiko imagines, they might spend a different kind of evening together. People-watching over dinner in a restaurant, or dressing up to see a play. But when Chiye heard rumors of Imperial train cars sitting locked and full of provisions in the railyard on the outskirts of the city, they could think of nothing else. And so this night, like so many of their nights, is spent in light armor and dark cloaks, scouting around corners, keeping to the shadows as they run.
The moon is hidden, and the railyard is silent. The cargo car is secured with only a single rusty padlock. Chiye's teapots shoot concentrated jets of steam at the lock's hasp, and the mechanism shatters.
Chiye frowns. "That was too easy."
"Or, was it the perfect application of your unique abilities?"
Chiye rolls their eyes. "It was too easy."
They heave open the sliding door. The train car is packed with enough provisions for hundreds of people. There is barely room to walk between pallets of rice, bushels of flour and barley, crates marked with preserved fish and vegetables.
Heiko shakes with disgust. "How dare they hoard this. When people are foraging in trash heaps for their children?"
Chiye touches her lightly on the arm. "Speeches later. Let's move fast. I heard a new administrator just arrived, really cracking down." They hoist a crate through the car door and onto the hover cart they have brought with them.
Heiko jumps up beside them and lifts a pallet. "The administrator should thank us, really. All this food just #emph[happened] to get forgotten, and we #emph[happened] to be in the right place to return it to its rightful owners: the hungry people of Sokenzanshi."
A voice behind them sneers, "Such dedicated citizens of the realm." There is another light touch on Heiko's arm, but this time it's not Chiye. It's a pair of automatic restraints, cinching her wrists behind her back before she can reach for her sword. Chiye's wrists are bound also. Two Imperial enforcers pull them out of the car and throw them to the ground, leaning down to hiss, "The commander wishes to deliver her thanks in person."
Cold earth grinds into Heiko's cheek. The teapots around Chiye's head whistle a constant, earsplitting shriek. Through the noise, Heiko hears light, precise footsteps and the clicking of augmented armor. The enforcers leap into a salute. "Commander!"
Heiko feels the new presence inspecting both of them. A nuanced voice says, "I didn't think the dreaded bandits of Sokenzanshi would be so~unsubtle."
Chiye spits, "I'm not a bandit, I'm a cook."
The commander responds wryly. "I hear in this city, one can be both."
Something in her tone causes a twinge in the back of Heiko's mind.
The enforcers grab Heiko's and Chiye's shoulders and wrench them up. The commander towers over them in white armor edged with gold. Her helmet resembles panels of origami, folded intricately over her face and sweeping back into twin fans.
At the sight of their disheveled faces, the commander steps back. Her armored hand flies to the hilt of her sword, releases it, hovers over the pommel. When she speaks, her voice has turned brittle. #emph["What are you doing here?"]
Heiko is ashamed that she allowed Chiye to be captured, but the question summons back her outrage. "What are we doing here? What are #emph[you] doing here, using food for a thousand people as a #emph[trap] ? What are you doing here, in this city where nobody wants you?" She lunges sideways, hits the ground again and struggles back upright, still yelling, "I hope you all starve to death, just so you can know"—
And then she breaks off, because the twinge in her brain is back. It pulls her attention toward the commander's left hand. Almost imperceptibly, the commander is tapping the backs of two fingers against her armored leg.
#emph[You need to chill out.]
The thought Heiko cannot think blooms then into a waterfall of questions tumbling one after another. It can't be Norika. It can't be. The Uprisers have instilled enough restraint in her that her face shows nothing, but her breath shifts enough that Chiye looks sharply at her.
Heiko composes herself as best she can with her arms tied behind her back. "On second thought, I have decided to become a model prisoner." She flashes a smile at the enforcers. "Thank you for your service to the realm."
The enforcers scoff, but the commander cuts in. "I need to interrogate these prisoners alone. Leave us." The enforcers hesitate just long enough for the commander to bark, "Now!" and then scurry away across the train tracks.
When they are gone, the commander bows her head, and removes her helmet. Even in the darkness, Norika's eyes are just as Heiko remembers them, though now they look out of an adult's face. Norika meets her gaze, and for a second, Heiko is hurled back to the image of her cousin motionless in the hospital bed. Again, her breath catches in her throat. Next to her, she can feel Chiye working to assemble information. In answer to the silent question, Heiko says, "This is my cousin, <NAME>. We were children together." She hopes Chiye can hear all the unspoken things that go into those last four words.
Chiye is silent for a long moment. Then: "So, I take it pigheadedness runs in the family."
Norika laughs. She draws a glyph in the air, and the restraints spring open.
Heiko stands and helps Chiye to their feet. She looks everywhere but her cousin's face—at her armor, her boots, her gleaming, unblooded sword. "Looks like you got everything you wanted."
"Not quite." Norika's voice sounds almost joyful. "I would have picked a better location for a reunion."
Sensation rushes back to Heiko's hands and, with it, her anger returns. How dare Norika laugh? "I wasn't joking, you know. Asking why you're here, when people are starving"—
"I know." Norika cuts her off. "You're right. I don't want anyone else to die. I'm here to fix this. And when I saw you, I realized—Heiko, this is an opportunity."
Norika's smooth transition to diplomacy fills Heiko with more rage. "I'm not a #emph[collaborator] ." The word slices through the quiet night.
Norika's expression tightens. "We grew up in an Imperial household."
"Yes, and I left."
"And now you're acting like a child."
"Actually, she's acting like an Upriser," Chiye interjects. "Maybe you've heard? The Uprisers take everyone. No matter how~#emph[unsavory] their origins."
Norika smirks. "Oh, I've heard plenty about the Uprisers."
Heiko feels nauseated. This is why she left. Nothing could ever be easy between them. Every word spoken only makes things unravel faster. She rounds on Norika. "Are we your prisoners?"
Norika steps away. "I would never do that to you."
"What a gift." Heiko spits. "Let's go."
"Heiko, don't walk away from me." Norika sounds desperate. "Not this time."
"You don't get to command me."
"I'm asking you. Please. #emph[Heiko, wait] ."
This sentence makes Heiko freeze. It hurts. And it hurts to admit it hurts. The way those words echo back across years. Heiko takes a deep breath and turns to Chiye.
"You should go. Find Risona. Tell her it's a trap. Tell her I'm okay."
Chiye grabs her shoulders. "No way I'm leaving you alone with an Imperial commander. I don't care whose cousin she is."
Heiko cups Chiye's face. "And that's why I love you, but please, trust me."
"That's why you what?"
"I mean—you need to leave now."
"Say it again."
"I love you, now #emph[get out of here] ."
Chiye saunters up to an inch from Norika's face. "Did you hear? That person over there just said she loves me. Now, I haven't told her yet, but I love her, too. And if you harm a hair on her head, you see these teapots? Their steam can melt the flesh off a human face in one and a half seconds. You understand?"
On the far side of the tracks, they blow a kiss to Heiko, then vanish into the shadows.
Norika looks back to Heiko. "You're really one of them now."
Heiko can't help the bitterness in her voice. "My real family didn't want me."
Norika's mouth curls. "Most people rebel against their families, but they don't rebel against the entire realm."
"You didn't care how I felt then, and you don't have to care now."
Norika closes the distance between them and grabs Heiko's shirt. "Don't you dare say I don't care about you. Don't you dare."
Heiko's heart pounds. She sees tears on the rims of Norika's eyes. She mumbles, "I didn't say—I didn't mean that."
Norika lets go. She wipes her eyes as quickly as she can. "You still know how to get to me."
Heiko's eyes narrow. "You knew I was here, didn't you? That's why you accepted this position. You thought I could help you."
Now Norika drops her eyes. "I didn't know for certain. I had a feeling. And my feelings are usually correct."
Heiko steps back. "I hear stories about you. Your ambition. The poet-warrior of the realm. Am I just another stepping stone in your rise to power?"
#figure(image("009_The Foes Who Make Us/04.jpg", width: 100%), caption: [<NAME>, the Poet | Art by: <NAME>], supplement: none, numbering: none)
"What about you? You would starve your chosen family out of pride? To spite me?"
"You're just like our family. You think I'm supposed to spend my whole life in debt to you because of the accident, no way to ever escape it."
Their faces are inches apart. Norika speaks icily. "If that's true, it's just like I have to spend my entire life remembering that moment, in every nerve of my body. I don't get to escape either."
At her words, Heiko feels a stab of pain she can't bear, so she transmutes it back to anger. "Get off me." She pushes past Norika, but her cousin blocks her again, this time with her sword barring Heiko's chest. The glint of steel makes Heiko snarl and draw her sword. Norika spins and leaps in her way a third time, and Heiko attacks in earnest, flame rippling along her blade. Her anger makes her wild, and she leaps for Norika's throat.
But Norika, with perfect form, feints, disengages, and knocks Heiko away with one blow. Heiko lands hard, her cheekbone hitting the train track. She tastes iron. She feels blood run down her face.
Norika springs on her, pressing her sword to Heiko's throat. "You idiot." She is breathing hard. "I didn't come here to make you pay. I came here to forgive you."
The pressure of the blade disappears, and the sword clatters on the tracks. Norika stands, and Heiko scrambles up into a crouch. She holds her expression rigid as a mask. If she speaks, she will cry.
Norika backs up, hands spread at her sides. "Cousin. Let the guilt go."
"I'm not"—Heiko chokes—"I'm not who I was."
Norika shakes her head. "Neither of us are. But give me a chance, Heiko."
Heiko resists. "This isn't a game. You know it as numbers in ledgers, models you push around a map, but there's a whole world here you don't understand."
"Then show me?" Norika stoops, eyes still on Heiko, picks up her sword, sheathes it. "Show me Sokenzanshi?" Her cool, even voice is shaped by years of diplomacy training.
Heiko can't help but imagine the possibility she proposes. A city with enough food for everyone. A winter where no one dies. But then the vision winks out. "I can't walk around here with you." Heiko pauses. It is easier to think about the problem at hand than to comprehend the larger tides shifting inside her. "Keep your sword out. Act like I'm your prisoner."
Norika looks skeptical. "If I were an Upriser, looking out a window, and I saw an Imperial holding my beloved Heiko at sword point, I would shoot a projectile out that window."
"That could happen to you either way."
"Then maybe we both take a risk?"
It's Heiko's turn to look skeptical. "You never take risks."
Norika's wry smile returns. "It might surprise you, cousin, but I have also changed in the past twelve years."
They leave the railyard and walk together to the heart of the city. The streets are quiet. Heiko knows they are being watched. Her heart jumps—Risona could cast her out for this. Chiye could leave. Her second family could reject her as easily as the first.
They pass blocks of tightly packed wooden buildings. Laundry hangs from balconies and snow boots line up in doorways. Heiko points to them. "These apartments house farmers who lost their lands. Families with no homes. The Uprisers keep them heated, manage water and power. Over there is the food hall where we serve breakfast. Up that hill is a school we're running for the children of displaced families." #emph[So they can learn more than Imperial propaganda] , she stops herself from saying.
Heiko realizes with irritation that she keeps checking Norika's reactions, wanting her cousin's approval. Norika listens closely, asking perceptive, logistical questions. "How do you organize harvest donations?" "Who coordinates cooking labor?"
They round a corner, discussing how geothermal wells power the forges, and Norika breaks off with a gasp. Risona holds an axe to her throat. Upriser warriors circle around them.
Risona growls, "The walking tour is over." She shifts Norika's collar with her axe. Heiko glimpses a flash of turquoise under her cousin's breastplate.
Chiye runs to Heiko and pushes back her hair to scrutinize the abrasion on her temple. They turn to stare daggers at Norika. "One and a half seconds, Commander, remember?"
Heiko chooses her words very carefully. "It's okay. Risona, Chiye. The commander and I are coming to an understanding."
"Heiko," Risona reprimands, "Imperials talk in circles. They will trap you with words."
"I know. Listen to me. The commander has agreed to leave us the contents of the Imperial storehouse. They will stop pressing the farmers for Imperial taxes."
Risona looks Norika over, her face full of disdain. "Is this true, administrator?"
Norika is quiet for a long moment. Desperately, Heiko taps her right toe against the heel of her left boot. #emph[Please, just go with it.]
Norika takes a deep breath, hampered by Risona's axe still pressing into her throat. "I will honor these agreements."
Heiko feels a wave of relief, but Risona snorts. "It is not enough. Imperial sluggishness will continue to let people die. Patching cracks on the dam does nothing, we need the dam to break"—
"You do an impressive job providing for a city, but it's not the same as ruling a realm." Norika speaks calmly. "The Council must make decisions to care for everyone, not just the people in front of us."
Heiko finds herself admiring Norika's composure, and hoping her cousin will shut up before Risona slices her open.
Risona counters, "If you let people make decisions themselves, you wouldn't have to rule them."
Norika says, "You are a respected leader. Thousands look up to you. I'm sure you know, it's never that simple."
"It could be that simple." Risona's mouth is grim. "Imperials are the ones who insist on complication."
"Let them leave us alone, then," Heiko interjects. She feels frantic that the argument not escalate, but she tries to keep her voice as level as her cousin's. Looking to Norika, she says, "The Imperials will no longer enforce restrictions on technology here. You will let us deal with thieves in our own way. Sokenzanshi will be a free city."
Norika watches Heiko with deep, unreadable eyes. "It might be easier to consider these conditions without an axe at my throat."
Heiko nods, and to her surprise, Risona sheathes her weapon.
Norika massages her clavicle. "I can take your proposals to Naomi and the Council. My word has no force without their approval."
Chiye breaks in with a laugh. "See? More talking in circles. How do we know you'll argue in earnest?"
Now Norika gives Chiye the full force of her gaze. They regard each other for a long moment. Finally, Norika says, "I know enough about pain. I don't want to see more of it than I have to."
Chiye's teapots halt in the air. They nod. "We're in agreement there. Theoretically."
Risona steps between them. "Even if you keep your word, which I doubt, it will not quell the revolt. The Imperials have caused more suffering than you can hope to make up for."
Heiko steels herself, but Norika bows. "I only hope, then, you'll remember I was honest with you."
"I have yet to see that." Risona gestures to the warriors around her. "We'll be watching you closely. Heiko, escort the administrator out of our territory."
#v(0.35em)
#line(length: 100%, stroke: rgb(90%, 90%, 90%))
#v(0.35em)
They retrace their path out of the snowy city. The warm lights of the forges shine through the windows around them. Heiko watches her cousin as they walk. She can see the wheels turning in Norika's mind. Committees to speak to and proposals to write. Bargains to offer, deals to make. Experts to query, technologies to evaluate, regulations to draft.
"You really believe in it, don't you?" Heiko asks.
Norika considers this. "Not the decisions we make, not always. But I do believe we're on the right path."
"Even if it leads to rebellion?"
Norika watches a snowflake melt on her fingertip. She turns to Heiko, and her eyes brim with grief and love. "Even then."
#figure(image("009_The Foes Who Make Us/05.jpg", width: 100%), caption: [<NAME>, the General | Art by: <NAME>], supplement: none, numbering: none)
They reach the Imperial outpost. Enforcers mill about the entrance along with mechs and drones. Norika signals them to stand down. Her gestures are graceful, decisive. #emph[She looks like a hero in a storybook, ] Heiko thinks. #emph[Soldiers rally to her. Politicians give way. Even her enemies have to respect her.]
As though she has heard this thought, Norika's eyes sharpen. "Do you think we're enemies, cousin?"
Heiko's response is the truest thing she has ever said, though she did not know it until today. "No." She stops after that single word. There is too much more to say. There is a decade of life bearing them in opposite directions. Their secret language from childhood holds no gestures to name the distance now between them. Heiko chooses her words carefully. "But I don't yet know if we're allies."
Norika nods to this. "I agree."
They clasp hands. They do not embrace. Norika walks through the ranks of enforcers who salute her as she passes. Heiko watches until all the soldiers have followed her inside and the courtyard has fallen quiet. Then she turns and starts back up the slope, toward the firelit city where she knows her family awaits her.
https://github.com/Skimmeroni/Appunti | https://raw.githubusercontent.com/Skimmeroni/Appunti/main/Metodi%20Algebrici/Strutture/Permutazioni.typ | typst | Creative Commons Zero v1.0 Universal | #import "../Metodi_defs.typ": *
Let $X$ be a set. A bijective function $sigma: X |-> X$ is called a *permutation* of $X$. The set of all permutations that can be constructed on $X$ is denoted by $S_(X)$.
#lemma[
	Let $X$ be a set and let $S_(X)$ be the set of all permutations that can be constructed on $X$. If $X$ is a finite set of cardinality $n$, then $|S_(X)| = n!$.
]
#proof[
	If $sigma$ is a permutation of $X$ and $X = {x_(1), ..., x_(n)}$, then there are $n$ possible choices for the image $sigma(x_(1))$. Having chosen a second element $x_(2) != x_(1)$, there remain $n - 1$ choices for $sigma(x_(2))$, because $sigma$ is injective by definition (being bijective, hence injective and surjective) and one of the choices is already taken by $x_(1)$. Repeating this reasoning for all the elements of $X$, the number of permutations of $X$ is exactly $n dot (n - 1) dot ... dot 1 = n!$.
]
#theorem[
	Let $X$ be a set and let $S_(X)$ be the set of all permutations that can be constructed on $X$. The algebraic structure $(S_(X), compose)$, where $compose$ is the operation of function composition, forms a group.
]
#proof[
	The algebraic structure $(S_(X), compose)$ forms a semigroup because, by @Composition-is-associative, the composition operation is associative. It is also a monoid, because composition has its neutral element in the identity function, as stated in @Composition-identity-is-neutral. Finally, it is a group because, permutations being bijective by definition, every permutation has an inverse, and the inverse function is the inverse with respect to composition, as stated in @Composition-inverse-is-inverse.
]
For a set $X$, the group $(S_(X), compose)$ is called the *symmetric group* or the *group of permutations*.
Since $compose$ is the most "interesting" operation to apply to permutations, the composition of two permutations is usually called their _product_. Therefore, if $sigma$ and $tau$ are two permutations in $S_(X)$, the expression $sigma compose tau$ may also be written as $sigma tau$.
In general, when speaking of permutations of a set $X$, one usually takes $X$ to be the first $n$ positive integers, that is $X = {1, 2, ..., n}$. For this reason, the notation $S_(n)$ is used to denote the set of all permutations of $X = {1, 2, ..., n}$, it being understood that this is the set to which $S_(n)$ refers. A given permutation $sigma in S_(n)$ is often also written as:
$ sigma = mat(
x_(1), x_(2), ..., x_(i), ..., x_(n);
y_(1), y_(2), ..., y_(i), ..., y_(n);
) $
Where, for each $i$, $y_(i) = sigma (x_(i))$. The ordering of the first row can be arbitrary, but by convention it is usually arranged in increasing order.
#example[
	- With $n = 3$, there are $3! = 6$ possible permutations, which can therefore easily be enumerated:
$ S_(3) = { mat(1, 2, 3; 1, 2, 3), mat(1, 2, 3; 1, 3, 2),
mat(1, 2, 3; 3, 2, 1), mat(1, 2, 3; 3, 1, 2), mat(1, 2, 3; 2, 1, 3),
mat(1, 2, 3; 2, 3, 1) } $
	- With $n = 12$, there are $12! = 479001600$ possible permutations. One of these is:
$ sigma in S_(12) =
mat(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12;
8, 1, 2, 7, 12, 5, 9, 3, 11, 4, 6, 10) $
]
A permutation $sigma in S_(n)$ is said to _move_ an element $a$ if $sigma(a) != a$, that is, if it "moves" the element $a$ to a position different from the one it occupies. Otherwise, that is if $sigma(a) = a$, we say that $sigma$ _fixes_ $a$. The set of the elements moved by $sigma$ is called the *support* of $sigma$. Two permutations $sigma, tau in S_(X)$ are said to be *disjoint* if their supports are disjoint sets.
#example[
	Consider the three permutations $sigma, tau, upsilon in S_(6)$:
#grid(
columns: (0.33fr, 0.33fr, 0.33fr),
[
$ sigma = mat(
1, 2, 3, 4, 5, 6;
1, 6, 2, 3, 5, 4;
) $
],
[
$ tau = mat(
1, 2, 3, 4, 5, 6;
5, 2, 3, 4, 1, 6;
) $
],
[
$ upsilon = mat(
1, 2, 3, 4, 5, 6;
1, 2, 3, 5, 6, 4;
) $
]
)
	The support of $sigma$ is ${2, 3, 4, 6}$, that of $tau$ is ${1, 5}$, while that of $upsilon$ is ${4, 5, 6}$. It follows that $sigma$ and $tau$ are disjoint.
]
#theorem[
	If $sigma$ and $tau$ are two disjoint permutations, then $sigma tau = tau sigma$.
]
// #proof[
// 	Provable, to be added
// ]
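A minimal sketch of the argument, using only the definitions above: let $a in X$. If $a$ is moved by $sigma$, then both $a$ and $sigma(a)$ belong to the support of $sigma$ (if $sigma(sigma(a)) = sigma(a)$, injectivity would force $sigma(a) = a$), so neither is moved by $tau$; hence $sigma tau (a) = sigma(a) = tau sigma (a)$. The case in which $a$ is moved by $tau$ is symmetric, and if $a$ is fixed by both permutations then $sigma tau (a) = a = tau sigma (a)$.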
A permutation of the form:
$ mat(
x_(1), x_(2), ..., x_(r - 1), x_(r), x_(r + 1), ..., x_(n);
x_(2), x_(3), ..., x_(r), x_(1), x_(r + 1), ..., x_(n);
) $
is called a *cyclic permutation* of length $r$, or simply a *cycle* of length $r$, where $r gt.eq 2$.
To denote a cycle it is enough to specify which elements are moved and to which position, because all the elements that are not mentioned are implicitly fixed. A cycle $sigma$ of length $r$ is denoted by $sigma = (x_(1), x_(2), ..., x_(r))$; this notation indicates that each element $x_(i)$ is mapped to the element $x_(i + 1)$, with the exception of the $r$-th element, which is mapped to $x_(1)$.
Note that the notation $(x_(1), x_(2), x_(3), ..., x_(r))$ is equivalent to $(x_(r), x_(1), x_(2), ..., x_(r - 1))$, to $(x_(r - 1), x_(r), x_(1), ..., x_(r - 2))$, and so on, because these are all cycles inducing the same "shift"; one simply takes a different element of the cycle as the "starting point". Specifically, every cycle can be written in as many different ways as its length.
#example[
	The permutation presented below is a cycle of length 5, belonging to the set of permutations $S_(12)$:
$ sigma = mat(
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12;
9, 2, 3, 6, 5, 11, 7, 8, 4, 10, 1, 12;
) $
	This cycle maps $1$ to $9$, $9$ to $4$, $4$ to $6$, $6$ to $11$, and $11$ to $1$. Therefore, it is denoted by $(1, 9, 4, 6, 11)$. Note that this notation can be written in $5$ ways, all equivalent:
#grid(
columns: (0.2fr, 0.2fr, 0.2fr, 0.2fr, 0.2fr),
[$ (1, 9, 4, 6, 11) $],
[$ (11, 1, 9, 4, 6) $],
[$ (6, 11, 1, 9, 4) $],
[$ (4, 6, 11, 1, 9) $],
[$ (9, 4, 6, 11, 1) $]
)
]
#theorem[
	Every permutation in $S_(n)$ other than the identity is either a cycle or a product of disjoint cycles, uniquely determined up to their order.
]
// #proof[
// 	Provable, to be added
// ]
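A minimal sketch of the argument: pick any element $a$ moved by the permutation $sigma$ and follow its images $a, sigma(a), sigma^2 (a), ...$; since $X$ is finite, the sequence must eventually return to $a$, and this orbit yields a cycle. Repeating the procedure on the elements not yet visited produces disjoint cycles whose product is $sigma$; the decomposition is unique up to order because every moved element determines the cycle that contains it.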
#example[
	The permutation $sigma in S_(13)$ on the left can be decomposed into the product of the three cycles $upsilon_(1), upsilon_(2), upsilon_(3)$ on the right:
#grid(
columns: (0.7fr, 0.3fr),
[
$ sigma = mat(
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13;
9, 12, 13, 6, 7, 11, 2, 3, 4, 10, 1, 5, 8;
) $
],
[
$upsilon_(1) = (1, 9, 4, 6, 11)$,
$upsilon_(2) = (2, 12, 5, 7)$,
$upsilon_(3) = (3, 13, 8)$
]
)
	To convince oneself of this, it is enough to compose the three cycles (in arbitrary order). Consider, for instance, $upsilon_(1) compose upsilon_(2) compose upsilon_(3)$:
$
upsilon_(1) compose upsilon_(2) compose upsilon_(3) =
upsilon_(1) upsilon_(2) upsilon_(3) =
upsilon_(1)(upsilon_(2)(upsilon_(3)))&=
upsilon_(1)(upsilon_(2)(mat(
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13;
1, 2, 13, 4, 5, 6, 7, 3, 9, 10, 11, 12, 8;
))) \ =
upsilon_(1)(mat(
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13;
1, 12, 13, 4, 7, 6, 2, 3, 9, 10, 11, 5, 8;
))&=
mat(
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13;
9, 12, 13, 6, 7, 11, 2, 3, 4, 10, 1, 5, 8;
)
$
]
|
https://github.com/kacper-uminski/math-notes | https://raw.githubusercontent.com/kacper-uminski/math-notes/main/tata76/notes.typ | typst | Creative Commons Zero v1.0 Universal | #import "@preview/physica:0.9.2": *
#show math.integral: math.limits.with(inline: false)
#show math.integral.double: math.limits.with(inline: false)
#show math.integral.triple: math.limits.with(inline: false)
#let title = [
  TATA76 - Lecture Notes
]
#let titled_block(title, txt) = align(center,block(
width: 90%,
fill: luma(230),
inset: 8pt,
radius: 4pt,
align(left)[
*#title*
#txt
]
))
#let example(num, txt) = [#titled_block([Example #num], [#txt])]
#let theorem(title, txt) = [#titled_block([Theorem #title], [#txt])]
#set document(title: [#title], author: "<NAME>")
#set text(size: 12pt, font: "New Computer Modern")
#set heading(numbering: "1.")
#align(center, text(17pt)[
*#title*
])
#align(center, text(14pt)[
*<NAME>*
])
= The Space $RR^3$, Basic Concepts, and Functions of Several Variables
= Limits and Continuity
= Differentiability and Partial Derivatives
Recall from single-variable calculus that $f'(a) = lim_(h->0) (f(a+h)-f(a))/h$. $f'(a)$ is the slope of the tangent line at the point $(a,f(a))$.
== Several Variables
Let $z=f(x,y)$. Fix $y=b$, that is, study $z=f(x,b)$. As $x$ varies, this relation describes a curve. We define:
$ f'_x (a,b) = lim_(h->0) (f(a+h,b)-f(a,b))/h $
$f'_x (a,b)$ is the slope in the $x$-direction at the point $(a,b,f(a,b))$.
This is denoted $f'_x$, $pdv(f, x)$, $D_x f$. Correspondingly, if we fix $x=a$ and let $y$ vary, the derivative with respect to $y$ is defined and denoted analogously.
The curly $diff$ indicates that the function depends on _several_ variables, while an ordinary d indicates that it depends on only _one_ variable.
From the definition it follows that the usual rules of differentiation still apply. When differentiating with respect to one variable, all other variables are treated as constants.
Note that if $f$ is a function of two variables, the graph of $f$ does not have _one_ slope; the slope depends on the direction in which you look.
#example(1)[
$ f(x,y,z) = x y^2 z^3 => cases(
    pdv(f,x) = y^2 z^3,
    pdv(f,y) = 2 x y z^3,
pdv(f,z) = 3 x y^2 z^2,
) $
]
#example(2)[
$ f(x,y) = y e^(x y)+sin(x^2+2y) => cases(
pdv(f,x) = y^2e^(x y)+2x cos(x^2+2y),
pdv(f,y) = (1+x y)e^(x y)+2cos(x^2+2y),
) $
]
If the partial derivatives $(f'_x, f'_y, ...)$ are continuous, then $f$ is said to be of class $C^1$, or $f in C^1$. All elementary functions, and compositions, sums, products, and quotients of such, are continuous (and therefore of class $C^n, forall n =< oo$.)
Note that in single-variable calculus, if $f$ is differentiable then $f$ is continuous. This does _not_ carry over to several variables: the mere existence of the partial derivatives does not imply continuity.
#example(3)[
$ f(x,y) = cases(
(x y)/(x^2+y^2)\, (x,y) != (0,0),
0\, (x,y) = (0,0)
  ) $
  Here $f'_x (0,0) = f'_y (0,0) = 0$ both exist, but $f$ is not even continuous at the origin, since $f(t,t) = 1/2$ for every $t != 0$.
]
= The Chain Rule and Partial Differential Equations
Recall from single-variable calculus that $D(f(g(x))) = f'(g(x)) dot g'(x)$, or, with $t = g(x)$, that $f(g(x)) = f(t)$ and $dv(f,x) = dv(f,t) dot dv(t,x)$.
#example(1)[
  Consider the Euler equation $x^2y''-2x y'+2y = 2x^2, x>0$.
  Change variables, $x = e^t$, that is $t = ln x$: $
dv(y,x) = dv(y,t) dot dv(t,x) = 1/x dot dv(y,t)
<==> dv(,x) = 1/x dot dv(,t) $
  This gives: $
dv(y,x,2) & = dv(,x)(dv(y,x)) \
& = dv(,x)(1/x dot dv(y,t)) \
    & = \/\/ "Product rule" \/\/ \
& = -1/(x^2) dot dv(y,t) + 1/x dot dv(,x)(dv(y,t)) \
    & = 1/(x^2)(dv(y,t,2) - dv(y,t))
$
  This gives the new equation: $
    "LHS" & = x^2 dv(y,x,2)-2x dv(y,x)+2y \
& = ... \
    & = dv(y,t,2)-3dv(y,t)+2y \
& = 2x^2 = \/ x = e^t \/ =2e^(2t)
$
  Thus $dv(y,t,2)-3dv(y,t)+2y = 2e^(2t)$, which has the solutions
  $y = A e^t+B e^(2t)+2t e^(2t) = A x+B x^2+2x^2ln x$
]
== The Chain Rule in Several Variables
We prepare by looking at tangent planes.
The equation of the tangent plane:
$ z = f(a,b) + f'_x (a,b) dot (x-a)+f'_y (a,b) dot (y-b) $
Compare with the equation of the tangent line in single-variable calculus:
$ y = f(a)+f'(a) dot (x-a) $
With $h = Delta x, k = Delta y$, and with $Delta z = f(a+Delta x, b+Delta y) - f(a,b)$, we get
$(Delta z)/(Delta t) approx pdv(f,x) dot (Delta x)/(Delta t) + pdv(f,y) dot (Delta y)/(Delta t)$.
If $Delta t$ is small, $f in C^1$, and we let $Delta t -> 0$, one can show that:
$ dv(z,t) = pdv(f,x) dot dv(x,t) + pdv(f,y) dot dv(y,t) <- "The chain rule" $
Thus, if $z = f(x,y)$, where $x=x(t), y=y(t)$, then $dv(z,t) = pdv(f,x) dot dv(x,t) + pdv(f,y) dot dv(y,t)$, or, with $f=f(x,y)$, $dv(f,t) = pdv(f,x) dot dv(x,t) + pdv(f,y) dot dv(y,t)$.
#example(2)[
  Let $f(u,v) = v sin(u v)$ where $u=x^2, v=3x$. Then:
  $f = 3x sin(3x^3)$. The chain rule gives $dv(f,x) = pdv(f,u) dot dv(u,x) + pdv(f,v) dot dv(v,x) = v^2 cos(u v) dot 2x + (sin(u v) + u v cos(u v)) dot 3 = 3 sin(3x^3) + 27 x^3 cos(3x^3)$.
]
= Tangent Planes, Gradient, and Directional Derivative
== Gradient
If $f=f(x,y)$ has continuous partial derivatives (i.e. $f$ is differentiable), then $grad f$ (the gradient of $f$) is defined as
$ grad f = mat(f'_x; f'_y) $
#example(1)[
  $ f(x,y,z) = ln(x^2+y)+e^(y z) => grad f = mat(pdv(f,x); pdv(f,y); pdv(f,z)) = mat((2x)/(x^2+y); 1/(x^2+y) + z e^(y z); y e^(y z)) $
  Note that $grad f$ is a _vector_
]
== Tangent to a Curve
Let $Gamma$ be a curve in the plane (or in space), given by $(x,y) = (x(t),y(t)) = va(phi)(t)$. We seek a tangent to $Gamma$. Examine $t$ and $t+h$:
$ lim_(h->0) (va(phi)(t+h)-va(phi)(t))/h $
If the limit exists and is $!= va(0)$, it gives the tangent vector of the curve, $va(phi)'(t) = (x'(t),y'(t))$.
#example(2)[
  Determine the equation of the line tangent to the curve $(x,y,z) = (t, t^2, t^3)$ at the point where $t = -1$.
  $t = -1$ gives $(x,y,z) = (-1,1,-1)$. A tangent vector is obtained from
$ mat(x'(t); y'(t); z'(t)) = mat(1; 2t; 3t^2)
    = \/ t = -1 \/ = mat(1; -2; 3) $
  So the equation of the tangent line is
$ mat(x; y; z) = mat(-1; 1; -1) + t mat(1; -2; 3), t in RR $
]
Consider a level surface $f(x,y,z) = C$ and let $Gamma$ be a curve on this surface. $Gamma$ is given by $(x,y,z)=(x(t),y(t),z(t))$. Along this curve, therefore, $f(x(t), y(t), z(t)) = C$. Differentiating with respect to $t$ gives $dv(f,t)=0$. However,
$ dv(f,t) = pdv(f,x) dot dv(x,t)
+ pdv(f,y) dot dv(y,t)
+ pdv(f,z) dot dv(z,t)
= mat(pdv(f,x); pdv(f,y); pdv(f,z)) dot mat(dv(x,t); dv(y,t); dv(z,t))
= grad f dot va(phi)'(t) = 0 $
But $va(phi)$ lies in the surface, so $va(phi)'$ is a tangent vector to the surface, and this holds for all curves in the surface. Hence $grad f$ is perpendicular to all vectors tangent to the surface, and so $grad f$ is a normal vector to the level surface $f=C$.
#example(3)[
  Find the tangent plane to the surface $z = x^2+y^2$ at the point $(-1,1)$.
  $x=-1, y=1 => z=2$.
  Rewrite the surface as a level surface.
$ z = x^2+y^2 <=> F(x,y,z) = x^2+y^2-z = 0 => grad F = mat(2x; 2y; -1) $
  So $grad F$ is a normal to the surface (at every point on the surface). In particular, at the given point $(x,y,z)=(-1,1,2)$, so $grad F = (-2,2,-1)$, and $(x,y,z)$ lies in the plane iff $(-2,2,-1) dot (x+1, y-1, z-2) = 0$, that is, $-2x+2y-z = 2$.
]
== Directional Derivative
Let $z = f(x,y)$. Study $z$ when $(x,y) = (a_1,a_2)+t(v_1,v_2)$ (this describes a line in the plane, through $(a_1,a_2)$, with direction $(v_1, v_2)$), where $|va(v)| = |(v_1, v_2)| = 1$. Then we define (if the limit exists)
$ f'_va(v) (va(a)) = lim_(t->0) (f(va(a)+t va(v))-f(va(a)))/t $
Note that if $va(v) = vu(x)$ then $f'_va(v)(va(a)) = f'_x (va(a))$. The same holds in the $y$-direction.
If we let $h(t) = f(va(a)+t va(v)) = f(va(g)(t))$, where $va(g)(t) = va(a)+t va(v)$, then
$ (f(va(a)+t va(v))-f(va(a)))/t = (h(t)-h(0))/t -> h'(0), t -> 0 $
but
$ & h'(0) \
& = \/ "Kedjeregeln" \/ \
& = grad f(va(g)(0)) dot va(g)'(0) \
& = grad f(va(a)) dot va(g)'(0) \
& = grad f(va(a)) dot va(v), |va(v)| = 1 $
Thus $f'_va(v)(va(a)) = grad f(va(a)) dot vu(v)$ if $f$ is differentiable.
Note also that $f'_va(v)(va(a)) = grad f(va(a)) dot vu(v) = |grad f(va(a))||vu(v)| cos alpha$, where $cos alpha in [-1,1]$, so when $|va(v)|=1$ we have
$ f'_va(v)(va(a)) = cases(
"Max: " &|grad f(va(a))| \, & alpha=0,
"Min: " -&|grad f(va(a))| \, & alpha=-pi
) $
Thus, if $z=f(x,y)$ is a function surface, then $grad f$ points in the direction in the $x y$-plane in which the function grows fastest.
== Summary
We have two interpretations of the gradient:
- Function surface, $z=f(x,y)$: $grad f(va(a))$ points in the direction in the $x y$-plane in which the function grows fastest.
- Level surface, $F(x,y,z)=C$: $grad F(a,b,c)$ is a normal to the surface.
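For instance, with $f(x,y) = x^2+y^2$ we get $grad f = (2x, 2y)$: viewed as a function surface, the gradient points radially away from the origin, the direction of steepest ascent of the paraboloid; viewed through the level curves $x^2+y^2=C$, the same vector is normal to each circle.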
= Curves, Surfaces, and Jacobians
A curve in parametric form looks like $(x,y,z) = (x(t),y(t),z(t))$. Correspondingly, $va(r)(s,t) = (x(s,t), y(s,t), z(s,t))$ describes a surface in space. A fixed $s = s_0$ gives the curve $va(r)(s_0,t) = (x(s_0,t), y(s_0,t), z(s_0,t))$, which lies on the surface. This curve has the tangent vector $pdv(va(r),t)(s_0,t)$. A fixed $t = t_0$ likewise gives a curve on the surface, with tangent vector $pdv(va(r),s)(s,t_0)$. A normal vector to the surface, at the point $(s_0,t_0)$, is obtained from:
$ va(n) = pdv(va(r),s)(s_0,t_0) times pdv(va(r),t)(s_0,t_0) $
#example(1)[
  Determine the tangent plane to the surface
$ cases(
x & = & 2sin theta cos phi,
y & = & 3sin theta sin phi,
z & = & 4cos theta,
phi & in & [0,2pi],
theta & in & [0,pi]
) $
  at the point $va(r) = (x,y,z) = (0,3,0)$.
  We determine $phi$ and $theta$ at this point: the equations force $theta = pi/2$ and $phi = pi/2$. Then $pdv(va(r),theta) = (0,0,-4)$ and $pdv(va(r),phi) = (-2,0,0)$, so a normal is $va(n) = pdv(va(r),theta) times pdv(va(r),phi) = (0,8,0)$, and the tangent plane is $y = 3$.
]
= Double Integrals
== Integrals in One Variable
Divide an interval $[a,b]$ into $n$ parts. Then construct a lower step function $phi_l (x)$ and an upper step function $phi_u (x)$ on these subintervals. The integrals of these step functions are sums of the form $sum c phi_n (x)$. If both converge to the same value as the subinterval lengths tend to 0, the integral of $f$ exists.
== Integrals in Two Variables
In two variables, integrals are taken over a rectangle, $D={(x,y) | x in [a,b], y in [c, d]}$. Divide $D$ into small rectangles. Consider upper and lower step functions (as in the single-variable case), add up all the "volumes" $f(x,y)dd(x,y)$, and refine the partition. The following is defined as the common limit, if such a limit exists:
$ integral.double_D f(x,y)dd(x,y) $
If $f(x,y) >= 0$ on $D$, then $integral.double_D f(x,y)dd(x,y)$ can be interpreted as the volume between the $x y$-plane and the surface $z=f(x,y)$, where $(x,y) in D$.
#theorem([])[
  If $D={(x,y) | x in [a,b], y in [c,d]}$ and $f$ is continuous on $D$, then
$ integral.double_D f(x,y) dd(x,y) = integral_a^b (integral_c^d f(x,y)
dd(y))dd(x) $
]
Note that the integral inside the parentheses is the "area" of a slice for a fixed $x$. In this context, $dd(x)$ is the "thickness" of the slice.
#example(1)[
$ & integral.double_D x/(1+x y)^2dd(x,y) \
& D={(x,y) | x in [1,4], y in [0,1]} $
  Integrate first with respect to $y$, since this seems easiest.
$ & integral_1^4 (integral_0^1 x/(1+x y)^2 dd(y))dd(x) \
& = \/ 1+x y = t, x dd(y) = dd(t) \/ \
& = integral_1^4 [-1/(1+x y)]_(y=0)^(y=1)dd(x) \
& = integral_1^4 (-1/(1+x)+1)dd(x) \
& = [x-ln|1+x|]_1^4 = \
& = 4-ln 5-(1-ln 2) \
& = 3-ln 5/2 $
]
More general regions, $D = {(x,y) | x in [a,b], y in [phi(x), psi(x)]}$, give
$ integral.double_D f(x,y)dd(x,y) =
integral_a^b (integral_phi(x)^psi(x) f(x,y) dd(y))dd(x) $
Note that the limits on the outer integral are still constant.
#example(2)[
  $ integral.double_D (x^2+y^2)dd(x,y) $ where $D$ is a triangle with vertices at $(0,0), (1,0), (1,2)$. The region can be described in two ways:
#enum()[
    Integrate with respect to $y$ first. (The limits for $x$ must be constant.) This gives $x in [0,1]$. For each fixed $x$, $y in [0,2x]$. Hence:
$ integral.double_D (x^2+y^2)dd(x,y)
& = integral_0^1 (integral_0^(2x)(x^2+y^2)dd(y))dd(x) \
& = integral_0^1 [x^2y+(y^3)/3]_(y=0)^(y=2x) dd(x) \
& = integral_0^1 (2x^3+8/3x^3)dd(x) \
& = integral_0^1 14/3x^3dd(x) \
& = [14/12x^4]_0^1 \
& = 7/6 $
],[
    Integrate with respect to $x$ first. Constant limits with respect to $y$. This gives $D = {(x,y) | y in [0,2], x in [y/2,1]}$. We then get:
$ integral.double_D (x^2+y^2)dd(x,y)
& = integral_0^2 (integral_(y/2)^1 (x^2+y^2)dd(x))dd(y) \
& = ... \
& = 7/6 $
]
]
#example(3)[
  $ integral.double_D (x+2y)dd(x,y) $ where $D$ is the unit disc. The region can be described as $D = {(x,y) | x in [-1,1], y in [-sqrt(1-x^2), sqrt(1-x^2)]}$. We then get:
$ integral.double_D (x+2y)dd(x,y)
& = integral_(-1)^1 (integral_(-sqrt(1-x^2))^sqrt(1-x^2)(x+2y)
dd(y))dd(x) \
& = integral_(-1)^1 [x y+y^2]_(-sqrt(1-x^2))^sqrt(1-x^2) dd(x) \
& = integral_(-1)^1 2x sqrt(1-x^2)dd(x) \
& = [-2/3(1-x^2)^(3/2)]_(-1)^1 \
& = 0 $
]
#example(4)[
  Compute $ integral_0^2 integral_y^2e^(x^2)dd(x,y) $ First sketch the region.
  It is a triangle with vertices at $(0,0), (0,2), (2,2)$. Thus $D = {(x,y) | x in [0,2], y in [0,x]}$. Hence:
$ integral_0^2 (integral_y^2e^(x^2)dd(x))dd(y)
& = integral.double_D e^(x^2)dd(x) \
& = integral_0^2 (integral_0^x e^(x^2)dd(y))dd(x) \
& = integral_0^2 [y e^(x^2)]_0^x dd(x) \
& = integral_0^2 x e^(x^2)dd(x) \
& = [1/2e^(x^2)]_0^2 \
& = (e^4 -1)/2 $
]
If $f(x,y) = g(x) dot h(y)$ and the region is the rectangle $D = {(x,y) | x in [a,b], y in [c,d]}$, then:
$ integral.double_D f(x,y) dd(x,y)
& = integral.double_D g(x) dot h(y) dd(x,y) \
& = integral_c^d (integral_a^b g(x) dot h(y) dd(x))dd(y) \
& = integral_c^d (h(y) integral_a^b g(x) dd(x))dd(y) \
& = integral_a^b g(x) dd(x) dot integral_c^d h(y)dd(y) $
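For example, over the unit square this factorization gives
$ integral.double_([0,1] times [0,1]) x e^y dd(x,y) = integral_0^1 x dd(x) dot integral_0^1 e^y dd(y) = 1/2 (e-1) $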
= Change of Variables in Double Integrals
Consider the linear change of variables:
$ cases(u = a x+b y, v = c x+d y) <=> vec(u,v) = mat(a,b; c,d)vec(x,y) $
If $D$ is the unit square, with image $D'$, then
$ underbrace(mu(D), "area of " D) = 1, \
underbrace(mu(D'), "area of " D') =
underbrace(mat(delim: "||", a,b; c,d), "absolute value of\n the determinant") $
For linear changes of variables we have
$ mat(delim: "|", pdv(u,x), pdv(u,y); pdv(v,x), pdv(v,y))
= mat(delim: "|", a,b; c,d) $
Now consider a general change of variables. If $u=u(x,y), v=v(x,y) in C^1$, then $abs(dv((u,v),(x,y)))$ gives the local area scale of the map from $(x,y)$ to $(u,v)$.
#theorem([])[
  If $x=x(u,v), y=y(u,v)$ is an invertible $C^1$-map from $D'$ (in the $u v$-plane) onto $D$ (in the $x y$-plane), such that $abs(dv((x,y),(u,v))) != 0$ in $D'$, then
$ integral.double_D f(x,y)dd(x,y)
= integral.double_(D') f(x(u,v),y(u,v))
underbrace(abs(dv((x,y),(u,v))),
"Tar hand om\n area-ändringen")
dd(u,v) $
]
Compare with the one-variable case:
$ integral_x(a)^x(b) f(x)dd(x) = \/ x=x(u), dd(x)=dv(x,u)dd(u)\/
= integral_a^b f(x(u)) dot dv(x,u)dd(u) $
#example(1)[
  Compute
$ integral.double_D x/(x+2y)dd(x,y) \
D = {(x,y) | 2x-y in [0,2], x+2y in [1,2]} $
  If we want to integrate without a change of variables, we have to split the region; otherwise the shape is too awkward. Instead, change variables to obtain a nicer region. Set $u=2x-y, v=x+2y$, that is
$ vec(u,v) = mat(2,-1; 1,2)vec(x,y) $
  This gives the new region $D' = {(u,v) | u in [0,2], v in [1,2]}$. Note that the determinant of the matrix is nonzero ($=5$), so the change of variables is invertible. Then
$ integral.double_D x/(x+2y)dd(x,y)
= integral_(D') x(u,v)/(x(u,v)+2y(u,v))
mat(delim: "||", dv((x,y),(u,v)))dd(u,v) $
  We get
$ vec(x,y) = 1/5 mat(2,1; -1,2)vec(u,v) $
  So $x=(2u+v)/5, y=(-u+2v)/5$. The integral above therefore becomes
$ integral.double_(D') (2u+v)/5/v dot 1/5 dd(u,v)
= 1/25 integral_1^2 integral_0^2 (2u/v+1)dd(u,v)
= ... = 1/25 (4ln 2 + 2) $
]
#example(2)[
  Compute
$ integral.double_D e^(-(x^2+y^2))dd(x,y) \
D = {(x,y) | x^2+y^2 in [1,3], y-x >= 0} $
  The region is half of an annulus. Therefore use polar coordinates,
  $x = r cos phi, y = r sin phi, mat(delim: "||", dv((x,y),(r,phi)))=r$.
  For the new region we now have
$ D' = {(r,phi) | r^2 in [1,3], r(sin phi-cos phi) >= 0} \
=> cases(
r in [1, sqrt(3)],
sin phi >= cos phi <=> phi in [pi/4, 5pi/4]
) $
  We thus get
$ integral.double_D e^(-(x^2+y^2))dd(x,y)
= & integral.double_(D') r e^(-r^2) dd(r,phi) \
= & integral_(pi/4)^(5pi/4) dd(phi) dot
integral_1^(sqrt(3)) r e^(-r^2) dd(r) \
= & pi [-1/2e^(-r^2)]_1^sqrt(3)
= & pi/2(e^(-1)-e^(-3)) $
]
= Triple Integrals
$ integral.triple_D f(x,y,z)dd(x,y,z) $
This is defined as for double integrals (small pieces, sum, refine). It can be interpreted as:
- $f(x,y,z) = 1$: $integral.triple_D 1 dd(x,y,z) =$ the volume of $D$.
- If $f(x,y,z)=$ the density at the point $(x,y,z)$: $integral.triple_D f(x,y,z) =$ the mass of $D$.
#theorem([])[
  If $D = {(x,y,z) | (x,y) in D_0, g(x,y) <= z <= h(x,y)}$ and $f$ is continuous, then:
$ integral.triple_D f(x,y,z)dd(x,y,z)
= integral.double_(D_0) (integral_(z=g(x,y))^(z=h(x,y))f(x,y,z)
dd(z))dd(x,y) $
  In particular, if
  $D = {(x,y,z) | x in [a,b], y in [phi(x), psi(x)], z in [g(x,y), h(x,y)] }$
  then:
$ integral.triple_D f(x,y,z)dd(x,y,z)
= integral_a^b (integral_phi(x)^psi(x)(integral_g(x,y)^h(x,y)
f(x,y,z)dd(z))dd(y))dd(x) $
]
#theorem([])[
  If $D = {(x,y,z) | z in [a,b], (x,y) in D_z}$, then:
$ integral.triple_D f(x,y,z)dd(x,y,z)
= integral_a^b (integral.double_(D_z)f(x,y,z)dd(x,y))dd(z) $
  This resembles the slice formula from single-variable calculus.
]
#example(1)[
$ I = integral.triple_D x z e^(x y)dd(x,y,z) \
D = {(x,y,z) | x,y in [0,1], z in [0,2]} $
  We get:
$ I & = integral_0^1 (integral_0^1 (integral_0^2 x z e^(x y)
dd(z))dd(y))dd(x) \
& = integral_0^1 (integral_0^1 [(z^2)/2x e^(x y)]dd(y))dd(x) \
& = ... \
& = 2e-4 $
]
#example(2)[
  The volume of a tetrahedron with vertices at $(0,0,0), (1,0,0), (0,1,0), (0,0,2)$.
  Begin by letting $D_0$ be the projection of the tetrahedron onto the $x y$-plane, which is the triangle $D_0 = {x in [0,1], y in [0, 1-x]}$. In the $z$-direction, the "floor" is the $x y$-plane, that is $z=0$, and the "roof" is the plane through $(1,0,0), (0,1,0), (0,0,2)$. Hence, by linear algebra, $2x+2y+z = 2$, or $z = 2-2x-2y$. Then
  $D = {(x,y,z) | x in [0,1], y in [0,1-x], z in [0,2-2x-2y]}$ and
$ V = integral_0^1 (integral_0^(1-x)(integral_0^(2-2x-2y)1
dd(z))dd(y))dd(x) = ... = 1/3 $
]
#example(3)[
  The volume of the solid lying inside the ball $x^2+y^2+z^2 <= 1$ and above the cone $z = sqrt(x^2+y^2)$. Option 1 is to project the curve where the cone and the sphere intersect onto the $x y$-plane and let $D_0$ be the shadow. Since the intersection is given by ${(x,y,z) | x^2+y^2+z^2=1, z=sqrt(x^2+y^2)}$, that is the circle $x^2+y^2=1/2$ at height $z = 1/sqrt(2)$, the shadow is the disc $D_0 = {(x,y) | x^2+y^2 <= 1/2}$. For every $(x,y)$ in $D_0$, now let $z$ run from the floor, $z=sqrt(x^2+y^2)$, to the roof, $x^2+y^2+z^2=1, z>=0$, so
$ V & = integral.double_(D_0) (integral_sqrt(x^2+y^2)^sqrt(1-x^2-y^2)
dd(z))dd(x,y) \
& = \/ "Polärt byte," x=r cos phi, y=r sin phi, \
& r in [0,1/2], phi in [0,2pi], dv((x,y),(r,phi))=r \/ \
& = integral.double_(D_0) (sqrt(1-r^2)-r)r dd(r,phi) \
& = integral_0^(1/sqrt(2))(r sqrt(1-r^2)-r^2) dd(r)
dot integral_0^(2pi) dd(phi) \
& = 2pi dot [-1/3 (1-r^2)^(3/2)-(r^3)/3]_0^(1/sqrt(2)) \
& = (2pi)/3 (1-1/sqrt(2)) $
]
== Change of Variables
Change of variables in $integral.triple$ works as in $integral.double$. Let
$ cases(x = x(u,v,w), y = y(u,v,w), z = z(u,v,w)) $ be an invertible
map from $D'$ in $u v w$-space onto $D$ in $x y z$-space. Then we define:
$ 0 != dv((x,y,z), (u,v,w)) = mat(delim: "|",
pdv(x,u), pdv(x,v), pdv(x,w);
pdv(y,u), pdv(y,v), pdv(y,w);
pdv(z,u), pdv(z,v), pdv(z,w)
) $
If $dv((x,y,z),(u,v,w)) != 0$, then
$ integral.triple_D f(x,y,z)dd(x,y,z)
= integral.triple_(D')f(x(u,v,w), y(u,v,w),z(u,v,w))
abs(dv((x,y,z),(u,v,w))) dd(u,v,w) $
#example(4)[
  Determine the volume of the region
$ D = {(x,y,z) | x in [0,1], x+y+z in [0,2], x+y-z in [0,3]} $
  Set $D' = {(u,v,w) | u=x, v=x+y+z, w=x+y-z}$; this gives the determinant
$ dv((u,v,w), (x,y,z)) =
mat(delim: "|",
1,0,0;
1,1,1;
1,1,-1)
= mat(delim: "|",
1,1;
1,-1
) = -2 $
  So $abs(dv((u,v,w), (x,y,z))) = 2 != 0$ and $abs(dv((x,y,z), (u,v,w))) = 1/2$. The volume is then
$ V = integral.triple_D 1dd(x,y,z)
= integral.triple_(D') 1 dot 1/2 dd(u,v,w)
= 1/2 dot 1 dot 2 dot 3 = 3 $
]
== Spherical Coordinates
Set:
$ cases(
x = r sin theta cos phi,
y = r sin theta sin phi,
z = r cos theta
) $
The determinant of this change of variables is then:
$ abs(dv((x,y,z), (r, phi, theta))) = r^2 sin theta $
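As a quick check of this Jacobian, the volume of a ball of radius $R$ becomes
$ integral.triple_(x^2+y^2+z^2 <= R^2) dd(x,y,z) = integral_0^R r^2 dd(r) dot integral_0^pi sin theta dd(theta) dot integral_0^(2pi) dd(phi) = R^3/3 dot 2 dot 2pi = (4 pi R^3)/3 $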
= Applications of Integrals
#example(1)[
  Compute $integral.triple_D (x-y)dd(x)dd(y)dd(z)$ where $D$ is the tetrahedron with vertices at $(0,0,0), (1,1,1), (1,1,0), (1,0,1)$.
  First change to better variables. Perform a change of basis, based on three of the edge vectors. Linear algebra tells us that a change of basis from an old basis $underline(e)$ to a new basis $underline(f)$ is given by $underline(f) = underline(e)T$, where the columns of $T$ consist of the coordinates of $overline(f)_1, overline(f)_2, overline(f)_3$. The notation will be:
$ cases(X = "Gamla koordinater",
Y = "Nya koordinater",
X = T Y) $
  Introduce:
$ underline(f) = underline(e) T = underline(e) mat(1,1,1; 1,1,0; 1,0,1) $
  So:
$ X = vec(x,y,z) = T vec(u,v,w) = mat(1,1,1; 1,1,0; 1,0,1) vec(u,v,w) $
  Thus:
$ cases(x = u+v+w, y = u+v, z = u+w) $
  This gives
$ vec(u,v,w) = vec(1,0,0) <=> vec(x, y, z) = vec(1,1,1) $
  and similarly
$ vec(u,v,w) = (0,1,0) <=> vec(x,y,z) = vec(1,1,0) $
  as well as
$ vec(u,v,w) = (0,0,1) <=> vec(x,y,z) = (1,0,1) $
  The origin is not moved. So the new region (in $u v w$-space) is a tetrahedron with vertices at the origin and the unit vectors. This gives:
$ |dv((x,y,z),(u,v,w))| = || mat(1,1,1; 1,1,0; 1,0,1) || = |-1| = 1 $
  Then:
  $ integral.triple_D (x-y)dd(x)dd(y)dd(z) = & \
= & \/ x = u+v+w, y = u+v \/ \
= & integral.triple_(D') w dv((x,y,z), (u,v,w))dd(u)dd(v)dd(w)\
= & integral.triple_(D') w dd(u)dd(v)dd(w) \
=> I = & integral.triple_(D') w dd(u)dd(v)dd(w) \
= & integral_0^1 (
integral_0^(1-u) (
          integral_0^(1-u-v)
w
dd(w))dd(v)) dd(u)\
= & ... \
= & 1/24 $
]
#example(2, [
  Compute $integral.triple_D x^2 dd(x)dd(y)dd(z)$, where $D$ is given by $0 <= y <= z^2 <= x^4 <= 1$.
  First try to describe the region in a useful way. Looking only at $x$, we see that $0 <= x^4 <= 1$, that is, $-1 <= x <= 1$. For these $x$ we also have $0 <= y <= z^2 <= x^4$ (with $x$ fixed). So we have:
$ cases(y >= 0, y <= z^2, z^2 <= x^4 <=> -x^2 <= z <= x^2) $
  Thus:
$ integral.triple_D x^2dd(x,y,z)
= & integral_(-1)^1(integral.double_(D_x) x^2
dd(y,z))dd(x) \
= & integral_(-1)^1(
integral_(-x^2)^(x^2)(
integral_0^(z^2)
x^2
dd(y))dd(z))dd(x) \
= & ... \
= & 4/27 $
])
#example(4, [
  $D$ is the part of the unit disc lying in the first quadrant, with surface density $rho(x,y) = sqrt(x^2+y^2)$. What is the mass?
  A small element with area $dd(x,y)$ and surface density $rho(x,y)$ has mass $rho(x,y)dd(x,y)$. The total mass of the plate is
$ m = & integral.double_D rho(x,y)dd(x,y) \
= & integral.double_D sqrt(x^2+y^2)dd(x,y) \
= & \/ "Polära koord:" x = r cos(phi), y = r sin(phi),
|dv((x,y),(r,phi))| = r, \ & D' = {phi in [0, pi/2], r in [0,1]} \/ \
= & integral.double_(D') r dot r dd(r,phi) \
= & integral_0^1 r^2 dd(r)
dot integral_0^(pi/2) dd(phi) \
= & pi/6 $
])
#example(5, [
  Determine the center of mass of a homogeneous half-ball:
  $ cases(x^2+y^2+z^2 <= R^2, z >= 0) $
  The center of mass lies on the $z$-axis (by symmetry), and its $z$-coordinate satisfies $0 <= z_(t p) <= R$. So the center of mass is $(x,y,z) = (0,0,z_(t p))$, where
$ z_(t p) = (integral.triple_D z dd(m))
/(integral.triple_D dd(m)) $
  where $dd(m) = rho(x,y,z)dd(x,y,z)$. The body is homogeneous, so $rho(x,y,z) = rho = "constant"$. We have
$ z_(t p)
= (integral.triple_D z rho dd(x,y,z))/(integral.triple_D rho
dd(x,y,z))
= (integral.triple_D z dd(x,y,z))/(integral.triple_D dd(x,y,z)) $
  $integral.triple_D dd(x,y,z)$ is the volume of $D$, which is $1/2 dot (4 pi R^3)/3 = (2 pi R^3)/3$. Introduce spherical coordinates for the other integral:
$ cases(x = r sin theta cos phi, y = r sin theta sin phi, z = r cos theta,
|dv((x,y,z), (r, theta, phi))| = r^2 sin theta) $
  We get the limits:
$ D' = cases(r in [0,R], theta in [0, pi/2], phi in [0, 2pi]) $
  This gives:
$ & integral.triple_(D') underbrace(r cos theta, z)
    underbrace(r^2 sin theta, | dv((x,y,z),(r,theta,phi))|)
dd(r, theta, phi) \
= & integral_0^R r^3 dd(r)
dot integral_0^(pi/2) cos theta sin theta dd(theta)
dot integral_0^(2pi) dd(phi) \
= & [(r^4)/4]_0^R dot [(sin^2 theta)/2]_0^(pi/2) dot 2pi \
= & (pi R^4)/4
$
  Thus:
$ z_(t p) = ((pi R^4)/4)/((2pi R^3)/3) = 3/8 R $
  So $(x_(t p), y_(t p), z_(t p)) = (0,0,3/8 R)$
])
|
https://github.com/xsro/xsro.github.io | https://raw.githubusercontent.com/xsro/xsro.github.io/zola/typst/Control-for-Integrator-Systems/7homo.typ | typst | #import "lib/lib.typ":ode45,get_signal,op,sig
#import "@preview/cetz:0.2.0"
#import cetz.plot
#import cetz.draw: *
= Homogeneity
Homogeneous control laws appear as solutions to many control problems such as a minimum time feedback control for the chain of integrators or the high-order sliding mode design. The homogeneity allows some time constraints in control systems to be fulfilled by means of a proper selection of the so-called homogeneity degree. Similar to the linear case, an asymptotic stability of a homogeneous system implies its robustness (input-to-state stability) with respect to a certain class of parametric uncertainties and exogenous perturbations.
Homogeneity is discussed in many works, for example @polyakov_generalized_2020,
@马诺诺2010齐次性理论在非线性系统稳定性分析及控制设计中的应用,
and @shtesselSlidingModeControl2014.
*Corollary 6.1* The global uniform finite-time stability of homogeneous differential equations (Filippov inclusions) with negative homogeneity degree is *robust with respect to locally small homogeneous perturbations*.
#pagebreak()
== Homogeneity of coordinate, function and vector field
#columns(2)[
Assign a weight $m_i>0$ (the *homogeneity degree*) to each coordinate $x_i in RR, i=1,dots,n$.
$
"deg"(x_i)=m_i.
$
The corresponding simple linear transformation
$
d_k: (x_1,x_2,dots, x_n) arrow.r.bar (kappa^(m_1)x_1,kappa^(m_2)x_2,dots,kappa^(m_n)x_n)
$
is called *homogeneity dilation*, and $kappa>0$ is called its parameter.
A *function* $f: RR^n arrow RR$ is called homogeneous of the degree
(weight) $q in RR$ with the above homogeneity dilation and written as $"deg"(f)=q$,
if for any $kappa>0$,
the identity $f(d_k x)=kappa^q f(x)$ holds.
A *vector field* $f: RR^n arrow RR^n$, $f=(f_1,dots,f_n)^T$,
is called homogeneous of degree $q in RR$ with the above dilation and written as $"deg"(f)=q$,
if all its components $f_i$ are homogeneous and the following identities hold:
$
"deg" f_i="deg" x_i + "deg" f ="deg" x_i +q, i=1,2,dots,n
$
#colbreak()
Let $A$ and $B$ be two homogeneous functions of $x in RR^n$ different from identical zero, and let $lambda$ be a real number;
then
1. The sum of A and B is a homogeneous function only if $"deg" A ="deg" B$
2. $forall lambda != 0$, we have $"deg"lambda =0$
3. $"deg" A B ="deg" A +"deg" B$
4. $"deg" A/B="deg" A -"deg" B$
5. $"deg" lambda A ="deg" A$
6. $"deg" diff/(diff x_i) A ="deg" A -"deg" x_i$ if $diff/(diff x_i) A$ is not identical zero
To verify the last equality it can be seen that
$
diff/(diff kappa^m_i x_i) A(d_k x)&=
kappa^(-m_i) diff/(diff x_i) kappa^("deg" A) A(x)\
&=
kappa^("deg" A -m_i) diff/(diff x_i) A(x)
$
The last equality tells us that for a system $dot(x)=f(x)$ we have $dot(x)_i=f_i(x)$.
Then,
$
"deg" dot(x)_i="deg" f_i="deg" x_i-"deg"t
$
]
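For instance (a small worked check, with weights chosen purely for illustration): with $"deg" x_1=1$ and $"deg" x_2=2$, the function $A(x)=x_1^2+x_2$ satisfies $A(kappa x_1, kappa^2 x_2)=kappa^2 A(x)$, so $"deg" A=2$. Likewise, with $"deg" x=2$, the scalar field $f(x)=-sqrt(abs(x))"sgn"(x)$ satisfies $f(kappa^2 x)=kappa f(x)$, that is $"deg" f_1 = "deg" x + "deg" f$ with $"deg" f=-1$.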
#pagebreak()
== Homogeneity of differential equations and inclusions
#columns(2)[
Consider the one-dimensional system $dot(x)=f(x)=x^2$,
let $"deg" x=1$, then the homogeneity degree of *function* $f$ is $"deg" f=2$ and the homogeneity degree of *vector field* $f$ is
$"deg" f = "deg" f_i -"deg" x_i =1$.
_The ambiguity disappears if we speak about the homogeneity of the differential equation $dot(x)=f(x)$._
We call the differential equation $dot(bold(x))=f(bold(x))$ homogeneous of degree $q$ if the system is invariant with respect to the linear time-coordinate transformation
$
G_k:(t,bold(x)) arrow.bar (kappa^(-q)t,d_k x), kappa>0.
$<Gk>
Then we have
$
f(x)=kappa^(-q) d_k^(-1) f(d_k x)
$.
The homogeneity degree of the system is
$"deg" bold(f)="deg" f_i-"deg" x_i=-"deg" t$
.
$
f_i(d_k x)=kappa^("deg" f_i) f_i(x)=kappa^("deg" x_i+"deg" f) f_i(x)\
bold(f)(d_k x)=d_k kappa^("deg" f) bold(f)(x)
$
*Note*: The nonzero homogeneity degree $q$ of a vector field can always be scaled to $plus.minus 1 $ by an appropriate proportional change to the weights of the coordinates and time.
*Definition 6.6* A vector-set field $F(x) subset RR^n, x in RR^n$, and the differential inclusion
$
dot(x) in F(x)
$
are called homogeneous of degree $q in RR$ with the above dilation, which is written as $"deg" F=q$,
if the DI is invariant with respect to the time-coordinate transformation @Gk
]
#pagebreak()
== Convergence Rates of Homogeneous Algorithms
#columns(2)[
=== Finite-Time and Fixed-Time Stabilization
Consider the simplest scalar first-order system
$
dot(x)=u
$
- The classical approach gives the standard _linear_ proportional feedback
$
u_"linear"(x)=-x
$
which guarantees asymptotic (exponential) convergence to the origin of every trajectory of the closed-loop system: $abs(x(t))=e^(-t)abs(x_0)$.
- The *globally homogeneous* feedback is
$
u_"FT"(x)=-sqrt(abs(x))"sgn"(x).
$
This algorithm stabilizes the system at the origin in a _finite time_:
$
x(t)=0, "for" t>= T(x_0)=2sqrt(abs(x_0))
$
- The _fixed-time_ stabilizing controller can be selected *locally homogeneous* in the form:
$
u_"FxT"(x)=cases(
-sig( x )^(1/2)quad& abs(x)<=1,
-sig( x )^(3/2)quad& abs(x)>1,
).
$
The system will be stabilized within 4 seconds, that is $x(t)=0, forall space t>=4, space forall x_0 in RR$.
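To see where the uniform bound of 4 seconds comes from: for $abs(x)>1$ the closed loop gives $dif/(dif t) abs(x)^(-1/2)=1/2$, so $abs(x)$ reaches $1$ after at most $2(1-abs(x_0)^(-1/2))<2$ seconds, no matter how large $abs(x_0)$ is; the remaining finite-time phase from $abs(x)=1$ takes another $2$ seconds.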
#for x0 in (0.5, 10) {
  // Right-hand side of the three closed-loop systems, integrated simultaneously.
  let rhs(t, x) = {
    let k = 1
    // Fixed-time law: exponent 1/2 inside the unit ball, 3/2 outside.
    let dxfxt = -op.sig(x.xfxt, 1/2)
    if calc.abs(x.xfxt) > 1 {
      dxfxt = -op.sig(x.xfxt, 3/2)
    }
    let dx = (
      xlinear: -k * x.xlinear,        // u_linear = -x
      xft: -k * op.sig(x.xft, 1/2),   // u_FT = -|x|^(1/2) sgn(x)
      xfxt: k * dxfxt,                // u_FxT as defined above
    )
    dx
  }
  let (xout, dxout) = ode45(rhs, 6, (xlinear: x0, xft: x0, xfxt: x0), 0.005, record_step: 0.02)
  cetz.canvas({
    plot.plot(
      size: (8, 2),
      axis-style: "school-book",
      x-tick-step: 1, y-tick-step: x0,
      {
        plot.add(get_signal(xout, "xlinear"), label: $dot(x)=u_"linear"$)
        plot.add(get_signal(xout, "xft"), label: $dot(x)=u_"FT"$)
        plot.add(get_signal(xout, "xfxt"), label: $dot(x)=u_"FxT"$)
      },
      y-label: "value",
      x-label: "time",
    )
  })
}
// === Finite-Time and Fixed-Time Estimation
]
#pagebreak()
=== Robustness
=== Elimination of an Unbounded “Peaking” Effect
#pagebreak() |
|
https://github.com/typst/packages | https://raw.githubusercontent.com/typst/packages/main/packages/preview/icu-datetime/0.1.0/README.md | markdown | Apache License 2.0 | # icu-datetime
This library is a wrapper around [ICU4X](https://github.com/unicode-org/icu4x)' `datetime` formatting for Typst which provides internationalized formatting for dates, times, and timezones.
As the WASM bundle includes all localization data, it's quite large (about 8 MiB).
## Example
```typ
#import "@preview/icu-datetime:0.1.0": fmt-date, fmt-time, fmt-datetime, experimental
// These functions may change at any time
#import experimental: fmt-timezone, fmt-zoned-datetime
#let day = datetime(
year: 2024,
month: 5,
day: 31,
)
#let time = datetime(
hour: 18,
minute: 2,
second: 23,
)
#let dt = datetime(
year: 2024,
month: 5,
day: 31,
hour: 18,
minute: 2,
second: 23,
)
#fmt-date(day, locale: "de", length: "full") \
#fmt-time(time, locale: "de", length: "medium") \
#fmt-datetime(dt, locale: "fi", date-length: "full") \
#fmt-timezone(
"-07",
iana: "America/Los_Angeles",
local-date: dt,
zone-variant: "st",
includes: "specific-non-location-long"
) \
#fmt-zoned-datetime(
dt,
(
offset: "-07",
iana: "America/Los_Angeles",
zone-variant: "st", // standard
)
)
```
<!-- typst c res/example.typ res/example.png --root . -->

## API
### `fmt-date`
```typ
#let fmt-date(
dt,
locale: "en",
length: "full"
)
```
Formats a date in some `locale`. Dates are assumed to be ISO dates.
- `dt`: The date to format. This can be a [`datetime`] or a dictionary with `year`, `month`, `day`.
- `locale`: A [Unicode Locale Identifier].
- `length`: The length of the formatted date ("full", "long" (default), "medium", "short", or `none`).
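For example, a minimal call using the dictionary form of `dt` (the commented output is what the German locale data typically produces; the exact string depends on the CLDR data bundled with the plugin):
```typ
// e.g. "Freitag, 31. Mai 2024"
#fmt-date((year: 2024, month: 5, day: 31), locale: "de", length: "full")
```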
### `fmt-time`
```typ
#let fmt-time(
dt,
locale: "en",
length: "short"
)
```
Formats a time in some `locale`.
- `dt`: The time to format. This can be a [`datetime`] or a dictionary with `hour`, `minute`, `second`, and (optionally) `nanosecond`.
- `locale`: A [Unicode Locale Identifier].
- `length`: The length of the formatted time ("medium", "short" (default), or `none`).
### `fmt-datetime`
```typ
#let fmt-datetime(
dt,
locale: "en",
date-length: "long",
time-length: "short"
)
```
Formats a date and time in some `locale`. Dates are assumed to be ISO dates.
- `dt`: The date and time to format. This can be a [`datetime`] or a dictionary with `year`, `month`, `day`, `hour`, `minute`, `second`, and (optionally) `nanosecond`.
- `locale`: A [Unicode Locale Identifier].
- `date-length`: The length of the formatted date part ("full", "long" (default), "medium", "short", or `none`).
- `time-length`: The length of the formatted time part ("medium", "short" (default), or `none`).
### `fmt-timezone`
⚠ Warning: This function is experimental and can change at any time.
```typ
#let fmt-timezone(
offset,
iana: none,
bcp47: none,
local-date: none,
metazone-id: none,
zone-variant: none,
locale: "en",
fallback: "localized-gmt",
includes: ()
)
```
Formats a timezone in some `locale`.
- `offset`: A string specifying the GMT offset (e.g. "-07", "Z", "+05", "+0500", "+05:00")
- `iana`: Name of the IANA TZ identifier (e.g. "Brazil/West" - see [IANA](https://www.iana.org/time-zones) and [Wikipedia](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)). This is mutually exclusive with `bcp47`. This identifier will be converted to a BCP-47 ID.
- `bcp47`: Name of the BCP-47 timezone ID (e.g. "iodga" - see [timezone.xml](https://github.com/unicode-org/cldr/blob/main/common/bcp47/timezone.xml)). This is mutually exclusive with `iana`.
- `local-date`: A local date to calculate the metazone-id. This is mutually exclusive with `metazone-id`. When formatting zoned-datetimes this isn't necessary.
- `metazone-id`: A short ID of the metazone. A metazone is a collection of multiple time zones that share the same localized formatting at a particular date and time (e.g. "phil" - see [metaZones.xml](https://github.com/unicode-org/cldr/blob/main/common/supplemental/metaZones.xml) (bottom)).
- `zone-variant`: Many metazones use different names and offsets in the summer than in the winter. In ICU4X, this is called the _zone variant_. Supports `none`, `"st"` (standard), and `"dt"` (daylight).
- `locale`: A [Unicode Locale Identifier]
- `fallback`: The timezone format fallback. Either `"LocalizedGmt"` or a dictionary for an ISO 8601 fallback (e.g. `(iso8601: (format: "basic", minutes: "required", seconds: "never"))`).
- `includes`: An array or a single item (str/dictionary) of part(s) to include - corresponds to calls on [`TimeZoneFormatter`](https://docs.rs/icu/latest/icu/datetime/time_zone/struct.TimeZoneFormatter.html). Valid options are:
- `generic-location-format` (e.g. "Los Angeles Time")
- `generic-non-location-long` (e.g. "Pacific Time")
- `generic-non-location-short` (e.g. "PT")
- `localized-gmt-format` (e.g. "GMT-07:00")
- `specific-non-location-long` (e.g. "Pacific Standard Time")
- `specific-non-location-short` (e.g. "PDT")
- `iso8601`: A dictionary of ISO 8601 options `(iso8601: (format: "utc-basic", minutes: "optional", seconds: "optional"))` (e.g. "-07:00")
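As a sketch of how these options combine (all parameter values are taken from the lists above; the exact rendered string depends on the bundled CLDR data):
```typ
// Requests a generic non-location name (e.g. "Pacific Time") plus a localized GMT offset.
#fmt-timezone(
  "-07",
  iana: "America/Los_Angeles",
  local-date: datetime(year: 2024, month: 5, day: 31, hour: 18, minute: 2, second: 23),
  zone-variant: "st",
  includes: ("generic-non-location-long", "localized-gmt-format"),
)
```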
### `fmt-zoned-datetime`
⚠ Warning: This function is experimental and can change at any time.
```typ
#let fmt-zoned-datetime(
dt,
zone,
locale: "en",
fallback: "localized-gmt",
date-length: "long",
time-length: "long"
)
```
Formats a date and a time in a timezone. Dates are assumed to be ISO dates.
- `dt`: The date and time to format. This can be a [`datetime`] or a dictionary with `year`, `month`, `day`, `hour`, `minute`, `second`, and (optionally) `nanosecond`.
- `zone`: The timezone. A dictionary with `offset`, `iana`, `bcp47`, `metazone-id`, and `zone-variant`. The options correspond to the arguments for `fmt-timezone`. Only `offset` is mandatory - the other fields provide supplemental information for named timezones.
- `locale`: A [Unicode Locale Identifier]
- `fallback`: The timezone format fallback. Either `"localized-gmt"` or a dictionary for an ISO 8601 fallback (e.g. `(iso8601: (format: "basic", minutes: "required", seconds: "never"))`).
- `date-length`: The length of the formatted date part ("full", "long" (default), "medium", "short", or `none`).
- `time-length`: The length of the formatted time part ("full", "long" (default), "medium", "short", or `none`).
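For instance, a variant of the example at the top with explicit lengths (only `offset` is required in the `zone` dictionary; the other keys refine the name lookup, and the output depends on the bundled locale data):
```typ
#fmt-zoned-datetime(
  datetime(year: 2024, month: 5, day: 31, hour: 18, minute: 2, second: 23),
  (offset: "-07", iana: "America/Los_Angeles", zone-variant: "st"),
  locale: "en",
  date-length: "full",
  time-length: "medium",
)
```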
## Using Locally
Download the [latest release](https://github.com/Nerixyz/icu-typ/releases), unzip it to your [local Typst packages](https://github.com/typst/packages#local-packages), and use `#import "@local/icu-datetime:0.1.0"`.
## Building
To build the library, you need to have [Rust](https://www.rust-lang.org/), [Deno](https://deno.com/), and [`wasm-opt`](https://github.com/WebAssembly/binaryen) installed.
```sh
deno task build
```
While developing, you can symlink the WASM file into the root of the repository (it's in the `.gitignore`):
```sh
# Windows (PowerShell)
New-Item icu-datetime.wasm -ItemType SymbolicLink -Value ./target/wasm32-unknown-unknown/debug/icu_typ.wasm
# Unix
ln -s ./target/wasm32-unknown-unknown/debug/icu_typ.wasm icu-datetime.wasm
```
Use `cargo b --target wasm32-unknown-unknown` to build in debug mode.
[`datetime`]: https://typst.app/docs/reference/foundations/datetime/
[Unicode Locale Identifier]: https://unicode.org/reports/tr35/tr35.html#Unicode_locale_identifier
|
https://github.com/Enter-tainer/zint-wasi | https://raw.githubusercontent.com/Enter-tainer/zint-wasi/master/README.md | markdown | MIT License | # zint-wasi
This is a Zint binding for WASI.
- `zint-wasm-sys` is a low-level binding to the Zint library.
- `zint-wasm-rs` is a high-level binding to the Zint library.
- `zint-typst-plugin` is a typst package for the Zint library.
This package uses only the Zint library and none of its frontends, so it is MIT licensed.
Check out the examples and `typst-package/manual.typ` for more information.
## Manual
_(click on the image to open)_
<a aria-label="Link to manual" href="https://raw.githubusercontent.com/Enter-tainer/zint-wasi/master/typst-package/manual.pdf" target="_blank">
<img src="/assets/manual-preview.svg">
</a>
## Build
Clone with:
```sh
git clone --recurse-submodules -j8 https://github.com/Enter-tainer/zint-wasi.git
```
You must have the standard development tools pre-installed on your machine and available in your `PATH`:
- cargo (rustc; get with [rustup](https://rustup.rs/))
- tar
- wget/curl
- gcc/clang
To build the typst package, run:
```sh
cargo xtask package
```
See [`xtask` readme](./xtask/README.md) for more information.
## License
This package is licensed under MIT license.
A copy of the license can be found in the [LICENSE](./LICENSE) file.
|
https://github.com/aik2mlj/cv-typst | https://raw.githubusercontent.com/aik2mlj/cv-typst/master/cv.typ | typst | #show heading: set text(font: "Linux Biolinum")
#show link: underline
// Uncomment the following lines to adjust the size of text
// The recommend resume text size is from `10pt` to `12pt`
#set text(
size: 12pt,
)
// Feel free to change the margin below to best fit your own CV
#set page(
margin: (x: 1cm, y: 1.3cm),
)
// For more customizable options, please refer to official reference: https://typst.app/docs/reference/
#set par(justify: true, leading: 0.7em)
#set list(indent: 0.8em)
#let chiline() = {v(-3pt); line(length: 100%); v(-5pt)}
#set align(center)
#text(size: 20pt)[*<NAME>*] \
Researcher, Artist \
<EMAIL> | #link("https://aik2.site")[aik2.site]
#set align(left)
== EDUCATION
#chiline()
*Center for Computer Research in Music and Acoustics (CCRMA), Stanford* #h(1fr) Sept. 2024 - Present \
Master of Music, Science, and Technology #h(1fr) California, United States
- Advisor: Prof. <NAME>.
*Zhiyuan College, Shanghai Jiao Tong University* #h(1fr) Sept. 2019 -- June 2023 \
Bachelor of Engineering in Computer Science #h(1fr) Shanghai, China \
- Member of *ACM Honor Class*, an elite CS program for the *top 5%* of talented students.
- GPA: 88.5 / 100 (*top 10 student*).
== PUBLICATIONS
#chiline()
// <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, *<NAME>*, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, "MuPT: A Generative Symbolic Music Pretrained Transformer", submitted to _Proc. 1#super[st] Conference on Language Modeling (COLM 2024)_. [#link("https://arxiv.org/abs/2404.06393")[arXiv]]
<NAME>, *<NAME>*, <NAME>, "Whole-song Hierarchical Generation of Symbolic Music Using Cascaded Diffusion Models", *Spotlight (top 5%)* in _Proc. 12#super[th] International Conference on Learning Representations (ICLR 2024)_, Vienna, May 2024. [#link("https://openreview.net/forum?id=sn7CYWyavh")[OpenReview]]
*<NAME>*, <NAME>, <NAME>, <NAME>, "Polyffusion: A Diffusion Model for Polyphonic Score Generation with Internal and External Controls", in _Proc. 24#super[th] International Society for Music Information Retrieval Conference (ISMIR 2023)_, Milan, November 2023. [#link("https://arxiv.org/abs/2307.10304")[arXiv]] [#link("https://ismir2023program.ismir.net/poster_51.html")[poster]]
== ACADEMIC EXPERIENCE
#chiline()
*Hierarchical Generation and Performance Rendering of Symbolic Music* #h(1fr) Sept. 2023 - Feb. 2024 \
Research Assistant at MBZUAI #h(1fr) Abu Dhabi, United Arab Emirates
- Designed and implemented comprehensive experiments for the hierarchical generation of symbolic music, with a cascaded diffusion model as backend.
- Experimented on performance rendering for symbolic music using Transformer architecture.
- Advisor: Prof. <NAME>.
*Controllable Symbolic Music Generation with Diffusion Models* #h(1fr) June 2022 – Dec. 2022 \
Research Assistant at MBZUAI #h(1fr) Abu Dhabi, United Arab Emirates
- Achieved state-of-the-art polyphonic music generation using diffusion models.
- Devised two control paradigms for music generation in the diffusion model framework: internal control via masked generation, and external control via cross-attention mechanism.
- Advisor: Prof. <NAME>.
*Deep Learning on Piano Reduction and Orchestration* #h(1fr) Jan. 2022 – May 2023 \
Researcher at Music X Lab, New York University, Shanghai #h(1fr) Shanghai, China
- Projected piano and orchestral scores to a joint latent space with variational autoencoders.
- Applied contrastive learning on the latent space with end-to-end autoencoder training.
- Advisor: Prof. <NAME>.
*Approximating Holant problems in 3-regular graphs* #h(1fr) Sept. 2021 – Dec. 2021 \
Researcher at John Hopcroft Center for Computer Science #h(1fr) Shanghai, China
- Constructed gadgets for approximation of Holant problems in 3-regular graphs.
- Applied complexity results from Ising Model to Holant problems by reduction.
- Advisor: Prof. <NAME>.
== TEACHING
#chiline()
*Design and Analysis of Algorithms (AI2615)* #h(1fr) Spring 2022 \
Teaching Assistant at SJTU #h(1fr) Shanghai, China
- Prepared well-written standard answers for class assignments.
- Graded homework and final exam.
- Lecturer: Prof. <NAME>.
*Principle and Practice of Computer Algorithms (CS1952)* #h(1fr) Summer 2021 \
Teaching Assistant at SJTU #h(1fr) Shanghai, China
- Designed a comprehensive ray tracing tutorial written in the Rust language. The #link("https://github.com/aik2mlj/raytracer-tutorial")[repository] received 100+ stars on GitHub.
- Designed algorithm programming tests for grading.
- Supervisor: Prof. <NAME>.
== LANGUAGE PROFICIENCY
#chiline()
*TOEFL*: *112* (Reading *30*, Listening *30*, Speaking *24*, Writing *28*) \
*GRE*: Verbal *162*, Quantitative *170*, Writing *4.0*
== SKILLS
#chiline()
*Computer Science Skills*
- C, C++, Python, Java, Rust, Verilog, Git.
- Proficient in machine learning coding, strategies and frameworks.
- Experienced in designing compilers, architecture, and computer systems.
- Sophisticated skills in managing projects and debugging.
- Linux and open-source software enthusiast.
- Experienced in Unity game development and audio plugin development using JUCE framework.
*Musical Abilities*
- Guzheng (the Chinese zither) Performance Level 10 (the highest nonprofessional level in China) qualified.
- Piano Performance Level 10 qualified.
- Singing Performance Level 6 qualified.
- Published an electronic music piece under Chinese Electronic Music (CEM) Records, one of the most prestigious electronic music labels in China.
*Artistic Capacities*
- Trained on pencil sketching and pastel painting.
- Well-versed in world literature.
- Experienced in 3D modeling using Blender.
== LEADERSHIP
#chiline()
*Zhihui Camp, Zhiyuan College* #h(1fr) Sept. 2020 \
Group Leader #h(1fr) Shanghai, China
- Led a team of 10 students in knowledge contests, volunteering and several social activities.
- Ranked first among 12 groups from Zhiyuan College.
*Zhiyuan Traditional Culture Festival* #h(1fr) May 2020 \
Group Leader #h(1fr) Shanghai, China
- Directed, filmed and edited an online traditional Chinese music ensemble.
- Won the first prize.
*<NAME> (Freshmen Welcome Party)* #h(1fr) Dec. 2019 \
Performance Director #h(1fr) Shanghai, China
- Directed an on-stage mime performance comprising dance, singing and interactive installations.
- Won the Silver Prize among 7 groups.
|