Dataset columns: repo (string, length 26-115), file (string, length 54-212), language (string, 2 classes), license (string, 16 classes), content (string, length 19-1.07M).
https://github.com/ClazyChen/Table-Tennis-Rankings
https://raw.githubusercontent.com/ClazyChen/Table-Tennis-Rankings/main/WS-latest.typ
typst
#set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (1 - 32)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [1], [SUN Yingsha], [CHN], [3581], [2], [CHEN Meng], [CHN], [3488], [3], [WANG Manyu], [CHN], [3438], [4], [HAYATA Hina], [JPN], [3339], [5], [HARIMOTO Miwa], [JPN], [3277], [6], [ZHU Yuling], [MAC], [3210], [7], [WANG Yidi], [CHN], [3192], [8], [CHEN Xingtong], [CHN], [3154], [9], [ITO Mima], [JPN], [3131], [10], [FAN Siqi], [CHN], [3130], [11], [HE Zhuojia], [CHN], [3121], [12], [QIAN Tianyi], [CHN], [3104], [13], [ZHANG Rui], [CHN], [3098], [14], [CHENG I-Ching], [TPE], [3098], [15], [HASHIMOTO Honoka], [JPN], [3086], [16], [HIRANO Miu], [JPN], [3075], [17], [KIM Kum Yong], [PRK], [3055], [18], [LIU Weishan], [CHN], [3054], [19], [KIHARA Miyuu], [JPN], [3010], [20], [CHEN Yi], [CHN], [3009], [21], [SHI Xunyao], [CHN], [3006], [22], [KUAI Man], [CHN], [2993], [23], [HAN Ying], [GER], [2992], [24], [ODO Satsuki], [JPN], [2986], [25], [SHIN Yubin], [KOR], [2978], [26], [JOO Cheonhui], [KOR], [2950], [27], [SATO Hitomi], [JPN], [2946], [28], [NAGASAKI Miyu], [JPN], [2942], [29], [POLCANOVA Sofia], [AUT], [2937], [30], [OJIO Haruna], [JPN], [2936], [31], [KAUFMANN Annett], [GER], [2931], [32], [LEE Eunhye], [KOR], [2928], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (33 - 64)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [33], [SUH Hyo Won], [KOR], [2925], [34], [MORI Sakura], [JPN], [2916], [35], [PYON Song Gyong], [PRK], [2915], [36], [WANG Xiaotong], [CHN], [2915], [37], [BATRA Manika], [IND], [2910], [38], [SHIBATA Saki], [JPN], [2909], [39], [YUAN Jia Nan], [FRA], [2896], [40], [DOO Hoi Kem], [HKG], [2886], [41], [YOKOI Sakura], [JPN], [2858], [42], [<NAME>], [KOR], [2828], [43], [<NAME>], [CHN], [2823], [44], [<NAME>], [GER], [2818], [45], [<NAME>], [CHN], [2804], [46], [SZOCS Bernadette], [ROU], [2803], [47], [<NAME>], [PUR], [2797], [48], [EERLAND Britt], [NED], [2796], [49], [<NAME>], [CHN], [2789], [50], [<NAME>], [CHN], [2784], [51], [YANG Yiyun], [CHN], [2783], [52], [TAKAHASHI Bruna], [BRA], [2777], [53], [ZHU Chengzhu], [HKG], [2755], [54], [<NAME>], [LUX], [2751], [55], [BAJOR Natalia], [POL], [2750], [56], [KIM Nayeong], [KOR], [2741], [57], [WINTER Sabine], [GER], [2740], [58], [SAMARA Elizabeta], [ROU], [2738], [59], [<NAME>], [CHN], [2720], [60], [PESOTSKA Margaryta], [UKR], [2714], [61], [YANG Ha Eun], [KOR], [2711], [62], [SASAO Asuka], [JPN], [2708], [63], [YANG Xiaoxin], [MON], [2706], [64], [QI Fei], [CHN], [2704], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (65 - 96)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [65], [PAVADE Prithika], [FRA], [2703], [66], [ZONG Geman], [CHN], [2701], [67], [BERGSTROM Linda], [SWE], [2698], [68], [ZHANG Lily], [USA], [2694], [69], [MESHREF Dina], [EGY], [2693], [70], [GHORPADE Yashaswini], [IND], [2691], [71], [XU Yi], [CHN], [2688], [72], [KIM Hayeong], [KOR], [2686], [73], [LI Yu-Jhun], [TPE], [2681], [74], [CHOI Hyojoo], [KOR], [2678], [75], [AKULA Sreeja], [IND], [2665], [76], [LEE Daeun], [KOR], [2663], [77], [POTA Georgina], [HUN], [2662], [78], [LEE Ho Ching], [HKG], [2658], [79], [CHEN Szu-Yu], [TPE], [2655], [80], [ZHU Sibing], [CHN], [2654], [81], [LIU Hsing-Yin], [TPE], [2649], [82], [WANG Amy], [USA], [2645], [83], [ZHANG Mo], [CAN], [2645], [84], [AKAE Kaho], [JPN], [2631], [85], [MATELOVA Hana], [CZE], [2628], [86], 
[NG Wing Lam], [HKG], [2627], [87], [<NAME>], [EGY], [2625], [88], [SH<NAME>], [GER], [2614], [89], [OJIO Yuna], [JPN], [2611], [90], [<NAME>], [SWE], [2610], [91], [<NAME>], [IND], [2605], [92], [<NAME>], [SGP], [2605], [93], [YU Fu], [POR], [2601], [94], [HUANG Yu-Chiao], [TPE], [2601], [95], [<NAME>], [THA], [2600], [96], [SHAO Jieni], [POR], [2594], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (97 - 128)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [97], [RAKOVAC Lea], [CRO], [2587], [98], [DRAGOMAN Andreea], [ROU], [2583], [99], [LEE Zion], [KOR], [2583], [100], [<NAME>], [CRO], [2582], [101], [LIU Jia], [AUT], [2574], [102], [DIACONU Adina], [ROU], [2573], [103], [LIU Yangzi], [AUS], [2572], [104], [<NAME>], [ITA], [2564], [105], [UESAWA Anne], [JPN], [2561], [106], [MUKHERJ<NAME>], [IND], [2560], [107], [<NAME>], [SRB], [2560], [108], [SAWETTABUT Suthasini], [THA], [2558], [109], [KAM<NAME>], [IND], [2556], [110], [LUTZ Charlotte], [FRA], [2552], [111], [PLAIAN Tania], [ROU], [2545], [112], [WAN Yuan], [GER], [2540], [113], [<NAME>], [GRE], [2539], [114], [CHIEN Tung-Chuan], [TPE], [2539], [115], [<NAME>], [KOR], [2537], [116], [CHENG Hsien-Tzu], [TPE], [2534], [117], [LAM Yee Lok], [HKG], [2533], [118], [ZHANG Xiangyu], [CHN], [2533], [119], [SAWETTABUT Jinnipa], [THA], [2526], [120], [HUANG Yi-Hua], [TPE], [2521], [121], [PARK Gahyeon], [KOR], [2520], [122], [ZHANG Sofia-Xuan], [ESP], [2520], [123], [GHOSH Swastika], [IND], [2518], [124], [ZARIF Audrey], [FRA], [2517], [125], [XIAO Maria], [ESP], [2514], [126], [CHA Su Yong], [PRK], [2512], [127], [SU Pei-Ling], [TPE], [2503], [128], [CHASSELIN Pauline], [FRA], [2500], ) )
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compute/calc-26.typ
typst
Other
// Test the `fact` function. #test(calc.fact(0), 1) #test(calc.fact(5), 120)
https://github.com/polarkac/MTG-Stories
https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/028_Kaladesh.typ
typst
#import "@local/mtgset:0.1.0": conf #show: doc => conf("Kaladesh", doc) #include "./028 - Kaladesh/001_Homesick.typ" #include "./028 - Kaladesh/002_A Time for Innovation.typ" #include "./028 - Kaladesh/003_Torch of Defiance.typ" #include "./028 - Kaladesh/004_Renegade Prime.typ" #include "./028 - Kaladesh/005_Born of Aether.typ" #include "./028 - Kaladesh/006_Bottled Up.typ" #include "./028 - Kaladesh/007_Release.typ" #include "./028 - Kaladesh/008_In This Very Arena.typ" #include "./028 - Kaladesh/009_A Grateful Consulate.typ"
https://github.com/eliapasquali/typst-thesis-template
https://raw.githubusercontent.com/eliapasquali/typst-thesis-template/main/preface/acknowledgements.typ
typst
Other
#import "../config/variables.typ" : profTitle, myProf, myLocation, myTime, myName #import "../config/constants.typ" : acknlowledgements #set par(first-line-indent: 0pt) #set page(numbering: "i") #align(right, [ #text(style: "italic", "Tieni l'infinito per ciò che lo merita...") #v(6pt) #sym.dash#sym.dash#sym.dash <NAME> ]) #v(10em) #text(24pt, weight: "semibold", acknlowledgements) #v(3em) #text(style: "italic", "Innanzitutto, vorrei esprimere la mia gratitudine al " + profTitle + myProf + " relatore della mia tesi, per l'aiuto e il sostegno fornitomi durante la stesura del lavoro.") #linebreak() #text(style: "italic", "Desidero ringraziare con affetto i miei genitori per il sostegno, il grande aiuto e per essermi stati vicini in ogni momento durante gli anni di studio.") #linebreak() #text(style: "italic", "Ho desiderio di ringraziare poi i miei amici per tutti i bellissimi anni passati insieme e le mille avventure vissute.") #v(2em) #text(style: "italic", myLocation + ", " + myTime + h(1fr) + myName) #v(1fr)
https://github.com/Duolei-Wang/modern-sustech-thesis
https://raw.githubusercontent.com/Duolei-Wang/modern-sustech-thesis/main/README.md
markdown
MIT License
# modern-sustech-thesis: v0.1.1-light - [使用说明 Usage](#使用说明-usage) - [typst.app 网页版](#typstapp-网页版) - [离线编译](#离线编译) - [Quickstart of typst template](#quickstart-of-typst-template) - [有关字体的补充说明](#有关字体的补充说明) - [版本说明](#版本说明) - [特别鸣谢](#特别鸣谢) - [写在后面](#写在后面) # 使用说明 Usage 模板已提交给 [typst 网页端](typst.app/univer),仍需审核. typst.app 中,搜索 modern-sustech-thesis 即可一键生成在线模板. 初次编译时间较长,此后编译速度与 markdown 体验无异. ## typst.app 网页版 [typst 网页端](typst.app/univer) 的模板库里已经上线,搜索 modern-sustech-thesis 即可一键生成在线模板. 初次编译时间较长,此后编译速度与 markdown 体验无异. 使用步骤: - 打开 typst.app 从模板新建项目(start from template) - 论文所需字体需要手动上传到你的项目文件列表. 点击左侧 Explore Files,上传字体文件,上传后的字体文件存储位置没有特殊要求,typst 拥有优秀的内核,可以完成自动搜索. 由于格式渲染引擎的核心需要指定字体的名称,我在模板测试阶段使用了若干标准字体,这些字体可以在我的 github 仓库 [modern-sustech-thesis](https://github.com/Duolei-Wang/modern-sustech-thesis) /template/fonts 里找到. 此外,可以手动更改字体配置,在正文前使用 '#set' 命令即可,由于标题、正文字体不同,此处大致语法如下: ```typst // headings show heading.where(level: 1): it =>{ set text( font: fonts.HeiTi, size: fonts.No3, weight: "regular", ) align(center)[ // #it #strong(it) ] text()[#v(0.5em)] } show heading.where(level: 2): it =>{ set text( font: fonts.HeiTi, size: fonts.No4, weight: "regular" ) it text()[#v(0.5em)] } show heading.where(level: 3): it =>{ set text( font: fonts.HeiTi, size: fonts.No4-Small, weight: "regular" ) it text()[#v(0.5em)] } // paragraph set block(spacing: 1.5em) set par( justify: true, first-line-indent: 2em, leading: 1.5em) ``` headings 设定了各个登记标题的格式,其中一级标题需要居中对齐. 'font: fonts.HeiTi' 即为字体的关键参数,参数的值是字体的名称(字符串). typst 将会在编译器内核、项目目录中搜索. typst 内核自带了 Source Sans(黑体)和 Source Serif(宋体)系列,但是中文论文所需的仿宋、楷体仍需自己上传. ## 离线编译 离线编译如果在命令行使用 'typst init "@preview/modern-sustech-thesis:0.1.0"',则使用体验和网页端使用没有差别. 故本文考虑具有自定义格式需求的用户,作出额外说明. 项目结构: - 格式渲染核心:lib.typ 格式的设置 '#set', '#show' 均是具有大括号内的生命周期,这种渲染是临时的,不会超出文件的作用. '#show' 会捕获满足格式的内容加以渲染,如无声明被捕获的对象,默认捕获 'doc'(全部文段). 因此必须将渲染的核心打包成一个函数,在需要被渲染的内容前通过 '#show: xxx.with()[content]' 进行默认捕获. 其中方括号内是内容,在 typst 语法中如果只声明了函数调用 '#show: xxx.with()',则函数后的部分都会被认为是文章的主体. 即: '#show: xxx.with(arg1, arg2, ..., body)' 中的最后一个参数可以通过如下方式传入: - 'xxx.with(args, ..., [content])' - 'xxx.with(args, ...)[content]' 渲染一个模板的工作分配给了两个部分: - 静态内容编译. 如 cover, commitment 等只需要传递少量参数后可以渲染出完整界面的,我命名以静态内容. 这些内容传递参数后渲染出的整个界面是静态的对象,因此可以直接调用函数(return 渲染后的界面)插入到调用函数的位置. - 动态内容编译. 动态的设定很难超过当前的生命周期,除非使用 '#show' 不断传递. 在 content.typ 中,正文前使用 '#show: sustech-thesis.with()' 即将 content.typ 作为 body 参数交给 sustech-thesis() 这个函数渲染,因此,此时 body 作为被传递的参数进入了编译其中已经编译好了的 sustech-thesis,从而实现了格式控制. 这样的格式控制分布在 configs 里的各个文件. 此外,引用文献的设置采用了 typst 成员 [Laurenz](https://github.com/typst/packages) 教学模板里的设置,将 std-biliography 设置为某个 biliography 后才能夸文件引用,否则会出现标签引用相关错误. ```typst // lib.typ #let std-bibliography = bibliography ... // content.typ #show: sustech-thesis.with( ..., bibliography: bibliography( "refer.bib", title: [参考文献], style: "gb-7714-2015-numeric", ), ) ``` # Quickstart of typst template 按照毕业设计要求,以 markdown 格式书写你的毕业论文,只需要: - 在 configs/info 里填入个人信息. 如有标题编译错误(比如我默认了有三行标题),可以自行按照编译器提示把相关代码注释或者修改. 大体语法和内容与基本的编程语言无差别. - 在 content.typ 里以 typst 特定的 markdown 语法书写你的论文内容. 有关 typst 中 markdown 的语法变更,个人认为的主要变化罗列如下: - 标题栏使用 '=' 而非 '#','#' 在 typst 里是宏命令的开头. - 数学公式不需要反斜杠,数学符号可以查阅:https://typst.app/docs/reference/symbols/sym/. 值得注意的是,typst 中语法不通过叠加的方式实现,如 “不等号” 在 LaTex 中是 '\not{=}'. 而在 typst 中,使用 'eq.not' 的方式来调用 'eq'(等号)的 'not'(不等)变体实现. - 引用标签采用 '@label' 来实现,自定义标签通过 '<label-title>' 来实现. 对于 BibTex 格式的引用(refer.bib),与 LaTex 思路相同,第一个缩略词将会被认定为 label. - 自定义格式的思路. 
如有额外的需要自定义格式的需求,可以自行学习 '#set', '#show' 命令,这可能需要一定的编程语言知识,后续我会更新部分简略教程在我的 github 仓库里:https://github.com/Duolei-Wang/lang-typst. - 本模板的结构 1. 内容主体. 文章主体内容书写在 content.typ 文件中,附录部分书写在 appendix.typ 文件中. 2. 内容顺序. 文章内容顺序由 main.typ 决定,通过 typst 中 '#include' 指令实现了页面的插入. 3. 内容格式. 内容格式由 /sections/*.typ 控制,body.typ 控制了文章主体的格式,其余与名称一致. cover 为封面,commitment 为承诺书,outline 为目录,abstract 为摘要. # 有关字体的补充说明 为了 typst/packages 审核方便,我将字体文件上传到了个人版本的仓库里:https://github.com/Duolei-Wang/fonts. 如有字体使用的需求,请将其下载后移动到当前目录下使用,typst 编译器会直接能找到目录下的可用字体. 经个人查阅,论文等要求的“宋体”等字体要求均是一个模糊的概念. 实际上,Windows 系统的宋体指中易宋体等,macOS 采用了华文宋体等. 为了避免不必要的纠纷,建议字体采用完全开源的字体,如:思源宋体、思源黑体、方正宋体、方正黑体、方正楷体、仿宋GB2312 等. 如担心字体审核问题,建议统一采用 GB2312 系列(缺点是部分生僻字缺失). 本模板中使用的字体均为开源字体. 论文字体的选择在 font.typ 里进行了设置,可以修改 SongTi, HeiTi 等自变量的值来决定采用哪一个字体,这些自变量的值应当是字体的标准名称. 如果想查阅当前编译环境内的可选字体,可以通过以下两种方式: ```typst #set text( font: ... ) ``` 然后将光标悬停在 'font: ' 后,编译器会自动列出当前可用字体. 或者采用命令行指令 'typst fonts' 来查看可选字体. # 版本说明 版本号:v0.1.0. TODO: - [ ] 引用格式 check. # 特别鸣谢 南方科技大学本科毕业设计(论文)模板,论文格式参照 [南方科技大学本科生毕业设计(论文)撰写规范](https://tao.sustech.edu.cn/studentService/graduation_project.html). 如有疏漏敬请谅解,本模板为本人毕业之前自用,如有使用,稳定性请自行负责. - 本模板主要参考了 [iydon](https://github.com/iydon) 仓库的的 $\LaTeX$ 模板 [sustechthesis](https://github.com/iydon/sustechthesis);结构组织参照了 [shuosc](https://github.com/shuosc) 仓库的 [SHU-Bachelor-Thesis-Typst](https://github.com/shuosc/SHU-Bachelor-Thesis-Typst) 模板;图片素材使用了 [GuTaoZi](https://github.com/GuTaoZi) 的同内容仓库里的模板. - 感谢 [SHU-Bachelor-Thesis](https://github.com/shuosc/SHU-Bachelor-Thesis-Typst) 的结构组织让我学习到了很多,给我的页面组织提供了灵感, - 在查找图片素材的时候,使用了 GuTaoZi 仓库 [SUSTech-thesis-typst](https://github.com/GuTaoZi/SUSTech-thesis-typst) 里的svg 素材 [LICENSE](https://github.com/GuTaoZi/SUSTech-thesis-typst/blob/main/LICENSE),特此感谢! # 写在后面 笔者开发能力较弱,仅出于个人安利 typst 的需要开发了本模板. 如有开发、接管、功能需求请务必联系: QQ: 782564506 mail: <EMAIL>
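The README above describes the show-rule pattern (`#show: sustech-thesis.with(...)` turning everything after it into the `body` argument) and the cross-file bibliography hand-off only in separate fragments. The sketch below stitches those fragments together for illustration; the import path and the `sustech-thesis` name are taken from the README itself and may not match the published package exactly, so treat it as a minimal sketch rather than the package's authoritative usage.

```typst
// Minimal sketch assembled from the README's own snippets (names and
// parameters are assumptions based on the README, not a verified API).
// Everything after the show rule becomes the `body` that sustech-thesis
// formats.
#import "@preview/modern-sustech-thesis:0.1.0": sustech-thesis

#show: sustech-thesis.with(
  // per the README, the bibliography is passed in as a parameter so that
  // citations resolve across files
  bibliography: bibliography(
    "refer.bib",
    title: [参考文献],
    style: "gb-7714-2015-numeric",
  ),
)

= 引言
正文使用 `=` 标记标题,引用写作 `@label`,自定义标签写作 `<label-title>`.
```

As the README notes, running `typst fonts` lists the fonts visible to the compiler, which is a quick way to confirm that manually added font files are being picked up before compiling a file like this one.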
https://github.com/hesampakdaman/resume
https://raw.githubusercontent.com/hesampakdaman/resume/main/class.typ
typst
MIT License
#let font_default_size = 11pt // Set name and contact data and format headings #let template(name, contact_data, color, doc) = { set page(margin: (x: 1.5cm, y: 0.8cm)) set par(justify: true, leading: 0.4em) set text(size: font_default_size, font: "New Computer Modern") show heading: set block(above: 1.5em, below: 0.5em) show link: set text(fill: rgb(0, 0, 255)) show par: set block(spacing: 0.5em) show raw: set text(font: "New Computer Modern") align(center)[ #smallcaps(text(size: 2.5em, fill: color)[#name]) \ #{ if contact_data != none and contact_data.len() > 0 { let elements = for el in contact_data { (link(el.link)[#{box(image(height: 0.7em, "./logo/" + el.service + ".svg")) + " " + el.display}],) } elements.join(" | ") } } ] show heading.where(level: 1): i => { set align(left) let title = smallcaps(i.body) set block(above: 0.2em) set text(weight: "light", size: 1.2em, fill: color) stack( dir: ttb, spacing: 3mm, title, line(length: 100%, stroke: color) ) } show heading.where(level: 2): i => { set align(left) let title = smallcaps(i.body) set block(above: 0.8em) set text(weight: "light", size: 1.1em, fill: color) title } doc } #let experience_entry(logo_path: str, logo_size: none, company: str, role: str, start_date: str, end_date: none, description: str, skills: none) = { if logo_size == none { logo_size = 1.3cm } grid( columns: (3.5cm, 1fr), gutter: 0cm, inset: (x: 5pt, y: 1pt), grid.cell( figure( image(logo_path, width: logo_size), caption: [#text(size: font_default_size - 1.5pt, company)], numbering: none, placement: bottom, kind: image, ) ), grid.cell( align: left, [ *#role* \ #text(size: font_default_size - 1pt, start_date)―#text(size: font_default_size - 1pt, end_date) #v(1mm) #description #v(1mm) #{ if skills != none { let cell = rect.with(radius: 3pt, inset: 3pt) let boxes = for skill in skills {(box(cell(text(size: 8pt, skill))),)} {boxes.join(" ")} } } ] ), ) } // Entry for skills #let skill_entry(category, skills) = { [=== #category] let cell = rect.with(radius: 3pt, inset: 3pt) let boxes = for skill in skills {(box(cell(text(size: 9pt, skill))),)} {boxes.join(" ")} }
https://github.com/sitandr/typst-examples-book
https://raw.githubusercontent.com/sitandr/typst-examples-book/main/src/basics/tutorial/index.md
markdown
MIT License
# Tutorial by Examples The first section of Typst Basics is very similar to the [Official Tutorial](https://typst.app/docs/tutorial/), with more specialized examples and fewer words. It is _highly recommended to read the official tutorial anyway_.
https://github.com/ClazyChen/Table-Tennis-Rankings
https://raw.githubusercontent.com/ClazyChen/Table-Tennis-Rankings/main/history/2016/WS-02.typ
typst
#set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (1 - 32)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [1], [DING Ning], [CHN], [3402], [2], [LIU Shiwen], [CHN], [3294], [3], [LI Xiaoxia], [CHN], [3280], [4], [ZHU Yuling], [MAC], [3273], [5], [WU Yang], [CHN], [3228], [6], [CHEN Meng], [CHN], [3177], [7], [MU Zi], [CHN], [3140], [8], [ISHIKAWA Kasumi], [JPN], [3053], [9], [HAN Ying], [GER], [3046], [10], [WAKAMIYA Misako], [JPN], [2989], [11], [SOLJA Petrissa], [GER], [2976], [12], [FENG Yalan], [CHN], [2973], [13], [FUKUHARA Ai], [JPN], [2964], [14], [RI Myong Sun], [PRK], [2962], [15], [WEN Jia], [CHN], [2961], [16], [LI Jiao], [NED], [2953], [17], [HU Limei], [CHN], [2944], [18], [FENG Tianwei], [SGP], [2923], [19], [ITO Mima], [JPN], [2922], [20], [LI Jie], [NED], [2916], [21], [TIE Yana], [HKG], [2910], [22], [YANG Ha Eun], [KOR], [2884], [23], [CHOI Hyojoo], [KOR], [2871], [24], [HIRANO Miu], [JPN], [2871], [25], [JEON Jihee], [KOR], [2869], [26], [CHENG I-Ching], [TPE], [2861], [27], [LEE Ho Ching], [HKG], [2857], [28], [SUH Hyo Won], [KOR], [2849], [29], [SHAN Xiaona], [GER], [2848], [30], [HU Melek], [TUR], [2840], [31], [JIANG Huajun], [HKG], [2832], [32], [LI Qian], [POL], [2826], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (33 - 64)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [33], [SAMARA Elizabeta], [ROU], [2818], [34], [#text(gray, "HIRANO Sayaka")], [JPN], [2815], [35], [LI Xiaodan], [CHN], [2813], [36], [KIM Kyungah], [KOR], [2812], [37], [MIKHAILOVA Polina], [RUS], [2808], [38], [NI Xia Lian], [LUX], [2788], [39], [YU Fu], [POR], [2784], [40], [LI Fen], [SWE], [2775], [41], [ISHIGAKI Yuka], [JPN], [2772], [42], [#text(gray, "MOON Hyunjung")], [KOR], [2765], [43], [CHE Xiaoxi], [CHN], [2762], [44], [SHEN Yanfei], [ESP], [2754], [45], [MON<NAME>a], [ROU], [2753], [46], [LIU Jia], [AUT], [2752], [47], [DOO Hoi Kem], [HKG], [2743], [48], [YU Mengyu], [SGP], [2742], [49], [PESOTSKA Margaryta], [UKR], [2739], [50], [YANG Xiaoxin], [MON], [2735], [51], [KATO Miyu], [JPN], [2733], [52], [BILENKO Tetyana], [UKR], [2727], [53], [LIU Gaoyang], [CHN], [2726], [54], [CHEN Xingtong], [CHN], [2698], [55], [GU Ruochen], [CHN], [2694], [56], [POTA Georgina], [HUN], [2691], [57], [NG Wing Nam], [HKG], [2689], [58], [WU Jiaduo], [GER], [2687], [59], [KIM Song I], [PRK], [2685], [60], [SATO Hitomi], [JPN], [2679], [61], [IVANCAN Irene], [GER], [2678], [62], [POLCANOVA Sofia], [AUT], [2677], [63], [LI Xue], [FRA], [2677], [64], [PARTYKA Natalia], [POL], [2676], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (65 - 96)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [65], [<NAME>], [JPN], [2675], [66], [CHEN Ke], [CHN], [2674], [67], [WINTER Sabine], [GER], [2667], [68], [<NAME>], [JPN], [2667], [69], [<NAME>], [KOR], [2665], [70], [<NAME>], [CHN], [2659], [71], [<NAME>], [POR], [2651], [72], [<NAME>], [PRK], [2649], [73], [<NAME>], [SGP], [2646], [74], [<NAME>], [RUS], [2645], [75], [<NAME>ook], [KOR], [2642], [76], [WANG Manyu], [CHN], [2635], [77], [<NAME>], [SGP], [2633], [78], [BALAZOVA Barbora], [SVK], [2631], [79], [#text(gray, "LEE Eunhee")], [KOR], [2630], [80], [<NAME>], [KOR], [2630], [81], [ZEN<NAME>], [SGP], [2629], [82], [GRZYBOWSKA-FRANC Katarzyna], [POL], [2628], [83], [CHEN Szu-Yu], [TPE], [2627], [84], [ZHANG Qiang], [CHN], [2621], [85], [PASKAUSKIENE Ruta], [LTU], [2620], [86], 
[KIM Hye Song], [PRK], [2619], [87], [#text(gray, "YOON Sunae")], [KOR], [2617], [88], [LANG Kristin], [GER], [2617], [89], [PAVLOVICH Viktoria], [BLR], [2614], [90], [ZHANG Lily], [USA], [2612], [91], [#text(gray, "JIANG Yue")], [CHN], [2609], [92], [HAMAMOTO Yui], [JPN], [2605], [93], [LI Chunli], [NZL], [2603], [94], [ABE Megumi], [JPN], [2602], [95], [SAWETTABUT Suthasini], [THA], [2599], [96], [LEE Zion], [KOR], [2596], ) )#pagebreak() #set text(font: ("Courier New", "NSimSun")) #figure( caption: "Women's Singles (97 - 128)", table( columns: 4, [Ranking], [Player], [Country/Region], [Rating], [97], [#text(gray, "KIM Jong")], [PRK], [2594], [98], [LIU Xi], [CHN], [2594], [99], [HAYATA Hina], [JPN], [2588], [100], [KOMWONG Nanthana], [THA], [2588], [101], [MITTELHAM Nina], [GER], [2580], [102], [CHOI Moonyoung], [KOR], [2578], [103], [LEE Yearam], [KOR], [2576], [104], [CHENG Hsien-Tzu], [TPE], [2575], [105], [TASHIRO Saki], [JPN], [2575], [106], [PROKHOROVA Yulia], [RUS], [2574], [107], [VACENOVSKA Iveta], [CZE], [2572], [108], [<NAME>], [JPN], [2571], [109], [CHA Hyo Sim], [PRK], [2568], [110], [ODOROVA Eva], [SVK], [2564], [111], [NOSKOVA Yana], [RUS], [2561], [112], [#text(gray, "XIAN Yifang")], [FRA], [2555], [113], [WANG Yidi], [CHN], [2552], [114], [<NAME>], [JPN], [2550], [115], [<NAME>], [SWE], [2547], [116], [MATELOVA Hana], [CZE], [2547], [117], [#text(gray, "PARK Seonghye")], [KOR], [2546], [118], [HUANG Yi-Hua], [TPE], [2540], [119], [RAMIREZ Sara], [ESP], [2540], [120], [STRBIKOVA Renata], [CZE], [2537], [121], [TIKHOMIROVA Anna], [RUS], [2536], [122], [SZOCS Bernadette], [ROU], [2535], [123], [HAPONOVA Hanna], [UKR], [2527], [124], [LIU Hsing-Yin], [TPE], [2526], [125], [#text(gray, "<NAME>")], [KOR], [2523], [126], [GRUNDISCH Carole], [FRA], [2516], [127], [EERLAND Britt], [NED], [2515], [128], [SO Eka], [JPN], [2515], ) )
https://github.com/polarkac/MTG-Stories
https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/027%20-%20Conspiracy%3A%20Take%20the%20Crown/003_Proclamation%20by%20Queen%20Marchesa.typ
typst
#import "@local/mtgstory:0.2.0": conf #show: doc => conf( "Proclamation by Que<NAME>", set_name: "Conspiracy: Take the Crown", story_date: datetime(day: 15, month: 08, year: 2016), author: "<NAME>", doc ) People of the High City! It is my solemn duty to inform you that Brago, King of Paliano, is no more. His death shocked the city all those years ago; his spiritual continuance brought joy and relief to us all. Now, he has at last passed truly and forever beyond the veil. His long reign has come to an end, and his spirit is finally granted the eternal rest it deserves. In his beneficent wisdom, our late king appointed a successor with the will and the strength to bring peace to his beloved city. As his designated heir, recognized by the sacred order of Custodi as his one true successor, I vow to uphold the laws of Paliano, to maintain order in the city, and to see that justice is served swiftly and even-handedly. Though I know that I will never be a worthy heir to a man whose commitment to his city transcended life itself, I must hope that, with the blessings of the Custodi, I am able to guide our fair city to a new age of prosperity. The transfer of power is always difficult, and all the more so when the end of a monarch's reign comes unexpectedly. Even loyal and steadfast servants of the crown may find themselves ill-equipped to serve a new monarch in the same capacity as the old. As of now, the post of Captain of the Guard is disbanded. The soldiers of this city will now report directly to me. The former captain has retired with thanks from our fair city and a generous pension from the throne that will support her for the rest of her life, however long that life may be. In the absence of any natural heir, Brago made his intentions for the disposition of his throne quite clear. Lamentably, not all of the king's former vassals respect his final wishes. Those who might use this transition as an excuse for rebellion should know that treason will be answered, as it always has been, with the harshest punishments, while loyalty will be rewarded lavishly. May fortune smile upon Paliano! #figure(image("003_Proclamation by Queen Marchesa/01.jpg", width: 100%), caption: [Art by <NAME>], supplement: none, numbering: none) —As proclaimed by her Majesty Queen Marchesa, the Black Rose, first of her name; head of the council, guarantor of lawful governance, sole sovereign of the High City, true heir to the throne of Paliano and all the rights and privileges thereof.
https://github.com/noahjutz/CV
https://raw.githubusercontent.com/noahjutz/CV/main/body/title.typ
typst
#block[ #text( size: 48pt, font: "Roboto Slab" )[ #text( weight: "black", "Noah" ) #text( "Jutz" ) ] #linebreak() #text( weight: "light", "Computer Science Student" ) ]
https://github.com/onomou/typst-examit
https://raw.githubusercontent.com/onomou/typst-examit/main/README.md
markdown
MIT License
# examit A Typst exam package based on the MIT LaTeX [exam](https://ctan.org/pkg/exam) package ## Features - Title block - Read questions from file/question bank - Grading table - Marking boxes - Question types - Standard answerline - Multiple choice (including true-false) - Writing box - Blank rectangular, polar, numberline graphs ## Example `main.typ` ```typst #import "@preview/examit:0.1.1": * #import "questions.typ": questions #show: examit.with( questions: questions, // questions file, see example title: [examit], subtitle: "A Typst Exam Package", date: "2023-03-21", margin: ( left: 18mm, top: 16mm, bottom: 25mm, right: 25mm, ), cols: 2, gutter: 18mm, lang: "en", font: "New Computer Modern", extrapicturebox: true, // "If you have time..." box at the end // dropallboxes: true, // points boxes next to answerlines instead of level with the question instructions: [Instructions before exam columns.], namebox: "left", pointsplacement: "right", answerlinelength: 4cm, defaultpoints: 1, ) ``` `questions.typ` ```typst #import "@preview/examit:0.1.1": * #let questions = ( ( header: [Multiple Choice] ), ( question: [What attributes #underline("must") a *vector* have?], points: 2, choices: ( [position], [magnitude], [direction], [x- and y-coordinates], [height], [width], ), horizontal: false, sameline: false, ), ( question: [$bold(sin^(-1))$ returns an angle in which quadrants?], choices: ([I],[II],[III],[IV],), sameline: false, points: 2, ), ( question: [A *scalar* is a vector with a\ magnitude of *1*.], tf: true, points: 2, ), ( question: [ Write this polar equation in rectangular form: $r = frac(5 ,cos theta + sin theta )$ ], points: 4, bonus: true, answerbox: 3cm, ), ( pagebreak: true ), ( header: [Graphing]), ( question: [ Simplify and graph the complex number\ $5(cos 15 degree + i sin 15 degree) dot 10(cos 5 degree + i sin 5 degree)$. ], points: 3, spacing: 2.5cm, graph: "rect", answerline: true, ), ( question: [$x^2(x^2+9)>6x^3$], points: 2, bonus: true, numberline: 2.5in, ), (subheader: [A child is pulling a wagon with a force of 15 lb at an angle of 35° to the ground. Gravity is pulling down on the wagon with a force of 12 lb.]), ( question: [ What is the resulting force vector? ], points: 4, spacing: 3cm, label: "wagon" ), ( question: [ This question references \#@wagon. ], points: 4, ), ) ``` ![examit-example](https://github.com/onomou/typst-examit/assets/131693/78ba6fdc-59c0-460a-89cc-9617c15ac3e0) ## To Do - [ ] Parts or sub-questions - [ ] Customize numbering for questions - [X] References for other questions - [ ] Customize marking box properties: size, positioning, style - [ ] Better multiple-choice and matching options: box/bubble style, layout arrangement (horizontal/vertical, alignment) - [ ] Question types: fill-in-the-blank - [ ] Grading table options: bonus points, positioning - [ ] Footer options - [ ] Margin adjustments based on points position - [ ] Size options for graph response - [ ] Customize first page or title block - [ ] Configure even/odd headers/footers - [ ] Show/hide answers - [ ] Page break vs column break?
https://github.com/yonatanmgr/university-notes
https://raw.githubusercontent.com/yonatanmgr/university-notes/main/0366-%5BMath%5D/03661101-%5BCalculus%201A%5D/src/lectures/03661101_lecture_8.typ
typst
#import "/template.typ": * #show: project.with( title: "חדו״א 1א׳ - שיעור 8", authors: ("<NAME>",), date: "30 בינואר, 2024", ) #set enum(numbering: "(1.א)") = גבולות חלקיים == הגדרה מספר $x in RR$ (או $x=pm oo$) נקרא *גבול חלקי* (במובן הרחב) של סדרה $(an)_(n=1)^oo$ אם קיימת תת-סדרה $(a_n_k)_(k=1)^oo$ כך ש: $ lim_(k->oo) a_n_k = x $ == דוגמאות + $an = (-1)^n$. נראה כי $exists.not lim an$. אך תת-הסדרה $a_n_k = a_(2k) = 1$ שואפת ל-$1$ ולכן $1$ גבול חלקי של $(an)$. נראה כי גם $a_n_k = a_(2k-1) = -1 -> -1$ ולכן גם $-1$ הוא גבול חלקי של $(an)$. + תהי $M = {x_1, dots, x_m}$ קבוצה סופית של מספרים ממשיים. נבנה סדרה $(an)$ כך שכל איבר מ-$M$ הינו גבול חלקי שלה: $an: x_1, dots, x_m, x_1, dots, x_m, dots$. נראה כי קיים לסדרה גבול חלקי לכל איבר מ-$M$ עבור כל תת-סדרה לפי הנוסחה $a_n_k=a_(m+k)$ ו-$m$ הולך מ-$0$ ל-$oo$. + תהי ${x_1, x_2, dots, x_m, dots}$ קבוצה אינסופית. נבנה סדרה $(an)$ שכל איבר מ-$M$ הינו גבול חלקי שלה: $ an: x_1, x_1, x_2, x_1, x_2, x_3, dots $ האם מובטח שאין לה גבולות חלקיים שונים מ-$M$? #underline[לא], למשל, $M={1, 1/2, 1/3, dots}$. כאשר נבנה את הסדרה באותו האופן, נראה כי גם $0$ הוא גבול חלקי שלה, אך הוא לא נמצא ב-$M$. == טענות, משפטים ומסקנות === (טענה) הסדרה $(an)$ אינה חסומה מלמעלה אם״ם $+oo$ הינו גבול חלקי של $(an)$ (בדומה, חסימות מלמטה של $(an)$ שקולה לכך ש-$-oo$ איננו גבול חלקי שלה) ==== הוכחה אם $+oo$ גבול חלקי אז $exists ank -->^(k-oo) +oo$. כלומר, $forall M>0 exists k_M in NN : ank > M, forall k>k_M$ ואז $(an)$ לא חסומה מלמעלה. נניח עתה כי $(an)$ איננה חסומה מלמעלה. אזי, קיים איבר בסדרה $1<a_n_1$. נתבונן כעת בתת״ס $a_(n_1+1), a_(n_1+2), dots$. תת״ס זו גם איננה חסומה מלמעלה. לכן, ניתן למצוא איבר בתת״ס $2< a_n_2$ (נשים לב ש-$n_2>n_1$). נמשיך ונקבל $k<ank$ לכל $k$ טבעי. ואז $limits(lim)_(k->oo) ank = + oo$. #QED === (טענה) לכל סדרה יש לפחות גבול חלקי אחד (כולל במובן הרחב) === (טענה) המספר $L in RR$ גבול חלקי של הסדרה $(an)$ אם״ם לכל $epsilon >0$ הקבוצה ${n: abs(an-L)<epsilon}$ אינסופית (הטענה שקולה למושג גבול חלקי). ==== הוכחה נניח כי $L$ גבול חלקי. כלומר, $exists ank -->^(k->oo) L$, ואז $forall epsilon>0, exists n_epsilon in NN: abs(ank-L)<epsilon, forall k> n_epsilon$. נניח עתה כי $forall epsilon> 0$ הקבוצה ${n: abs(an-L)<epsilon}$ היא אינסופית. נבנה את התת״ס $(ank)_(k=1)^oo$ השואפת ל-$L$ באופן הבא: נבחר $n_1 in NN$ כך ש-$abs(a_n_1 - L) < 1/1$. נבחר $n_2>n_1$ כך ש-$abs(a_n_2 - L) < 1/2$. נמשיך כך, כלומר נבחר את $n_k$ באופן הבא: $abs(a_n_k - L) < 1/k$. ואז, $L-1/k < ank < L+1/k$. לפי כלל הסנדוויץ׳, כאשר $k->oo$, גם $ank->oo$. $QED$ = $liminf$ ו-$limsup$ == הגדרה תהי $(an)$ סדרה. נסמן ב-$P$ את אוסף הגבולות החלקיים הסופיים שלה, ונסמן ב-$hat(P)$ את אוסף הגבולות החלקיים שאינם בהכרח סופיים. ברור ש-$P seq hat(P)$ ו-$hat(P) != nothing$. למשל, $1,1,3,1,5,dots$. נראה כי $P={1}, hat(P)={1, +oo}$. - *הערה* - אם $(an)$ חסומה אז $pm oo$ אינם גבולות חלקיים ואז $nothing != P=hat(P)$. *נגדיר*: $ limsup an &= overline(lim) an &&= sup hat(P) \ liminf an &= underline(lim) an &&= inf hat(P) $ ==== הערה ברור כי $forall m in NN: underbracket(inf an, M_1) <= a_m <= underbracket(sup an, M_2) $. מכאן נובע כי: $ inf an &<= limsup an&&<= sup an \ inf an &<= liminf an &&<= sup an $ וייתכן ש-$limsup an < sup an$ ובאופן דומה $inf an < liminf an$. למשל, בסדרה $-1, 1, 1/2, 1/3, 1/4, dots$ : $sup an = 1$, $limsup an = 0$, $lim an = -1$, $liminf an = 0$, $hat(P) = {0}$. == טענות, משפטים ומסקנות === (טענה) $hat(P) = {L} iff lim an = L in RR$ ==== הוכחה - $arrow.l.double$: הוכחנו. - $arrow.r.double$: נניח כי $hat(P)={L}$ ו-$L != pm oo$. נראה כי $lim an = L$. 
מהנתון נובע ש-$(an)$ חסומה, כי אחרת $pm oo in hat(P)$ בסתירה. אז $exists M>0: abs(an) <= M, forall n in NN$. נזכיר את הגדרת $lim an = L$: $ forall epsilon > 0 exists n_epsilon in NN: forall n > n_epsilon, abs(an - L) < epsilon $ נניח בשלילה כי מתקיים: $ exists epsilon_0 > 0 forall n in NN : exists n_0 > n, abs(a_n_0 - L) >= epsilon_0 $ בפרט, יש אינסוף של ערכים של $n$ עבורם $abs(an - L) >= 0$. כלומר, $an-L >= epsilon_0$ או $an-L <= -epsilon_0$. כלומר, $L + epsilon_0 <= an <= M$ או $-M <= an <= L- epsilon_0$. לכן, לפחות באחד מהקטעים $[L+epsilon_0, M]$ או $[-M, L-epsilon_0]$ יש מספר אינסופי של איברי הסדרה. נניח כי ב-$[-M, L-epsilon_0]$ יש מספר אינסופי של איברי הסדרה $(an)$. ואז, מאינסוף איברים אלה נבחר תת-סדרה $(ank)$ של $(an)$. כלומר, מתקיים $-M<=ank<=L-epsilon_0, forall k in NN$. מכך נובע כי קיימת תת״ס $(a_n_k_l)_(l=1)^oo$ של $(ank)_(k=1)^oo$ כך ש- $a_n_k_l -->^(l -> oo) L' in RR$. בגלל ש-$(a_n_k_l)_(l=1)^oo$ היא גם תת״ס של $(an)$ אזי מסיקים ש-$L' in hat(P)$. מצד שני, $a_n_k_l <= L- epsilon_0$ ואז מתקיים: $ L'<=L-epsilon_0<L => L' != L $ בסתירה ל-$hat(P)={L}$. #QED === (תרגיל) $hat(P)={+oo} iff lim an = +oo$ באופן דומה, גם $hat(P)={-oo} iff lim an = -oo$. === (מסקנה מהטענות) $L = lim an$ (סופי או אינסופי) אם״ם $limsup an = liminf an = L$. === (משפט) אם $(an)$ סדרה חסומה (ואז $hat(P)=P$) אזי $exists min P$ ו-$exists max P$ ==== הערה מהמשפט נובע כי $liminf an in P and limsup an in P$.
https://github.com/MatheSchool/typst-g-exam
https://raw.githubusercontent.com/MatheSchool/typst-g-exam/develop/examples/exam-minimal.typ
typst
MIT License
#import "../src/lib.typ": * #show: g-exam.with() #g-question(points: 2)[List prime numbers] #v(1fr) #g-question(points: 1)[Complete the following sentences] #g-subquestion[<NAME> was written by ...] #v(1fr) #g-subquestion[The name of the continent we live on is ...] #v(1fr)
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/import-02.typ
typst
Other
// Test importing from function scopes. // Ref: true #import enum: item #import assert.with(true): * #enum( item(1)[First], item(5)[Fifth] ) #eq(10, 10) #ne(5, 6)
https://github.com/ofurtumi/formleg
https://raw.githubusercontent.com/ofurtumi/formleg/main/h04/pumping.typ
typst
Pumping lemma for regular languages: If $A$ is regular, then $exists "pumping length" P$ such that any $s in A$ with $|s|>=P$ can be split into $x y z$ so that the following three conditions hold: + $x y^i z in A "for all" i=0,1,2,3...$ + $|y| > 0$ + $|x y| <= P$
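The file above only states the lemma; a standard worked application makes the three conditions concrete. The following sketch is not part of the original pumping.typ and uses the same Typst notation to give the classic argument that the language of strings 0^n 1^n is not regular.

```typst
// Illustrative sketch, not from the original file: the classic
// non-regularity argument for B = {0^n 1^n : n >= 0}.
Suppose $B = {0^n 1^n : n >= 0}$ were regular, with pumping length $P$.
Take $s = 0^P 1^P in B$, so $|s| >= P$. In any split $s = x y z$ with
$|x y| <= P$ and $|y| > 0$, the piece $y$ lies entirely inside the leading
zeros, say $y = 0^k$ with $k >= 1$. Condition 1 with $i = 2$ would give
$x y^2 z = 0^(P + k) 1^P in B$, but this string has more zeros than ones,
a contradiction. Hence $B$ is not regular.
```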
https://github.com/pedrofp4444/BD
https://raw.githubusercontent.com/pedrofp4444/BD/main/report/templates/template.typ
typst
#let project(body) = { // Set the document's basic properties. set page(paper: "a4", margin:(top: 2.5cm, bottom: 2.5cm, left: 3cm, right: 3cm)) set text(font: "Calibri", lang: "pt", region: "PT", size: 10pt) // Main body. set par(justify: true) // Remove dots from outlines set outline(fill: none) set heading(numbering: "1.") show heading: it => { if it.level == 1 { pagebreak() block(below: 5em) } it } show heading: set block(above: 2.5em, below: 1em) show link: set text(fill: blue.darken(30%)) show link: underline.with(evade: false) show figure.where(kind: "attachment"): it => { heading(outlined: false, numbering: none, level: 2)[#it.caption] it.body } show heading.where(level: 1): it => { text(size:18pt, it) } show heading.where(level: 2): it => { text(size:16pt, it) } body } #let attachment = figure.with(kind: "attachment", numbering: "1", supplement: [Anexo])
https://github.com/rhedgeco/resume
https://raw.githubusercontent.com/rhedgeco/resume/main/resume.typ
typst
#import "template.typ": * #set text(size: 10.5pt) #let theme = light-theme() #let theme = dark-theme() #show: file-layout.with( filename: "resume.typ", source: "https://github.com/rhedgeco/resume/blob/main/resume.typ", theme: theme, ) #resume_title( firstname: "RYAN", lastname: "HEDGECOCK", linkedin: "rhedgeco", github: "rhedgeco", youtube: "rhedgeco", website: "hedgecock.dev", subtitle: "Software Engineer - Bay Area - US Citizen - 1 (408) 834-3376 - <EMAIL>", theme: theme, ) = Objective Software Development Engineer = Education *San Jose State University* - Software Engineering - 2024 = Professional Skills - *Programming Languages* - Proven fluency in *C\#, Rust, Python, Java, JavaScript/Typescript*. - *Development Experience* - 4 years of professional development experience working on large systems. - *Tooling* - Experienced a range of tools including: *git, vscode, jet-brains, git-actions and other CI*. = Work Experience - *Software Contractor/Consultant*\ Nov22 - Present - *Software Engineer* - #link("https://www.komprise.com/")[*Komprise*]\ Oct21 - Aug22 - Contributed to and built critical data migration infrastructure using Java. - Assessed customer issues in real time to push sensitive patches. - Solved integration issues between on-premises system hardware and cloud infrastructure. - *Software Development Intern* - #link("https://www.elekta.com/")[*Elekta*]\ May21 - Oct21 - Built analytics tooling in C\# for internal database management of patient data. - Constructed new database integrations and built a new GraphQL backend layer. - *Software Engineer* - #link("https://aotu.ai/")[*AOTU*]\ Oct18 - May21 - Lead engineer bulding a synthetic rendering platform #link("https://aotu.ai/synthall")[Synthall] using C\#, python, and Unity Engine. - Used to generate synthetic training data for AI machine learning inference models. - Packaged self-contained machine learning #link("https://github.com/aotuai/capsule-zoo")[capsules] in python for our AI platform. - *Programming Instructor* - #link("https://www.whizkidzcc.com/")[*WhizKidz*]\ July17 - Oct19 - Taught programming classes for Java, Python, C\#, Lua. - Taught game and modelling classes covering Unity Engine and Blender. = Passion Projects - #link("https://github.com/rhedgeco/boba-engine")[boba-engine] - A modern game engine written in rust. - #link("https://github.com/rhedgeco/boba-script")[boba-script] - A domain specific language for my game engine built in rust. - #link("https://github.com/rhedgeco/Synthic")[Synthic] - A real-time audio synthesis engine built for Unity. - #link("https://github.com/rhedgeco/imposters")[imposters] - A rust library for creating and managing type erased item collections. - #link("https://github.com/rhedgeco")[See more on github...]
https://github.com/kdog3682/2024-typst
https://raw.githubusercontent.com/kdog3682/2024-typst/main/src/page-templates.typ
typst
#import "base-utils.typ": * #let fixables = ( page: ( index-card: ( width: 6in, height: 4in, ), small-index-card: ( width: 5in, height: 3in, ), ) ) #let fix(attrs, fixable) = { if fixable != none { for (k, v) in attrs { if is-string(v) and v in fixable { attrs.remove(k) // panic(fixable.at(v)) for (a, b) in fixable.at(v) { attrs.insert(a, b) } // panic(attrs) } } } // panic(attrs) return attrs } #let theMainImportantWrapper(base, key, kwargs) = { let ref = base.at(key, default: none) if ref == none { return } let edits = kwargs.at(key, default: none) let fixable = fixables.at(key, default: none) ref = assign(ref, edits) ref = fix(ref, fixable) return ref } #let default-marker-func(n) = { // color the marker as blue // you could theoretically have the marker be anything if calc.even(n) { text(fill: blue, [•]) } else { [--] } } #let test-show-enum(it) = { // this will be used for lists ... set align(center) set text(size: 16pt) it // [\~ #it.body \~ #counter(list).display() ] } #let test-show-list(it) = { set text(fill: green) set align(center) it } #let test-show-heading(it) = { set align(center) set text(size: 16pt) // all the sizes are normalized to 16pt [\~ #it.body \~ #counter(heading).display() ] } #let base-ref = ( default: ( page: ( paper: "us-letter", margin: ( // top: 0.65in, // left: 0.65in, // right: 0.65in, // bottom: 0.5in, top: 0.85in, left: 1in, right: 1in, bottom: 0.7in, ), ), text: ( size: 12pt, font: "Crimson Pro", ), ), test: ( page: ( paper: "index-card", // background: cetz-background(), margin: ( top: 0.65in, left: 0.65in, right: 0.65in, bottom: 0.65in, ), ), text: ( size: 20pt, fill: red, ), enum: ( numbering: "1 a .", ), list: ( // marker: ([•], [--]), marker: default-marker-func, ), // show-heading: test-show-heading, show-list: test-show-list, // show-enum: test-show-enum, ), ) #let base(doc, ..sink) = { let kwargs = sink.named() let key = kwargs.at("key", default: "default") let attrs = base-ref.at(key) let page-attrs = theMainImportantWrapper(attrs, "page", kwargs) let text-attrs = theMainImportantWrapper(attrs, "text", kwargs) let list-attrs = theMainImportantWrapper(attrs, "list", kwargs) let enum-attrs = theMainImportantWrapper(attrs, "enum", kwargs) let show-enum = mdg("show-enum", kwargs, attrs, fallback: identity) let show-list = mdg("show-list", kwargs, attrs, fallback: identity) let show-heading = mdg("show-heading", kwargs, attrs, fallback: identity) set page(..page-attrs) if has-value(page-attrs) set text(..text-attrs) if has-value(text-attrs) set list(..list-attrs) if has-value(list-attrs) set enum(..enum-attrs) if has-value(enum-attrs) show heading: show-heading show list: show-list show enum: show-enum doc } #let test = base.with(key: "test") #let dialogue = base.with(key: "default")
https://github.com/monaqa/typst-class-memo
https://raw.githubusercontent.com/monaqa/typst-class-memo/master/src/lib.typ
typst
MIT License
//! target: ../.memo.local/memo.typ #import "href.typ" #import "code.typ" #let document( show_toc: false, link_converters: href.default_link_converters, body, ) = { // text & paragraph set text(font: "IBM Plex Sans JP") set par(justify: true, leading: 0.85em) // inline elements show link: href.pretty_link.with(link_converters: link_converters) // heading show heading.where(level: 1): (it) => { pad(y: 3pt, block( breakable: false, width: 100%, inset: (bottom: 5pt), stroke: (bottom: 0.5pt + black), text(weight: 200, size: 20pt, it), )) } show heading.where(level: 2): (it) => { pad(y: 3pt, block( breakable: false, inset: (bottom: 5pt), stroke: (bottom: 1.5pt + black), text(weight: 600, it), )) } show heading: (it) => { if it.level <= 2 { return it } let heading_sign = { text(fill: luma(60%), size: 0.7em, weight: 200)[h#it.level;. ] } pad( y: 3pt, block( breakable: false, [ #grid(columns: 2)[ #place(right + bottom, text(fill: luma(70%))[#heading_sign#h(0.5em)]) ][ #text(weight: 600, it.body) ] ], ), ) } // list & enum & term set list( indent: 0.8em, marker: place(center, dy: 0.25em)[#circle(radius: 1.5pt, fill: black)], ) // raw show raw: set text(font: ( "CommitMono-height105", "Hack Nerd Font", "IBM Plex Mono", "Noto Sans Mono CJK JP", )) show raw.where(block: true): set par(leading: 0.6em) show raw.where(block: true): (it) => { if it.lang == "sh" { code.console_block(it) } else { code.normal_raw_block(it) } } set quote(block: true) show quote.where(block: true): set block(stroke: (left: 2pt + gray), inset: 0pt, outset: 5pt) show quote.where(block: true): set pad(x: 10pt) if show_toc { outline(indent: 1em) pagebreak() } body }
https://github.com/benedictweis/typst-actions-demo
https://raw.githubusercontent.com/benedictweis/typst-actions-demo/main/main.typ
typst
MIT License
= ABC123 This is an example file
https://github.com/7sDream/fonts-and-layout-zhCN
https://raw.githubusercontent.com/7sDream/fonts-and-layout-zhCN/master/chapters/01-history/ps-ttf-otf.typ
typst
Other
#import "/template/template.typ": web-page-template #import "/template/components.typ": note #import "/lib/glossary.typ": tr #show: web-page-template // ## PostScript Fonts, TrueType and OpenType // 这里和原文意思不一样是有意的 // 主要是因为本段其实没有 OpenType 的相关内容 // 而且作为标题过长,酌情修改 == 从 PostScript 到 TrueType // PostScript level 1 defined two kinds of fonts: Type 1 and Type 3. PostScript Type 3 fonts were also allowed to use the full capabilities of the PostScript language. Prior to this level, fonts could only be specified in terms of graphics instructions: draw a line, draw a curve, and so on. But PostScript is a fully-featured programming language. When we talk about a "PostScript printer", what we mean is a printer which contains a little computer which can "execute" the documents they are sent, because these documents are actually *computer programs* written in the PostScript language. (The little computers inside the printers tended not to be very powerful, and one common prank for bored university students would be to send the printers [ridiculously complicated programs](https://www.pvv.ntnu.no/~andersr/fractal/PostScript.html) which drew pretty graphics but tied them up with computations for hours.) PostScript Level 1 规定了两种字体格式:Type 1 和 Type 3。PostScript Type 3字体还支持完整的 PostScript语言。在这之前,字体只能用图形指令来描述,比如画一条直线、画一个圆之类。但PostScript是一个全功能的编程语言。当我们在说“PostScript 打印机”时,我们指的其实是一台其内部的计算机能够“执行”收到的文档的打印机。这些文档本身也只是用PostScript语言写成的*计算机程序*。(打印机中集成的计算机往往性能不强,对于无聊的大学生们来说,一个常见的恶作剧就是把极端复杂的程序#[@Reggestad.PostScriptFractals.2006]发送给打印机,这些程序可以画出漂亮的图形,但却要花好几个小时进行计算。) // Not many font designers saw the potential of using the programming capabilities of PostScript in their fonts, but one famous example which did was <NAME> and <NAME>'s *FF Beowolf*. Instead of using the PostScript `lineto` and `curveto` drawing commands to make curves and lines, Erik and Just wrote their own command called `freakto`, which used a random number generator to distort the positions of the points. Every time the font was called upon to draw a character, the random number generator was called, and a new design was generated - deconstructing the concept of a typeface, in which normally every character is reproduced identically. 提前看到在字体中运用PostScript程序的潜力的字体设计师并不多,其中一个著名的例子是<NAME>和<NAME>设计的*FF Beowolf*(@figure:beowolf)。他们没有直接使用PostScript的`lineto`和`curveto`指令来绘制曲线和直线,而是自己实现了一个`freakto`指令,它使用随机数生成器来扭曲点的位置。每当需要绘制一个#tr[character]时,字体就会调用随机数生成器,并生成一个新的设计。这其实解构了字体的概念,毕竟在我们的印象中,字体里的每个#tr[character]总是以相同的样子出现。 #figure( caption: [ FF Beowolf字体中的字母e。 ], image("beowolf.jpg", width: 60%), ) <figure:beowolf> // While (perhaps thankfully) the concept of fully programmable fonts did not catch on, the idea that the font itself can include instructions about how it should appear in various contexts, the so-called "smartfont", became an important idea in subsequent developments in digital typography formats - notable Apple's Advanced Typography and OpenType. 尽管完全可编程字体的概念并没有流行开来(也许幸亏如此),但字体本身可以含有如何在各种情形下显示的指令(即所谓“智能字体”)的想法在后来数字#tr[typography]技术的发展中变得非常重要。在Apple Advanced Typography和OpenType中尤其如此。 // Type 3 fonts were open to everyone - Adobe published the specification for how to generate Type 3 fonts, allowing anyone to make their own fonts with the help of font editing tools such as Altsys' Fontographer. But while they allowed for the expressiveness of the PostScript language, Type 3 fonts lacked one very important aspect - hinting - meaning they did not rasterize well at small sizes. 
If you wanted a professional quality Type 1 font, you had to buy it from Adobe, and they kept the specification for creating Type 1 fonts to themselves. Adobe commissioned type designs from well-known designers and gained a lucrative monopoly on high-quality digital fonts, which could only be printed on printers with Adobe PostScript interpreters. Type 3 字体向所有人开放——Adobe公开了生成Type 3 字体的规范,允许任何人通过字体编辑软件,诸如Altsys公司的Fontographer等来制作自己的字体。虽然Type 3 字体中允许使用PostScript语言强大的表现力,但它仍然缺少一个非常重要的东西——#tr[hinting]。这也意味着它们在小尺寸下的#tr[rasterization]效果不够理想。如果要使用专业质量的Type 1 字体,则必须从Adobe购买,他们没有公开创建Type 1 字体的规范。Adobe委托著名设计师创作了一系列高质量字体,并由此达成了利润丰厚的垄断。这些字体只能在使用Adobe PostScript解释器的打印机上进行打印。 // By this time, Apple and Microsoft had attempted various partnerships with Adobe, but were locked out of development - Adobe jealously guarded its PostScript Type 1 crown jewels. In 1987, they decided to counter-attack, and work together to develop a scalable font format designed to be rasterized and displayed on the computer, with the rasterizer built into the operating system. In 1989, Apple sold all its shares in Adobe, and publicly announced the TrueType format at the Seybold Desktop Publishing Conference in San Francisco. The font wars had begun. 那时,苹果和微软尝试与Adobe建立各种合作,但都被拒之门外——Adobe把PostScript Type 1当作掌上明珠。于是在1987年他们决定反击,开始共同开发一种可缩放的字体格式。这一格式为#tr[rasterization]和屏幕显示而设计,而且在操作系统中内置了相应的#tr[rasterization]程序。1989年,Apple出售了他们手中Adobe的全部股份,并在旧金山的Seybold桌面出版大会上公开发布了TrueType格式。字体大战开始了。#footnote[#cite(form: "prose", <Shimada.FontWars.2006>)是对那个时代的极好的历史回顾。] // This was one of the factors which caused Adobe to break its own monopoly position in 1990. They announced a piece of software called "Adobe Type Manager", which rendered Type 1 fonts on the computer instead of the printer. (It had not been written at the time of announcement, but this was intended as a defensive move to keep people loyal to the PostScript font format.) The arrival of Adobe Type Manager had two huge implications: first, by rendering the fonts on the computer, the user could now see the font output before printing it. Second, now PostScript fonts could be printed on any printer, including those with (cheaper) Printer Command Language interpreters rather than the more expensive PostScript printers. These two factors - "What You See Is What You Get" fonts printable on cheap printers - led to the "desktop publishing" revolution. At the same time, they also published the specifications for Type 1 fonts, making high quality typesetting *and* high quality type design available to all. 这是导致Adobe在1990年自发打破其垄断地位的因素之一。他们宣布推出一款名为Adobe Type Manager的软件,它能在计算机而非打印机上渲染并显示 Type 1 字体。(这一软件在发布时其实还没有写出来,这是为了让人们忠于PostScript格式而采取的一种防御措施。)Adobe Type Manager的出现带来了两大影响:首先,通过在计算机上渲染字体,用户在打印之前就能看到字体的输出。其次,现在PostScript字体可以在任何打印机上打印,这不仅包括昂贵的PostScript打印机,也包括了使用打印机命令语言解释器的(很便宜的)那种。这两个因素——可在廉价打印机上使用的“所见即所得”字体——引发了“桌面出版”革命。于此同时,他们还公开了Type 1 字体的规范,使得所有人都可以使用高质量的#tr[typeset]*和*字体设计。
https://github.com/cadojo/correspondence
https://raw.githubusercontent.com/cadojo/correspondence/main/src/dear/src/cover.typ
typst
MIT License
// // Preamble // #import "../../rolo/rolo.typ": * #import "../../options/options.typ": some #let cover( sender: author(), recipient: author(), regarding: none, logo: none, date: datetime.today().display("[month repr:long] [day], [year]"), theme: black, divider: true, header: "default", footer: "default", body, ) = { set stack(spacing: 1em) set par(justify: true) show par: set block(spacing: 2em) set page( fill: white, background: locate( loc => { let current_page = counter(page).at(loc).at(0) if current_page == 1 { place(top, rect(fill: theme, width: 100%, height: 1.75in)) } else { none } } ), header: if type(header) == "string" and lower(header) == "default" { locate( loc => { let current_page = counter(page).at(loc).at(0) if current_page == 1 { let content = { set text(11pt, white) stack( dir: ltr, stack( dir: ttb, spacing: 2em, 1fr, align(left, text(28pt, weight: "bold", fullname(sender.name))), if some(regarding) { text(12pt, "Regarding: ") + text(size: 12pt, weight: "bold", regarding) } else { none }, 1fr, ), 1fr, align( left, stack( dir: ttb, spacing: 0.65em, 1fr, ..contact(sender), 1fr, ) ) ) } place(top, block(height: 1.75in, content)) } } ) } else { header }, footer: if type(footer) == "string" and lower(footer) == "default" { locate( loc => { let pages = counter(page).final(loc).at(0) if pages > 1 { set text(rgb(125,125,125)) place(left, align(left, regarding)) place(right, align(right, counter(page).display("1 / 1", both: true))) } } ) } else { footer }, ) v(1in) let to = if recipient == none { none } else { set text(rgb(125,125,125)) stack( dir: ttb, spacing: 0.65em, ..(fullname(recipient.name), recipient.roles, recipient.affiliations.at(0).department, recipient.affiliations.at(0).name).filter(some), ..address(recipient), ) } // Write if some(to) { align(left, to) } body }
https://github.com/AliothCancer/AppuntiUniversity
https://raw.githubusercontent.com/AliothCancer/AppuntiUniversity/main/capitoli_fisica/acqua_vapore.typ
typst
#import "../custom_functions.typ": c = Miscele bifasiche acqua-vapore == titolo di vapore (x) È la percentuale di quantità(kg) di acqua allo stato di vapore sulla quantità totale di $H_2 O$ contenuta in un volume. Si indica con la lettera *x*. \ \ *Esempio:* - 1 kg di acqua di cui 0,2 kg vapore: - $x = 0.2 / 1 =$ #{0.2/1} == Liquido sottoraffreddato In genere si ha quando liquido saturo viene compresso. Si calcola il lavoro tecnico, volume massico constante perchè liquido si assume incomprimibile. == Miscela liquido-vapore Rappresentato dal caso $0<=x<=1$. Si usano i valori della tabella, in particolare le entropie per le trasformazioni isoentropiche permettono di ricavare il titolo(x) da cui è poi possibile ricavare il valore esatto di entalpia. == Vapore Surriscaldato Si usa la tabella dei valori per i vapori Surriscaldati.
https://github.com/Toniolo-Marco/git-for-dummies
https://raw.githubusercontent.com/Toniolo-Marco/git-for-dummies/main/slides/practice/staging.typ
typst
#import "@preview/touying:0.5.2": * #import themes.university: * #import "@preview/numbly:0.1.0": numbly #import "@preview/fletcher:0.5.1" as fletcher: node, edge #let fletcher-diagram = touying-reducer.with(reduce: fletcher.diagram, cover: fletcher.hide) #import "../components/gh-button.typ": gh_button #import "../components/git-graph.typ": branch_indicator, commit_node, connect_nodes, branch To bring modified files from the working directory to the staging area, we use the `git add` command. #grid(columns: 2, column-gutter: 10%, [ Generally we use the command `git add -A` or `git add .` to add all modified files to the staging area. However, you can add files one at a time with `git add <filename>`. Similarly, we can add all files respecting a Regex with `git add <regex>`; for example: `git add Documentation/\*.txt`, will add all `.txt` files in the `Documentation` folder. ], image("/slides/img/meme/git-add.png",width: 70%) )
https://github.com/spherinder/ethz-infk-thesis
https://raw.githubusercontent.com/spherinder/ethz-infk-thesis/master/pages/abstract.typ
typst
#let abstract_page( author: "", title: "", keywords: (), abstract: "", ..metadata ) = { // TODO Needed, because context creates empty pages with wrong numbering set page( numbering: "i", ) let custom_title(title) = { text(title, weight: "bold") } set par(justify: true) stack( spacing: 10mm, custom_title(author), v(9mm), custom_title("Thesis title"), v(6mm), text(title), v(9mm), custom_title("Keywords"), v(6mm), text(keywords.join(", ")), v(9mm), custom_title("Abstract"), v(6mm), text(abstract), ) }
https://github.com/dssgabriel/master-thesis
https://raw.githubusercontent.com/dssgabriel/master-thesis/main/src/chapters/1-introduction.typ
typst
Apache License 2.0
= Introduction In the last ten years, HPC has experienced a dramatic shift in computer architecture, slowly moving away from general-purpose central processing units (CPU) and instead turning towards heterogeneous systems with specialized hardware designed to accelerate computations. The use of graphical processing units (GPU), field programmable gate arrays (FPGA), or even application-specific integrated circuits (ASIC) has increased significantly in modern supercomputers, often outnumbering CPUs by a factor of four in the systems that have most recently entered the TOP500 ranking. This change leads to a growing need for efficient software that exploits the computational performance unlocked by such accelerators. Even more recently, the surging of artificial intelligence (AI) has pushed performance requirements even further with extensive reliance on GPU architectures, which are especially well-suited for these workloads. This leads to a rapid convergence between HPC and AI, in which both fields depend on similar hardware but accommodate different computational demands. To meet these new criteria and take advantage of the performance improvements offered by GPUs, the software has to change, which involves rewriting significant portions of existing applications. This endeavor is not trivial, and fully exploiting these accelerators requires comprehensive knowledge of GPU architecture. Moreover, rewrites often induce complex communications between CPU and GPU to transfer the data between their respective memory space. These come at a high cost that is difficult to offset, as memory latency and bandwidth have improved very slowly compared with the hardware computing performance. In the pursuit of efficiency, programming languages offer modern tools to work with accelerators, either by offering low-level control of the device (e.g., CUDA C++, OpenCL C, etc.) or by providing higher-level concepts that abstract over architectural details, sometimes sacrificing performance in favor of better code portability (e.g., SYCL, Kokkos, OpenMP, etc.). The Rust programming language is a newcomer in the field of high-performance compiled languages, with its first stable release in 2015. It aims to solve most of the memory and type safety issues that exist in C and C++ while maintaining equivalent performance. It also puts a significant accent on correctness in concurrency contexts by eliminating an entire class of data race bugs thanks to its borrow checker. Rust thus provides robust safety guarantees without performance penalties, packed in a modern syntax with many functional features that align well with the current trends in software engineering. This internship aims to evaluate the viability of Rust as a GPGPU programming language in the context of scientific computing and HPC. In particular, the goal is to determine if we can leverage some of the language's properties to guarantee the robustness, memory and thread safety of GPU codes developed at the CEA.
https://github.com/jhqiu21/Notes
https://raw.githubusercontent.com/jhqiu21/Notes/main/EC1101E%20Introduction%20to%20Economics%20Analysis/Typst/lecture%202.typ
typst
#import "template.typ": * // Take a look at the file `template.typ` in the file panel // to customize this template and discover how it works. #show: project.with( title: "Summary of Lecture 2", authors: ( "JINHANG",), ) // We generated the example code below so you can see how // your document will look. Go ahead and replace it with // your own content! = Demand + *Quantity demanded(Q#super[D]):* The quantity demanded of any good is the amount of the good that buyers are willing and able to purchase. + *Demand schedule:* a table that shows the relationship between the price of a good and the quantity demanded + *Quantity demanded in the market:* The sum of the quantities demanded by all buyers at each price. + *Demand Curve:* Shows how price affects quantity demanded,other things equal + *Demand Curve Shifter:* #table( columns: (auto, auto, auto), inset: 10pt, align: horizon, [*Shifters*], [*Q#super[D]*],[*Shift*], [Number of Buyers $arrow.t$],[$arrow.t$],[Right], [Income $arrow.t$],[Normal Good $arrow.t$\ Inferior Good $arrow.b$],[Normal Good $arrow.r$\ Inferior Good $arrow.l$], [Tastes toward a good],[$arrow.t$],[Right], [Expectations(income$arrow.t$)],[$arrow.t$(maybe)],[Right], ) - Two goods are *substitutes* if an *increase* in the price of one good causes a(n) *increase* in the demand for the other good. - Two goods are *complements* if an *increase* in the price of one good causes a(n) *decrease* in the demand for the other good. = Supply + *Quantity Supplied(Q#super[S]):* The amount of the good that sellers are willing and able to sell. - Price$arrow.t$ $arrow.r.double.long$ Q#super[S]$arrow.b$ + *Supply schedule:* a table that shows the relationship between the price of a good and the quantity supplied + *Quantity supplied in the market:* the sum of the quantities supplied by all sellers at each price + *Supply Curve Shifters:* #table( columns: (auto, auto, auto), inset: 10pt, align: horizon, [*Shifters*], [*Q#super[S]*],[*Shift*], [Number of Sellers $arrow.t$],[$arrow.t$],[Right], [Input Prices $arrow.b$],[$arrow.t$],[Right], [Technology $arrow.t$],[$arrow.b$],[Right], ) - *Weather:* - Ideal weather conditions bring a bumper harvest of sweet and rosy apples; the S curve shifts *right*. - Freezing temperatures in California damage the state’s citrus crops; the S curve shifts *left*. - *Expectations:* Events in the Middle East lead to expectations of higher oil prices. In response, oil fields in Brunei reduce supply now, saving some inventory to sell later when prices are higher. The S curve shifts *left*. = Supply and Demand + *Equilibrium:* a state in which opposing forces are balanced so that one is not greater than the other. + *Surplus (excess supply):* when Q#super[S] $gt$ Q#super[D] + *Shortage (excess demand):* when Q#super[S] $lt$ Q#super[D] + *Shifts vs. Movements:* - *Supply:* A shift in the S curve occurs when a non-price determinant of supply changes. - *Quantity Supplied:* A movement along a fixed S curve occurs when P changes. - *Demand:* A shift in the D curve occurs when a non-price determinant of demand changes. - *Quantity Demanded:* A movement along a fixed D curve occurs when P changes. = Exercise + Which of the following events would unambiguously cause a decrease in the equilibrium price of cotton shirts?\ A. An increase in the price of wool shirts and a decrease in the price of raw cotton.\ B. A decrease in the price of wool shirts and a decrease in the price of raw cotton.\ C. 
An increase in the price of wool shirts and an increase in the price of raw cotton.\ D. A decrease in the price of wool shirts and an increase in the price of raw cotton. = Online Resource + http://www.mcneilecon.com/econ1/exercises1/s&d_exercises/s&d_l1a.html#anchor26327664 + https://ocw.mit.edu/courses/15-988-system-dynamics-self-study-fall-1998-spring-1999/c0478ac5a8d6aa657cd82812fb91ce75_economics.pdf + https://socialsci.libretexts.org/Courses/Riverside_City_College/Book%3A_Principles_of_Microeconomics_(A._Casolari)/03%3A_Demand_and_Supply/3.E%3A_Demand_and_Supply_(Exercises)
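A tiny worked illustration of the surplus/shortage definitions above, using invented numbers (not from the lecture):

```typ
// Invented numbers, purely illustrative.
At $P = 3$: Q#super[S] $= 10$, Q#super[D] $= 6$ $arrow.r.double$ surplus of $4$, so the price tends to fall. \
At $P = 1$: Q#super[S] $= 4$, Q#super[D] $= 9$ $arrow.r.double$ shortage of $5$, so the price tends to rise. \
At equilibrium, Q#super[S] $=$ Q#super[D] and neither pressure remains.
```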
https://github.com/DrGo/typst-tips
https://raw.githubusercontent.com/DrGo/typst-tips/main/refs/samples/typst-uwthesis-master/glossaries.typ
typst
#let abbreviations = ( "aaaaz": ("AAAAZ", "American Association of Amateur Astronomers and Zoologists"), ) #let symbols = ( "rvec": ($bold(v)$, "Random vector: a location in n-dimensional Cartesian space, where each dimensional component is determined by a random process", $bold(v)$), ) #let glossaries = ( "computer": ("computer", "A programmable machine that receives input data, stores and manipulates the data, and provides formatted output", "computer") ) #let GLOSSARIES = ( abbreviation: abbreviations, symbol: symbols, glossary: glossaries )
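A small lookup sketch (not part of the original file) showing how these dictionaries might be consumed, assuming the definitions above are in scope; `"aaaaz"` is the key defined above.

```typ
// Hypothetical consumption sketch.
#let (short, long) = abbreviations.at("aaaaz")
First mention: #long (#short); afterwards simply #short.
```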
https://github.com/Gavinok/typst-res
https://raw.githubusercontent.com/Gavinok/typst-res/master/experience.typ
typst
#import "layout.typ": EXP, job #EXP("Quartech", "Victoria, BC", "Software Developer CO-OP", "May 2023-Present") #job("Develop Configuration API (Python)", "https://github.com/hyperledger/aries-endorser-service")[ Expand the functionality of an existing web service to monitor webhooks and utilize a \ REST-based API for updating the app's configuration at runtime ] #job("Implement Tutorial System for OCA Explorer (TypeScript & React)", "https://github.com/bcgov/aries-oca-bundles")[ Integrate a new tutorial system for non-technical users into an existing React application ] #v(-10pt) #EXP( "Nokia", "Ottawa, ON", "Software Developer In Testing", "September 2021-May 2022") #job("Service Starter (Common Lisp)", "")[ A server that provided a REST API as well as a browser-based GUI using WebSockets for monitoring, restarting, and stopping services. ] #job( "End To End Testing Framework (TypeScript)", "")[ Add support for asynchronous HTTP responses from multiple remote services and reverse-engineer services to create request templates. Doubled the possible test-case coverage. ] #job( "Multiple Part SMS Support (Java)", "")[ Correct the support for multi-part SMS messages sent to a device. ]
https://github.com/Area-53-Robotics/53E-Notebook-Over-Under-2023-2024
https://raw.githubusercontent.com/Area-53-Robotics/53E-Notebook-Over-Under-2023-2024/giga-notebook/entries/lift-pneumatics/entry.typ
typst
Creative Commons Attribution Share Alike 4.0 International
#import "/packages.typ": notebookinator #import notebookinator: * #import themes.radial.components: * #show: create-body-entry.with( title: "Build: Lift Pneumatics", type: "build", date: datetime(year: 2024, month: 3, day: 1), author: "<NAME>", witness: "Violet Ridge", ) = Problem While using the lift, we noticed that it consistently ran out of air before the end of the match. We had it wired in its own pneumatic system, with its own tank, to keep it separate from the wings, but this didn't appear to help. We usually had enough air to push it up once and pull it down once. This mean that if we wanted to hang, we'd have to keep the lift up for the entire match, dramatically limiting movement. Here is a diagram of our current wiring: #image("./wiring-0.svg", width: 50%) We use 1 double acting solenoid to power the lift. This means that the lift uses air on both the up and down stroke. = Solution Since the lift has rubber bands on it, we decided that it was wasteful to use the double acting capabilities of the piston unless we were trying to elevate. This means we needed some way to only use the double acting capabilities at the very end of the match. The solution we came up with was to put a single acting solenoid in the tubing between the double acting solenoid and the pistons. == Wiring #grid( columns: (1fr, 1fr), gutter: 20pt, [ When the piston needs to be extended, the air flows through the `P` and `B` ports of the solenoid, causing the piston to extend. This behaves exactly like it did before we made the wiring changes. ], image("./wiring-1.svg"), [ When we want the piston to retract, but we don't need the extra power to pull the robot into the air, we completely cut off air to the piston. The double acting solenoid allows air to flow from `P` to `A`, but the single acting solenoid blocks it, saving us air. The rubber bands and gravity then pull the whole lift down. ], image("./wiring-2.svg"), [ When we want to lift the robot into the air, we need the extra power of the double acting piston. We open both the double acting solenoid and the single acting one, allowing the air to flow from `P` to `A` in the double acting solenoid, and then from `P` to `A` in the single acting solenoid. This pulls the lift down with both the force of the rubber bands, and the air in the canisters, pulling the robot into the air. ], image("./wiring-3.svg"), ) == Code The code for this ended up being relatively simple: ```cpp void Hang::loop() { switch (get_state()) { case HangState::Expanded: piston->set_value(true); extra_piston->set_value(false); break; case HangState::Idle: piston->set_value(false); extra_piston->set_value(false); break; case HangState::Boosted: // Should only happen at the end of the match piston->set_value(false); extra_piston->set_value(true); break; } } ```
https://github.com/jneug/schule-typst
https://raw.githubusercontent.com/jneug/schule-typst/main/tests/from-0.5.0/test.typ
typst
MIT License
#import "@local/schule:1.0.0": kl #import kl: * #show: klausur.with( autor: ( name: "<NAME>", kuerzel: "Ngb", ), titel: "2. Klausur", reihe: "Rechnernetze", nummer: "2", fach: "Informatik", kurs: "Q1 LK", // version: datetime.today(), dauer: 225, datum: "27.11.2023", loesungen: "seite" ) #aufgabe(titel: "Grundlagen der informatisch-technischen Kommunikation")[ #teilaufgabe[ Das _Sender-Empfänger-Modell_ von Shannon und Weaver gilt als eines der ersten theoretischen Modelle zur technischen Kommunikation. _#operator[Skizziere] die wesentlichen Bestandteile des Modells._ _#operator[Erläutere] das Modell anhand der "Zettelkommunikation" unter Schülerinnen und Schüler während einer Schulstunde._ #text( .88em, [(Schülerinnen und Schüler schreiben sich Zettelchen, die untereinander unter den Tischen weitergegeben werden.)], ) _#operator[Beurteile] die "Zettelkommunikation" anhand der Kriterien Geschwindigkeit, Reichweite, Zuverlässigkeit, Sicherheit, Komplexität und Zuverlässigkeit._ #erwartung([Skizziert die wichtigsten Bestandteile des S-E-Modells.], 3) #erwartung([Erläutert das Modell anhand des genannten Beispiels.], 3) #erwartung([Beurteilt die Kommunikation anhand jedes der fünf Kriterien.], 4) #loesung[ Wichtige Bestandteile: Sender, Empfänger, Sendegerät, Empfangsgerät, Übertragungskanal Codierung, Decodierung, Information, Nachricht ] #loesung[ Die Staatsoberhäupter geben ihre Nachricht an den Dolmetscher, der die Nachricht von der Landessprache in die Sprache des Kommunikationspartners übersetzt (codiert). Die Nachricht kann über verschiedene Kanäle übertragen werden, als Schallwellen oder geschrieben. Beim Empfänger muss die Sprache nicht erneut decodiert werden, da die Sprache schon korrekt ist. Ob die beabsichtigte Information ankommt, hängt von der Interpretation der Sprache ab. Die Übersetzung zwischen Sprachen kann unter Umständen Mißverständnisse befördern. ] #loesung[ - Die *Geschwindigkeit* hängt von den Fähigkeiten der DolmetscherInnen ab. - Die *Reichweite* vom gesprochenen Wort ist eher gering, kann aber durch technische Verstärkungen und Übertragungen erweitert werden. - Die *Sicherheit* hängt auch vom Kanal ab. Die Codierung in eine andere Sprache ist zunächst nicht sicher. Das Gesprochene Wort kann belauscht werden. Flüstert die DolmetscherIn die Worte aber direkt in das Ohr der EmpfängerIn, dann erhöht sich dei Sicherheit bedeutend. - Die *Komplexität* übertragbarer Nachrichten ist sehr hoch, da Sprache universale Nachrichten codieren kann. - Die *Zuverlässigeit* ist auch relativ hoch, sofern die DolmetscherInnen gut ausgebildet sind. ] ] #teilaufgabe[ Schichtenmodelle stellen die Grundlage der technischen Kommunikation dar. Das ISO/OSI-Modell beschreibt sieben Schichten: #align( center, table( columns: (1cm, auto), align: left, [*Nr.*], [*Schicht*], [], [Transportschicht (_Transport Layer_)], [], [Darstellungsschicht (_Presentation Layer_)], [], [Sicherungsschicht (_Data Link Layer_)], [], [Bitübertragungsschicht (_Physical Layer_)], [], [Anwendungsschicht (_Application Layer_)], [], [Vermittlungsschicht (_Network Layer_)], [], [Sitzungsschicht (_Session Layer_)], ), ) _#operator[Gib] die Reihenfolge der Schichten #operator[an], indem Du die Nummern 1 -- 7 in die Tabelle einträgst. (Beginne mit 1 bei der untersten Schicht.)_ _#operator[Beschreibe] allgemein, wie die Schichten eines Schichtenmodells zusammenarbeiten, wenn eine Nachricht von einem Sender zu einem Empfänger verschickt wird. 
(Eine detallierte Beschreibung der Schichten im Einzelnen ist hier nicht nötig.)_ #erwartung([Ordnet die siben Schichten passend an.], 2) #erwartung([Beschreibt den Ablauf einer Kommunikation durch mehrere Schichten hindurch.], 3) #loesung[ #align( center, table( columns: (1cm, auto), align: (center, left), [*Nr.*], [*Schicht*], [*3*], [Vermittlungsschicht (_Network Layer_)], [*2*], [Sicherungsschicht (_Data Link Layer_)], [*7*], [Anwendungsschicht (_Application Layer_)], [*4*], [Transportschicht (_Transport Layer_)], [*6*], [Darstellungsschicht (_Presentation Layer_)], [*5*], [Sitzungsschicht (_Session Layer_)], [*1*], [Bitübertragungsschicht (_Physical Layer_)], ), ) ] #loesung[ Jede Schicht arbeitet mit der darüber und der darunter zusammen (die oberste und unterste jeweils nur mit einer). Bei ausgehenden Nachrichten nimmt die Schicht die Daten der Vorgängerschicht entgegen, reichert diese ggf. mit Metadaten (Headern) an, die zur Ausführung ihrer Funktion notwendig sind, und gibt die neue Nachricht an die nachfolgende Schicht weiter. Dabei werden die Daten unter Umständen in kleinere Teie zerlegt. Bei eingehenden Nachrichten werden die Daten der vorherigen Schicht entgegengenommen und auf die Metadaten der äquivalenten Sender-Schicht geprüft. Ggf. müssen dazu mehrere Datenpakete abgewartet werden, die zunächst wieder zusammengesetzt werden müssen. Den erhaltenen Daten werden dei Metadaten entfernt und an die Folgeschicht weitergegeben. ] ] #teilaufgabe[ Aufgrund seiner Bedeutung für die Kommunikation im Internet ist das TCP/IP-Modell das am weitesten verbreitete Kommunikationsmodell. Es fasst einige Schichten des ISO/OSI-Modells zusammen und besteht nur aus vier Schichten. Die wichtigsten sind die Transport- und die Vermittlungsschicht. #erwartung([Beschreibt die Funktion der Schichten.], 3) #erwartung([Beschreibt die Adressierungsmethoden (IP und Portnummern).], 2) #loesung[ Die Transportschicht (TCP) stellt eine persistente Verbindung zwischen Sender und Empfänger her. Die Transportschicht sorgt dafür, dass die Verbindung für die darüberliegende Schicht wie eine persistente Verbindung wirkt. Sie verwendet *Ports* als Adressierungsmethode. Die Internetschicht (IP) sorgt für dei Weiterleitung von Paketen und das Routing vom Sendegerät zum Empfangsgerät auch über mehrere Zwischenknoten hinweg. Es verwendet *IP-Adressen* zur Adressierung. ] ] ] #pagebreak() #aufgabe(titel: "Routing in vermaschten Netzen")[ In einem Netzwerk sind zwei Rechner mit den folgenden IPs vorhanden: #align( center, table( columns: 2, [*IP*], [*Subnetmaske*], [`192.168.0.10`], [`255.255.252.0`], [`192.168.1.23`], [`255.255.252.0`], ), ) #teilaufgabe[ _#operator[Gib] für beide Rechner den Netzwerk- und Geräteteil, die Brodcastadresse und das Default-Gateway, sowie den IP-Bereich (Min./Max. IP) an._ #erwartung([Gibt die gesuchten Informationen über das Netzwerk an.], 4) #loesung[ #align( center, table( columns: (4cm, auto, auto), align: left, [], [`192.168.0.10`], [`192.168.1.23`], [Netzwerkteil], [`192.168.0.0`], [`192.168.0.0`], [Geräteteile], [`0.0.0.10`], [`0.0.0.10`], [Broadcast], [`192.168.3.255`], [`192.168.3.255`], [Minimale IP], [`192.168.0.1`], [`192.168.0.1`], [Maximale IP], [`192.168.3.254`], [`192.168.3.254`], ), ) Beide Rechner liegen im selben Netzwerk. ] ] Ein neuer Rechner mit der IP `192.168.5.17` und der Subnetmaske `255.255.252.0` wird in das Netzwerk eingebunden. 
#teilaufgabe[ _#operator[Ermittele], mit welchem der obigen Rechner der Neue im selben _logischen Netzwerk_ liegt._ #erwartung([Mit keinem, die die Netzwerkadresse `192.168.4.0` lautet.], 2) #loesung[ Netzteil: `192.168.4.0` #sym.arrow.r *Nicht im selben Netzwerk!* ] ] #teilaufgabe[ #quote[Eine Paket im Internet nimmt immer den kürzesten Weg vom Sender zum Empfänger.] _#operator[Beurteile] diese Aussage mit Deinem Wissen über das Routing in vermaschten Netzwerken._ #erwartung([Beurteilt die Aussage hinreichend genau.], 4) #loesung[ Ein Paket in einem vermaschten Netzwerk wird über verschiedene Zwischentsationen vom Sender zum Empfänger geleitet. Jede Station entscheidet autonom, an welchen nächsten Knoten die Nachricht weitergeleitet wird. In der Regel versucht dabei jeder Knoten einen möglichst effizienten (schnellen) Weg zu nehmen. Dies ist aber nicht garantiert. Knoten können anders (oder falsch) konfiguriert sein, und kein Knoten kennt das komplette Netzwerk. Daher ist der kürzeste Weg nicht garantiert. ] ] ] #aufgabe(titel: "Mensa-App")[ An einer Universität können die Studierenden ihr Essen in der Mensa bis zu drei Tage im Voraus bestellen. Dazu müssen sie ihren Essenswunsch in der Mensa angeben. Dieser wird auf einem Zettel notiert und in die Küche gegeben. Dabei ist es in der Vergangenheit immer wieder dazu gekommen, dass Essen vergessen oder nicht abgeholt wurden. Um diesen Problemen zu begegnen hat die Univerwaltung eine App in Auftrag gegeben, mit der das Essen im Voraus per bestellt werden kann. Dazu wurde schon eine Client-App entwickelt, die mit dem Mensa-Server kommunizieren soll. An jedem Tag gibt es jeweils zwei Hauptgerichte und zwei Nachspeisen, aus denen die Studierenden jeweils eins beziehungsweise eine wählen können. Für Freitag, den 15.09.2023, mit den Hauptgerichten Vegetarisches Risotto und Weißwurst mit Brezel sowie den Nachtischen gemischtes Eis und Schokoladenpudding haben zwei Mitarbeitende folgende Bestellungen notiert: #grid( columns: (1fr, 1fr), figure( { set align(left) table( columns: (4cm, 3cm), stroke: none, fill: luma(220), [Fr., 15|09|23], [], [Veg.Risotto], [IIII], [Wurst], [III], [], [], [gem. Eis], [III], [Pudding], [III], [Obst], [], ) }, caption: [Bestellung Mitarbeitende I], ), figure( { set align(left) table( columns: (4cm, 3cm), stroke: none, fill: luma(220), [], [für morgen], [Risotto], [4], [Weißwurst], [3], [], [], [Eis], [3], [Pudding], [3], [Obst], [keins], ) }, caption: [Bestellung Mitarbeitende II], ), ) Innerhalb der App sollen Studierende nach der Anmeldung mit ihrer Martrikelnummer und dem persönlichen Passwort das Essen der kommenden drei Tage einsehen und Bestellungen für jeden oder einzelne der drei Tage abgeben können (jeweils ein Hauptgericht und eine Nachspeise). Sie sollen außerdem ihre bisher getätigten Bestellungen einsehen können. Eine Stornierung ist in der ersten Version nicht vorgesehen. Die Mensaküche soll in der Lage sein, alle Bestellungen für einen beliebigen Tag abzurufen. 
#teilaufgabe[ _#operator[Beschreibe] ausgehend von der bisherigen Notation der Bestellungen die Unterschiede zwischen menschlicher und informatisch-technischer Kommunikation._ _#operator[Erläutere] darüber hinaus den Sinn von Protokollen in der technisch-informatischen Kommunikation._ #erwartung([Beschreibt die bisherige Notation.], 3) #erwartung([Erläutert den Sinn von Protokollen.], 3) #loesung[ Menschen erfassen Muster viel schnelle rals Maschinen und können aus verschiedenen Darstellungen von Informationen leicht auf dieselbe Information schließen. Beispielsweise wird in der bisherigen Notation eine Zahl auf verschiedene Arten codiert (Ziffer und Strichliste). Eine Maschine benötigt genaue Anweisungen, wie Daten zu interpretieren sind. Sie kann zwar auch Muster erkennen, aber nur bis zu einem gewissen grad und unter großem Aufwand. Technische Kommunikation ist daher auf genau Festlegungen der Formate angewiesen. Protokolle übernehmen diese Aufgabe, der Kommunikation einen klaren Rahmen mit genauen Befehlen, Verhalten und Formaten zu geben. Beide Seiten können sich auf dieses Protokoll berufen und die Daten entsprechend der Festlegungen leicht verarbeiten. ] ] #teilaufgabe[ _#operator[Entwickele] basierend auf den bisherigen Bestellungen konkrete _Formate_ für die folgenden Fälle, die in ein Protokoll für die Bestell-App einfließen sollten._ 1. Abruf der verfügbaren Mahlzeiten innerhalb der nächsten drei Tage. 2. Abgeben einer Bestellung für einen Tag (Hauptgericht plus Nachspeise). #hinweis[Eine Angabe von Befehlen ist nicht nötig, sondern nur das Format, in dem die Daten übermittelt werden sollen.] #erwartung([Entwickelt Formate für die genannten Fälle.], 4) #loesung[ Notwendige Bestandteile der SchülerInnen-Lösung: - Angabe des Tages - Liste der Mahlzeiten (dem Tag zugeordnet) ] ] #teilaufgabe[ _#operator[Entwirf] ein möglichst vollständiges Protokoll für die Vorbestellung von Mahlzeiten._ (Der Abruf durch die Mensa muss nicht berücksichtigt werden.) #erwartung([Entwickelt ein vollständiges, sinnvolles Protokoll.], 8) #loesung[ #table( columns: 3, [*Befehl*], [*Bedeutung*], [*Erwartete Antwort*], [`LOGIN <username> <password>`], [Anmeldung am Server mittels Nutzernamen und Passwort.], [`++ Willkommen bei der Mensa-App`], [`MEALS <datum>`], [], [], [`ORDER <datum> <mahlzeit> <dessert>`], [], [], ) ] ] Für den Abruf der Bestellungen vom Server gibt es in der Mensa-Küche einen Computer, der tagesaktuell die Bestellungen anzeigt. Für den Abruf wurde das folgende Protokoll definiert: #figure( { set text(.88em) set align(left) table( columns: 3, [*Befehl*], [*Bedeutung*], [*Erwartete Antwort*], [`LOGIN <username> <password>`], [Anmeldung am Server mittels Nutzernamen und Passwort.], [`++ Willkommen bei der Mensa-App` _oder im Fehlerfall_ `-- Keine Anmeldung möglich`], [`DATE <YYYY-MM-DD>`], [Legt das Datum des Tages fest, von dem Daten abgerufen werden sollen.], [`++ Datum auf <DD.MM.YYYY> gesetzt` _oder im Fehlerfall eines von_ - `-- Datumsformat nicht erkannt`\ - `-- Datum schon vergangen`\ - `-- Datum zu weit in der Zukunft`], [`FETCH (MAIN|DESSERT)`], [Abruf der Bestelldaten für Hauptgerichte (`MAIN`) bzw. Nachspeisen (`DESSERT`) des vorher gewählten Datums.], [ Der Server sendet Zeilenweise eine Liste der Bestellungen im Format `<Name des Gerichts> :: <Anzahl Bestellungen>`. Beendet wird die Liste von einem einzelnen Punkt (`.`). 
`++ Hauptspeisen 15.09.2023`\ `RISOTTO_VEG :: 4`\ `WEISSWURST :: 3`\ `.` _ oder im Fehlerfall_ `-- Kein Datum gewählt` ], ) }, caption: [Protokoll zum Abruf der Bestelldaten.], ) #teilaufgabe[ _#operator[Skizziere] ein Sequenzdiagramm zum Abruf der Bestelldaten nach folgendem Ablauf:_ + Der Küchen-Computer baut eine Verbindung auf und meldet sich mit den Zugangsdaten `mensa-01` / `123456` am Server an. + Der Computer setzt das Datum auf den 27.11.2023. + Der Computer ruft die bestellten Hauptspeisen vom Server ab. + Der Computer ruft die bestellten Nachspeisen vom Server ab. Die Bestellungen für den 27.11.2023 sind: #align( center, grid( columns: 2, gutter: 1cm, [Hauptgerichte #table( columns: (auto, 1.8cm), [*Name*], [*Anzahl*], [`PIZZA_MARG`], [124], [`PIZZA_SALA`], [56], [`PIZZA_THUN`], [77], [`PASTA_PESTO`], [98], )], [ Nachspeisen #table( columns: (auto, 1.8cm), [*Name*], [*Anzahl*], [`EIS`], [86], [`PUDDING`], [12], [`OBSTSALAT`], [64], )], ), ) #erwartung([Skizziert ein Sequenzdiagramm zum angegebenen Ablauf.], 8) #loesung[ Schülerlösung ] ] #teilaufgabe[ Der Verein "Studierende für Datensicherheit" gibt bei der Universitätsverwaltung zu bedenken, dass die Verwendung einer App für die Bestellungen dazu führen könnte, dass die persönlichen Daten der Studierenden über das Internet ausgespäht werden. Der Aufbau des Internets wäre zu unsicher, da in einem vermaschten Netz nicht kontrolliert werden kann, wer alles mithört. Außerdem sehen sie es als wichtig an, dass das System ausfallsicher ist, damit das leibliche Wohl der Studierenden nicht gefährdet wird, wenn sie kein Essen bestellen können. Der Verein schlägt vor, die Bestellungen ausschließlich von den Computern innerhalb der der Universität durchführen zu lassen. Die Rechner der Uni sind alle in einer großen Ring-Topologie vernetzt. _#operator[Beurteile] den Vorschlag des Studierenden-Vereins hinsichtlich der geforderten Kriterien._ #erwartung([Beurteilt den Vorschlag hinreichen genau.], 4) #loesung[ In einer Ring-Topologie ist die Sicherheit gering, da alle Clients im Netzwerk die Nachricht mitlesen können (zumindest die zwischen Sender und Empfänger). Sofern nicht auf weitere Verschlüsselungen zurückgegriffen wird, wäre diese Idee unzureichend. ] ] ]
https://github.com/soul667/typst
https://raw.githubusercontent.com/soul667/typst/main/PPT/typst-slides-fudan/themes/polylux/book/src/dynamic/reserve.md
markdown
# Reserve space or not? When you want to specify that a certain piece of content should be displayed on some subslides but not on others, the first question is what should happen on the subslides it is _not_ displayed on. You could want either - that it does not exist there at all, or - that it is invisible but still occupies the space it would otherwise need (see [the docs of the `#hide` function](https://typst.app/docs/reference/layout/hide/)). The two behaviours can be achieved using `#only` or `#uncover`, respectively. The intuition is that, in one case, content _only_ exists on some slides, and, in the other case, it is merely _covered_ when not displayed.
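A minimal sketch of the difference, assuming polylux is imported and we are inside a slide:

```typ
#polylux-slide[
  // Reserves its space: on subslide 1 this line is hidden but the gap remains.
  #uncover(2)[Appears on subslide 2; space is kept on subslide 1.]
  // Does not reserve space: on subslide 1 the next line moves up to fill the gap.
  #only(2)[Appears on subslide 2 only; no space is kept before that.]
  Always visible.
]
```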
https://github.com/Mc-Zen/quill
https://raw.githubusercontent.com/Mc-Zen/quill/main/tests/examples/qft/test.typ
typst
MIT License
#set page(width: auto, height: auto, margin: 0pt) #include "/examples/qft.typ"
https://github.com/AxiomOfChoices/Typst
https://raw.githubusercontent.com/AxiomOfChoices/Typst/master/Courses/Math%20595%20-%20Geometric%20Analysis/Symmetrization%20talk/Talk%20notes.typ
typst
#import "../../../Templates/generic.typ": generic #import "../../../Templates/notes.typ": chapter_heading #import "@preview/ctheorems:1.1.0": * #import "../../../Templates/math.typ": * #show: doc => generic(title: "Talk notes", name: "<NAME>", doc) #show: doc => chapter_heading(doc) #show: thmrules #let ve = $epsilon$ #let seq = $subset.eq$ #let Mink = math.op("Mink") #let fu = $frak(U)$ #let pb() = {pagebreak(weak: true)} = Introduction Historically one of the first methods used to attack the Isoperimetric inequality is the method of symmetrization which, as the name suggests, exploits the symmetries of the ambient space. I will be presenting the modern version of this argument in the most symmetric spaces that exist in Riemannian geometry, space forms. #definition[ A _Space form_ is a Riemannian $n$-manifold which is simply connected, complete and has constant sectional curvature $kappa$. ] #proposition[ There are exactly three space forms up to rescaling and isometries. These are $S^n, RR^n, HH^n$ with sectional curvatures $kappa = 1,0,-1$ respectively. ] Space forms enjoy a number of strong symmetry properties but we will only use $2$ for what follows. #proposition("Frame homogeneity")[ Let $M$ be a space form, for any two points $p,q in M $ and any orthonormal basis $e_i$ for $p$ and $e_i'$ for $q$ we have an isometry $f$ such that $f(p) = q$ and $f_*(e_i) = e_j$. ] <prop:homog> #proposition("Decomposition")[ Let $M = S^n, RR^n, HH^N$ be a space form, $p in M$ and unit tangent vector $xi in T_p M$, we have a decomposition of $M$ into leaves $M_t$ enjoying the following properties. + Each leaf $M_t$ is isometric to a rescaling of $M^(n-1)$ where $M^(n-1)$ is the lower dimensional version of $M$. Each leaf has sectional curvature $1 + tan^2(t), 0, -1 + tanh^2(t)$ respectively. + The slice $M_t$ goes through $gamma(t)$ and is orthogonal to $gamma'(t)$ where $gamma$ is the geodesic with $gamma(0) = p$ and $gamma'(0) = xi$. + The geodesics orthogonal to $M_t$ allow us to identify points on different leaves. The distance between a point $q in M_t$ and its identification $q' in M_(t')$ is $|t - t'|$. + We have a family of global maps $lambda_s$ sending each point $q in M_t$ to its identified point $q' in M_(t+s)$. + The Riemannian measure $dif V$ decomposes as $dif V = f(t) dif t dif A$ where $dif A$ is the Riemannian measure on $M^(n-1)$ and $f$ is some function. ] <prop:decomposition> #pb() = Balls are the optimal shape For the rest of the talk we will fix $M = S^n, RR^n, HH^n$. The isoperimetric inequality states that for any compact subset $X seq M$ with smooth boundary $diff X$ we have $ A(diff X) >= A(diff B) $ where $B$ is the ball in $M$ with $V(B) = V(X)$. We will first use a small generalization of area which will allow us to reason about area through volume. We will denote by $[X]_ve$ the $ve$ ball around a subset $X in M$ (recalling that the Riemannian metric gives us a standard metric), and by $frak(X)$ the set of compact subsets of $M$ #definition[ Let $X$ be a compact subset of $M$ (not necessarily smooth). We define its _Minkowski area_ to be $ Mink(X) = liminf_(h arrow.b 0) (V([X]_ve) - V(X))/ve $ ] This new concept of area is indeed a generalization of our old concept of area since #proposition[ If $diff X$ is compact then $Mink(X) = A(X)$. ] We need one last definition before we can start the proof. 
#definition[ Let $X$ be a compact subset, we define the _undercut set_ $fu(X)$ to be $ fu(X) = { Y in frak(X) | V(Y) = V(X), V([Y]_ve) <= V([X]_ve), forall ve > 0 } $ ] Because of the above proposition to show the Isoperimetric inequality it is sufficient to show that #theorem[ Let $X$ be a compact subset of $M$ there exists a ball $B seq M$ with $B in fu(X)$. ] We will prove this by induction on $n$, so for the rest of the chapter we will assume that this holds for $M^(n-1)$. #pb() We will now outline the method of proof for this theorem + Define a total ordering $<=_r$ on $fu(X)$ and prove that a $<=_r$ minimal element exists in $fu(X)$. + Prove that for any non-ball element $Y in fu(X)$ there exists an element $S(Y) in fu(X)$ with $S(Y) <_r Y$ and thus $Y$ cannot be $<=_r$ minimal. == Circumradius #definition[ The circumradius $r(X)$ of a bounded subset $X$ is defined to be $ r(X) := inf { r | exists x_0 in M "with" X seq overline(B(x_0,r))} $ ] #proposition[ $r(X)$ is always achieved by a ball $B(x_0,r(X))$ called the minimal ball. ] #proof[ Take a sequence of balls $overline(B(x_n, r_n))$ containing $X$ with $r_n arrow.b r(X)$, then all $x_n$ are contained within the compact subset $overline(B(X,r_1))$ and thus they have a converging subsequence $x_(n_k) -> x_infinity$, then it is easy to see that $B(x_infinity, r(X))$ contains $X$ ] We will definte our ordering on $fu(X)$ to be $X <=_r Y <=> r(X) <= r(Y)$ #definition[ The Hausdorff metric on $frak(X)$ is given by $ d(X,Y) = min {r | X seq [Y]_r and Y seq [X]_r } $ ] #proposition[ $V: frak(X) -> RR$ is upper-semicontinuous on $frak(X)$ ] <prop:upper> #proof[ Consider the open sets $U_k = B(Y, 1 / k)$, since $Y$ is compact we have $Y = sect.big_k U_k$ and so by continuity of measure $V(U_k) arrow.b V(Y)$. Let $Y_n$ be a sequence of subsets with $Y_n -> Y$ then for any $k$ we will eventually have $Y_n seq U_k$ and so $V(Y_n) <= V(U_k) arrow.b V(Y)$ and so $lim sup V(Y_n) <= V(Y)$ ] #lemma[ For any compact set $X$, $fu(X)$ contains an $<=_r$ minimal element. ] #proof[ By @prop:homog[Prop.] we can translate the elements of $fu(X)$ so that their minimal ball is concentring with the minimal ball of $X$. We can also restrict ourselves to the elements $Y in fu(X)$ with $r(Y) <= r(X)$. Set $r = inf{r(Y), Y in fu(X)}$. Take a sequence of such elements $Y_n$ with $r(Y_n) arrow.b r$, since $r(Y) <= r(X)$ we have that $Y_n seq overline(B(x_0,r(X)))$ for all $n$ and so there exists a subsequence $Y_(n_k)$ that converges in the Hausdorff metric to an compact subset $Y seq overline(B(x_0,r(X)))$. Now since this convergence is in the Hausdorff metric we know that for any $ve >= 0$ we have $[Y]_ve seq [Y_n]_(ve + eta_n)$ for some $eta_n arrow.b 0$ and so we have $ V([Y]_ve) <= V([Y_n]_(ve + eta_n)) <= V([X]_(ve + eta_n)) $ and so by taking $n -> infinity$ we get $V([Y]_ve) <= V([X]_ve)$. By taking $ve = 0$ above we get that $V(Y) <= V(X)$ and by @prop:upper[Prop.] we get $V(Y) >= limsup_n V(Y_n) = V(X)$. Thus $Y in fu(X)$ and has $r(Y) = r$ ] == Symmetrization Now that we know a minimal element exists we want to take a non-disk element of $fu(X)$ and 'symmetrize' it to decrease its circumradius. #definition[ Take a compact subset $X seq M$ along with a point $p$, and a unit tangent vector $xi in T_p M$. By @prop:decomposition[Prop.] we get a decomposition of $M$ into leaves $M_t$. We denote by $[X]^t$ the intersection $X sect M_t$. 
The symmetrization $S_xi (X)$ is defined as $ union.big_(t in RR) B_(M_t)(gamma(t),r_t) $ where $r_t$ is chosen so that $A(B_(M_t)(gamma(t), r_t)) = A([X]^t)$. In other words we are taking each slice $[X]^t$ and replacing it with a ball of equal centered at $gamma(t)$. ] #lemma("Symmetrizaiton undercuts")[ For any compact $X$, any point $p in M$ and any unit tangent vector $xi in T_p M$ we have $S_xi (X) in fu(X)$. ] #proof[ It is immediate from the decomposition of the Riemannian measure that $ V(X) = integral_(-oo)^(oo) A([X]^t) dif t $ and so $V(X) = V(S_xi (X))$. We will write $W = S_xi (X)$ for brevity. We want to show now that $V([W]_ve) <= V([X]_ve)$ for all $ve > 0$. We will prove this by showing it is true for each slice. First consider the slice $Z = [B(X, ve)]^t$ of the inflation of $X$, what preinflated slices of $X$ contribute to $Z$? We can easily see that only the slices $X^s$ with $s in [t - ve, t + ve]$ contribute to $Z$ since any other slices are too far away. Now fix $s in [t - ve, t + ve]$, how does the slice $[X]^s$ contribute to $Z$? We by the rotational symmetry (@prop:homog) of the space we get that for any point $x in M_s$ the ball $B(x, ve)$ will intersect with $M_t$ in a circle $B_(M_t)(lambda_(t - s) x, h(t,s,ve))$ where $h$ does not depend on $x$. We can rewrite this as $lambda_t B_(M_0)(lambda_(-s) x, h(t,s,ve))$. We thus get that $ [B([X]^s, ve)]^t = lambda_t B_(M_0)(lambda_(-s) [X]^s, h(t,s,ve)) $ and thus so by unioning the contributions from all slices we get $ [B(X, ve)]^t = union.big_(s in [t - ve, t + ve]) lambda_t B_(M_0)(lambda_(-s) [X]^s, h(t,s,ve)) $ Using this form we can get $ A([B(X,ve)]^t) &= A(union.big_(s in [t - ve, t + ve]) lambda_t B_(M_0)(lambda_(-s) [X]^s, h(t,s,ve))) \ &>= sup_(s in [t - ve, t + ve]) A(lambda_t B_(M_0)(lambda_(-s) [X]^s, h(t,s,ve))) $ Now for $W$ the exact same logic holds, but in the last step we will get a union of concentric circles and so its area will be equal to that of the largest circle. We thus have $ A([B(W,ve)]^t) = sup_(s in [t - ve, t + ve]) A(lambda_t B_(M_0)(lambda_(-s) [W]^s, h(t,s,ve))) $ Now by the inductive hypothesis we have that $ A(lambda_t B_(M_0)(lambda_(-s) [W]^s, h(t,s,ve))) <= A(lambda_t B_(M_0)(lambda_(-s) [X]^s, h(t,s,ve))) $ for all $s,t$ and so we get $ A([B(W,ve)]^t) <= A([B(X,ve)]^t) $ for all $t$ which gives us $V([W]_ve) <= V([X]_ve)$. This proves that $W in fu(X)$. ] #lemma("Symmetrization decreases circumradius")[ For any compact $X$ which is not a ball, there exists a point $p$ and tangent vectors $xi_1,...,xi_n in T_p M$ such that $ r(S_(xi_n) ( S_(xi_(n-1))(...(S_(xi_1)(X))...))) < r(X) $ ] #proof[ Let $r = r(X)$ and let $B(x_0,r)$ be the minimal ball of $X$ then since $X$ is not a ball the set $diff B(x_0,r) without X$ is nonempty and open, now note that if $x in diff B(x_0,r) without X$ then we also have $x in diff B(x_0,r) without S_xi (X)$ for any $xi$. Our job then will be to make this set larger and larger until it is the entire boundary, then if $X$ does not intersect the boundary, then since it is compact then we can shrink the ball by some positive amount, provin the resulting symmetrization has smaller circumradius. To achieve this consider the largest circle contained in $diff B(x_0,r) without S_xi (X)$, if this circle contains a hemisphere of $diff B(x_0,r)$ then it contains two antipodal points and so symmetrization along the axis going through the two points will be sufficient to make the intersection empty. 
Otherwise, take an axis aligned with the boundary of this circle; after symmetrization, the circle's radius doubles. Continuing in this way gives us the desired result. ]
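A quick sanity check of the Minkowski-area definition used above (not part of the original notes), written with the note's own `Mink` and `ve` shorthands: in Euclidean n-space the ball has V(B_r) = c_n r^n with c_n the unit-ball volume, and the definition recovers the usual boundary area.

```typ
$ Mink(B_r) = lim_(ve arrow.b 0) (V(B_(r + ve)) - V(B_r)) / ve
  = dif / (dif r) (c_n r^n) = n c_n r^(n - 1) = A(diff B_r) $
```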
https://github.com/sitandr/typst-examples-book
https://raw.githubusercontent.com/sitandr/typst-examples-book/main/src/packages/drawing.md
markdown
MIT License
# Drawing ## `cetz` Cetz is an analogue of LaTeX's `tikz`. Maybe it is not as powerful yet, but certainly easier to learn and use. It is the best choice in most of cases you want to draw something in Typst. ```typ #import "@preview/cetz:0.1.2" #cetz.canvas(length: 1cm, { import cetz.draw: * import cetz.angle: angle let (a, b, c) = ((0,0), (-1,1), (1.5,0)) line(a, b) line(a, c) set-style(angle: (radius: 1, label-radius: .5), stroke: blue) angle(a, c, b, label: $alpha$, mark: (end: ">"), stroke: blue) set-style(stroke: red) angle(a, b, c, label: n => $#{n/1deg} degree$, mark: (end: ">"), stroke: red, inner: false) }) ``` ```typ #import "@preview/cetz:0.1.2": canvas, draw #canvas(length: 1cm, { import draw: * intersections(name: "demo", { circle((0, 0)) bezier((0,0), (3,0), (1,-1), (2,1)) line((0,-1), (0,1)) rect((1.5,-1),(2.5,1)) }) for-each-anchor("demo", (name) => { circle("demo." + name, radius: .1, fill: black) }) }) ``` ```typ #import "@preview/cetz:0.1.2": canvas, draw #canvas(length: 1cm, { import draw: * let (a, b, c) = ((0, 0), (1, 1), (2, -1)) line(a, b, c, stroke: gray) bezier-through(a, b, c, name: "b") // Show calculated control points line(a, "b.ctrl-1", "b.ctrl-2", c, stroke: gray) }) ``` ```typ #import "@preview/cetz:0.1.2": canvas, draw #canvas(length: 1cm, { import draw: * group(name: "g", { rotate(45deg) rect((0,0), (1,1), name: "r") copy-anchors("r") }) circle("g.top", radius: .1, fill: black) }) ``` ```typ // author: LDemetrios #import "@preview/cetz:0.2.2" #cetz.canvas({ let left = (a:2, b:1, d:-1, e:-2) let right = (p:2.7, q: 1.8, r: 0.9, s: -.3, t: -1.5, u: -2.4) let edges = "as,bq,dq,et".split(",") let ell-width = 1.5 let ell-height = 3 let dist = 5 let dot-radius = 0.1 let dot-clr = blue import cetz.draw: * circle((-dist/2, 0), radius:(ell-width , ell-height)) circle((+dist/2, 0), radius:(ell-width , ell-height)) for (name, y) in left { circle((-dist/2, y), radius:dot-radius, fill:dot-clr, name:name) content(name, anchor:"east", pad(right:.7em, text(fill:dot-clr, name))) } for (name, y) in right { circle((dist/2, y), radius:dot-radius, fill:dot-clr, name:name) content(name, anchor:"west", pad(left:.7em, text(fill:dot-clr, name))) } for edge in edges { let from = edge.at(0) let to = edge.at(1) line(from, to) mark(from, to, symbol: ">", fill: black) } content((0, - ell-height), text(fill:blue)[APPLICATION], anchor:"south") }) ```
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/rivet/0.1.0/src/xml-loader.typ
typst
Apache License 2.0
#let find(elmt, tag) = { if not "children" in elmt { return none } return elmt.children.find(e => "tag" in e and e.tag == tag) } #let find-all(elmt, tag) = { if not "children" in elmt { return () } return elmt.children.filter(e => "tag" in e and e.tag == tag) } #let parse-values(elmt) = { let values = (:) let case-elmts = find-all(elmt, "case") for case-elmt in case-elmts { let val = case-elmt.attrs.value let desc = case-elmt.children.first() let struct = none if "structure" in case-elmt.attrs { struct = case-elmt.attrs.structure } values.insert(val, if struct != none { ( description: desc, structure: struct ) } else { desc } ) } return values } #let parse-range(elmt) = { let range_ = ( name: elmt.attrs.name ) let desc = none if "children" in elmt { desc = find(elmt, "description") } if desc != none { range_.insert("description", desc.children.first()) } let values-elmt = find(elmt, "values") if values-elmt != none { range_.insert("values", parse-values(values-elmt)) } if "depends-on" in elmt.attrs { range_.insert("depends-on", elmt.attrs.depends-on) } return range_ } #let parse-structure(elmt) = { let ranges = (:) let range-elmts = elmt.children.filter(e => "tag" in e and e.tag == "range") for range-elmt in range-elmts { let span = range-elmt.attrs.end + "-" + range-elmt.attrs.start ranges.insert(span, parse-range(range-elmt)) } return ( bits: elmt.attrs.bits, ranges: ranges ) } #let parse(content) = { let struct-elmts = content.children.filter(e => "tag" in e and e.tag == "structure") let color-elmts = content.children.filter(e => "tag" in e and e.tag == "color") let structures = (:) let colors = (:) for struct-elmt in struct-elmts { structures.insert( struct-elmt.attrs.id, parse-structure(struct-elmt) ) } for color-elmt in color-elmts { let struct = color-elmt.attrs.structure if not struct in colors { colors.insert(struct, (:)) } let span = color-elmt.attrs.end + "-" + color-elmt.attrs.start colors.at(struct).insert(span, color-elmt.attrs.color) } return ( structures: structures, colors: colors ) } #let load(path) = { let content = xml(path).first() return parse(content) }
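A usage sketch for the loader above; the import path and file name are illustrative assumptions, and the comments describe what `parse` returns.

```typ
#import "xml-loader.typ": load        // hypothetical relative path
#let schema = load("registers.xml")   // invented file name
// `schema.structures` maps each structure id to (bits: .., ranges: (..)),
// and `schema.colors` maps structure ids to "end-start" spans -> color strings,
// mirroring the dictionaries built by parse() above.
#schema.structures.keys().join(", ")
```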
https://github.com/Many5900/aau-typst
https://raw.githubusercontent.com/Many5900/aau-typst/main/chapters/chapter1.typ
typst
#import "../custom.typ": * = Chapter 1 #lorem(300) #todo[Remember to remove the "Lorem ipsum" text in chapter 1!]
https://github.com/pku-typst/unilab
https://raw.githubusercontent.com/pku-typst/unilab/main/template/main.typ
typst
MIT License
#import "@preview/unilab:0.0.2": * #set text(lang: "zh") #show: doc => labreport( course-name: lorem(3), exper-name: lorem(5), exper-date: "2024-03-26", handin-date: "2024-04-02", exper-no: 6, student-name: "张三", student-no: 2020200111, faculty: "物理学院", logos: (image("./school-logo.svg"), image("./school-text.svg")), doc, ) #principles - #lorem(12) - #lorem(18) - #lorem(9) #apparatus - #lorem(6) - #lorem(3) - #lorem(5) #principles #lorem(50) #procedure #lorem(56) #data #lorem(62) #analysis #lorem(48) = 其他标题 #lorem(10)
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/set-07.typ
typst
Other
// Error: 12-26 set is only allowed directly in code and content blocks #{ let x = set text(blue) }
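For contrast, a minimal conforming sketch (not part of the test file) with `set` placed directly in a content block, where it is allowed:

```typ
#[
  #set text(blue)
  This sentence is blue.
]
```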
https://github.com/jamesrswift/ionio-illustrate
https://raw.githubusercontent.com/jamesrswift/ionio-illustrate/main/gallery/callout-aside.typ
typst
MIT License
#set par(justify: true) #set page(width: auto, height: auto, margin:1em) #set text(size: 7pt) #import "../src/lib.typ": * #let data = csv("../assets/isobutelene_epoxide.csv") #let massspec = data.slice(1) #let ms = mass-spectrum(massspec, args: (range: (0,100), plot-extras: (this) => { (this.callout-above)(43) (this.callout-aside)(41, (44, 90), anchor: "left", height: 95%) (this.callout-aside)(42, (45, 65), anchor: "left", height: 95%) (this.title)([Isobuletene Epoxide, +70eV]) },)) #(ms.display)()
https://github.com/Kasci/LiturgicalBooks
https://raw.githubusercontent.com/Kasci/LiturgicalBooks/master/SK/zalmy/Z050.typ
typst
Zmiluj sa, Bože, nado mnou pre svoje milosrdenstvo \* a pre svoje veľké zľutovanie znič moju neprávosť. Úplne zmy zo mňa moju vinu \* a očisť ma od hriechu. Vedomý som si svojej neprávosti \* a svoj hriech mám stále pred sebou. Proti tebe, proti tebe samému som sa prehrešil \* a urobil som, čo je v tvojich očiach zlé, aby si sa ukázal spravodlivý vo svojom výroku \* a nestranný vo svojom súde. Naozaj som sa v neprávosti narodil \* a hriešneho ma počala moja mať. Ty naozaj máš záľubu v srdci úprimnom \* a v samote mi múdrosť zjavuješ. Pokrop ma yzopom a zasa budem čistý; \* umy ma a budem belší ako sneh. Daj, aby som počul radosť a veselosť, \* a zaplesajú kosti, ktoré si rozdrvil. Odvráť svoju tvár od mojich hriechov \* a zotri všetky moje viny. Bože, stvor vo mne srdce čisté \* a v mojom vnútri obnov ducha pevného. Neodvrhuj ma spred svojej tváre \* a neodnímaj mi svojho Ducha Svätého. Navráť mi radosť z tvojej spásy \* a posilni ma duchom veľkej ochoty. Poučím blúdiacich o tvojich cestách \* a hriešnici sa k tebe obrátia. Bože, Boh mojej spásy, zbav ma škvrny krvipreliatia \* a môj jazyk zajasá nad tvojou spravodlivosťou. Pane, otvor moje pery \* a moje ústa budú ohlasovať tvoju slávu. Veď ty nemáš záľubu v obete \* ani žertvu neprijmeš odo mňa. Obetou Bohu milou je duch skrúšený; \* Bože, ty nepohŕdaš srdcom skrúšeným a poníženým. Buď dobrotivý, Pane, a milosrdný voči Sionu, \* vybuduj múry Jeruzalema. Potom prijmeš náležité obety, obetné dary a žertvy; \* potom položia na tvoj oltár obetné zvieratá.
https://github.com/Avemark/typst_actix_server
https://raw.githubusercontent.com/Avemark/typst_actix_server/master/example.typ
typst
= Introduction This is a basic Typst template, converted to PDF
https://github.com/Toniolo-Marco/git-for-dummies
https://raw.githubusercontent.com/Toniolo-Marco/git-for-dummies/main/slides/practice/alias.typ
typst
#import "@preview/touying:0.5.2": * #import themes.university: * #import "@preview/numbly:0.1.0": numbly #import "@preview/fletcher:0.5.1" as fletcher: node, edge #let fletcher-diagram = touying-reducer.with(reduce: fletcher.diagram, cover: fletcher.hide) #import "../components/gh-button.typ": gh_button #import "../components/git-graph.typ": branch_indicator, commit_node, connect_nodes, branch Aliases@git-alias are often used to *shorten longer commands* or to *combine several commands into one*. For example, to create an alias that stages all files (including untracked ones) and then commits: ```bash ➜ git config --global alias.commit-all '!git add -A && git commit' ➜ git commit-all -m "Message describing changes made" ```
https://github.com/werifu/HUST-typst-template
https://raw.githubusercontent.com/werifu/HUST-typst-template/main/README.md
markdown
MIT License
# HUST-typst-template 用于华科毕业设计(本科)的 typst 模板,一键、快速、持续生成毕业论文 pdf。 ![](assets/sample.png) ## ⚠️风险警告⚠️ - 民间模板,存在不被认可风险 - 作者用此模板已成功从网安学院毕业 - 有部分难以短期解决的问题: - 无伪粗体:暂时无法解决,但是标题所需的黑体粗体在打印出来的情况下几乎看不出区别 ## 什么是 typst [typst](https://github.com/typst/typst) 是最新最热的标记文本语言,定位与 LaTeX 类似,具有极强的排版能力,通过一定的语法写文档,然后生成 pdf 文件。与 LaTeX 相比有以下的优势: 1. 编译巨快:因为提供增量编译的功能所以在修改后基本能在一秒内编译出 pdf 文件,typst 提供了监听修改自动编译的功能,可以像 Markdown 一样边写边看效果。 2. 环境搭建简单:原生支持中日韩等非拉丁语言,不用再大量折腾字符兼容问题以及下载好几个 G 的环境。只需要下载命令行程序就能开始编译生成 pdf。 3. 语法友好:对于普通的排版需求,上手难度跟 Markdown 相当,同时文本源码阅读性高:不会再充斥一堆反斜杠跟花括号 个人观点:跟 Markdown 一样好用,跟 LaTeX 一样强大 可以从[这里速通 typst](https://typst.app/docs/tutorial) 跟 word 比的优势:格式好调,玄学问题少。 ## 使用 快速浏览效果:[查看sample.pdf](./sample.pdf),样例论文源码:[查看sample.typ](./sample.typ) ### 本地编辑(推荐) 1. 下载对应平台的 typst:https://github.com/typst/typst/releases 记得先看看它的 README 2. clone 本仓库 3. 按本仓库中的 sample.typ 照葫芦画瓢即可,sample.typ 既是样例也是说明书 4. 在本项目目录里,命令行执行 `typst watch xxx.typ` 的命令即可编译同名 pdf 文件,而且一旦更新就会增量编译,推荐在 VSCode 中编辑,下载 `Typst LSP` 插件获得语法提示,使用 [`Typst Preview`](https://github.com/enter-tainer/typst-preview-vscode) 插件可以实现文本和预览之间的快速跳转与实时预览。 ### 线上编辑(不推荐) typst 也提供了线上编辑器(类似overleaf),查看本模板: https://typst.app/project/rqTPs502DAhLTQctaUmbtn (ps:浏览器可能没有微软宋体、微软黑体等学校要求的字体,不建议在该平台上生成) ## 其他特性 * 支持匿名处理,anonymous 参数设置为 true 即为匿名,会把校名以及个人信息等替换成小黑条,论文提交阶段使用,不需要再对 pdf 作特殊编辑(致谢中的敏感信息仍需自己处理) ![](assets/anony-sample.png) ## 说明 该模板仍需完善,有一定肉眼排版成分,所以有可能不完全符合华科排版要求,如果遇到不对的间距、字体等请提交 issue 说明,也欢迎 pull request 贡献。 不同学院的模版不太一致,因此在原网安学院的模版上稍加修改得到了计算机学院的模版,如果使用**计算机学院**的模版,请在 `sample.typ` 中选择导入`cs-template.typ` **Help Wanted** 笔者已毕业跑路,无法勤快维护,希望能有对本项目有兴趣的人参与,可做的事情还有很多:) ### TODO [ ] 模板翻新(基本在 Typst 0.6.0 前成型,有部分人工造轮子成分) [ ] 支持 HUST 硕士、博士论文甚至作业报告模板 [ ] template.typ 模块化 ## 参考及致谢 * https://github.com/zfengg/HUSTtex HUSTTex 项目 * https://www.overleaf.com/read/fdbtqrqrqgfg 计院模板 * https://github.com/nju-lug/nju-thesis-typst NJU 模板,文档相对完善,对中文 Typst 模板有疑问可以查询 * https://github.com/redleafnew/Chinese-STD-GB-T-7714-related-csl/blob/main/462huazhong-university-of-science-and-technology-school-of-cyber-science-and-engineering.csl 华科网安版本 CSL,稍加改写以通过 Rust 的 quick-xml 库编译
https://github.com/WinstonMDP/knowledge
https://raw.githubusercontent.com/WinstonMDP/knowledge/master/prop_logic.typ
typst
#import "cfg.typ": cfg #show: cfg = Логика высказываний Пропозициональные формулы: - Всякая пропозициональная переменная (у Клини она называется атомом, элементарной формулой) есть формула. - Если $A$ - пропозициональная формула, то $not A$ - пропозициональная формула. - Если $A$ и $B$ - пропозициональные формулы, то $(A and B), (A or B), (A => B)$ - пропозициональные формулы. Пропозициональная формула задаёт булеву функцию от $n$ переменных. Тавтология $:=$ пропозициональные формула, истиннае независимо от переменных. $models A := A$ - тавтология. $A_1, ..., A_n models B :=$ в строках, где одновременно $A_1, ..., A_n$ истинны, $B$ тоже истинна. Множество формул совместно $:= exists$ набор значений переменных, при которых все формулы множества истинны. Формула выполнима $:= exists$ набор значений переменных, при которых она истинна. $A models B <-> med models A => B$ Две формулы эквивалентны $:=$ они задают одну и ту же булеву функцию. $A <=> B := (A => B) and (B => A)$ Формулы $A$ и $B$ эквивалентны $<->$ $models A <=> B$. Пропозициональные формулы однозначны для разбора. Теорема о полноте системы связок: булева функция задаётся пропозициональной формулой. Литерал $:=$ переменная или отрицание переменной. Конъюнкт $:=$ произвольная конъюнкция литералов. Дизъюнктивная нормальная форма $:=$ дизъюнкция конъюнктов. Другая форма - это конъюнктивная нормальная форма. Булева функция задаётся дизъюнктивной нормальной формой. То же самое про конъюнктивную нормальную форму. Система связок $and, not$ является полной. $and, or, =>$ неполна. Булева функция однозначно представляется полиномом над $ZZ_2$ с многими переменными - полиномом Жегалкина. <NAME>: Набор булевых функций является полным $<=>$ он не содержится целиком ни в одном из пяти следующих "предполных классов": - монотонные функции - функции, сохраняющие нуль ($f(0, ..., 0) = 0$) - функции, сохраняющие единицу - линейные функции - самодвойственные функции ($f(1 - p_1, ..., 1 - p_n) = 1 - f(p_1, ..., p_n)$) Теория моделей: заменяя атомы истинностными значениями во всевозможных сочетаниях, мы получаем, так сказать, "модели", конкретные "реализации", воплощения того, что могут выражать высказывания. Теория доказательств: аксиомы и правила. Аксиомы (cхемы аксиом) ичисления высказываний (вообще скобки были, согласно индуктивному определению, но я их опустил): + $A => B => A$ + $(A => B => C) => (A => B) => A => C$ + $A and B => A$ + $A and B => B$ + $A => B => A and B$ + $A => A or B$ + $B => A or B$ + $(A => C) => (B => C) => A or B => C$ + $not A => A => B$ + $(A => B) => (A => not B) => not A$ + $A or not A$ Тут переменные - это "шаблонные" переменные, в которые подставляются пропозициональные формулы. В Шень различают эти переменные и пропозициональные переменные. То же в Клини. В ИВ входит ещё modus ponens: $(A, A => B)/B$. Вывод в ИВ $:=$ конечная последовательность формул, каждая из которых есть аксиома или получается из предыдущих по правилу MP. Пропозициональная формула выводима в ИВ или теоремой ИВ $:= exists$ вывод, в котором эта формула встречается. Вывод из $Gamma :=$ конечная последовательность формул, каждая из которых является аксиомой, принаждлежит $Gamma$ или получается из предыдущих по правилу MP. Другими словами, мы как бы добавляем формулы из $Gamma$ к аксиомам ИВ - именно как формулы, а не как схемы аксиом. $Gamma = emptyset =>$ формула выводима в ИВ. $A tack.r Gamma$. Вместо $emptyset tack.r A$ пишут $tack.r A$. 
$Gamma union {A}$ обозначается $Gamma, A$ Теорема о дедукции: $Gamma tack.r A => B <-> Gamma, A tack.r B$ Правило подстановки разрешает заменить в выведенной формуле все переменные на произвольные формулы (вхождения одной переменной должны заменяться на одну и ту же формулу). После добавления такого правила класс выводимых формул не изменится, но теорема о дедукции перестанет быть верной. 10 аксиома интуиционистская, но при наличии 11 выводится. $Gamma$ противоречиво $:= Gamma tack.r A, not A$ $Gamma$ полное $:= cases(delim: "[", Gamma tack.r A, Gamma tack.r not A)$ Теорема о корректности ИВ: $Gamma tack.r A -> Gamma tack.double.r A$ Вторая форма: совместное множество формул непротиворечиво. Теорема о полноте ИВ: $Gamma tack.double.r A -> Gamma tack.r A$ Вторая форма: непротиворечивое множество формул совместно. Теорема о компактности ИВ: $forall$ конечное подмножество $Gamma$ совместно $-> Gamma$ совместно. Это было ичисление гильбертовского типа.
https://github.com/Myriad-Dreamin/typst.ts
https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/fuzzers/corpora/layout/pagebreak-weak_00.typ
typst
Apache License 2.0
#import "/contrib/templates/std-tests/preset.typ": * #show: test-page // After place // Should result in three pages. First #pagebreak(weak: true) #place(right)[placed A] #pagebreak(weak: true) Third
https://github.com/GeorgeDong32/GD-Typst-Templates
https://raw.githubusercontent.com/GeorgeDong32/GD-Typst-Templates/main/templates/homework.typ
typst
Apache License 2.0
#import "../functions/style.typ": * #import "../functions/booktab.typ": * #import "../functions/dirac.typ": * #let homework( subject: "课程", title: "作业一", name: "张三", stdid: "11223344", time: "2023年10月9日", body ) = { set document(title: title) set page(paper: "a4", numbering: "1", number-align: center, margin: (top: 3cm, bottom: 3cm, left: 2cm, right: 2cm)) set text(font: fonts.text, lang: "zh", size: 11pt) set heading(numbering: "1.1.") show raw.where(block: true): block.with( fill: rgb(248, 248, 248), inset: (x: 1.25em, y: 1em), width: 100%, radius: 4pt, ) show raw.where(block: true): par.with( first-line-indent: 0em, justify: true, leading: 8pt, ) show raw.where(block: false): box.with( fill: rgb(248, 248, 248), inset: (x: 5pt, y: 0pt), outset: (y: 4pt), radius: 3pt ) show raw: text.with( font: fonts.code, size: 1em, ) show heading: it => [ // Cancel indentation for headings of level 2 or above #set par(first-line-indent: 0em, hanging-indent: 2em) #let sized_heading(it, size, weight, mt, mb) = [ #set text(size, weight: weight) #v(mt) #if it.numbering != none { counter(heading).display() h(0.1em) } #text(size, weight: weight, it.body) #v(mb) ] #if it.level == 1 { sized_heading(it, 20pt, "semibold", 0.5em, 0.3em) } else if it.level == 2 { sized_heading(it, 16pt, "semibold", 0.5em, 0.2em) } else if it.level == 3 { sized_heading(it, 14pt, "semibold", 0.5em, 0.1em) } else { sized_heading(it, 12pt, "semibold", 0.5em, 0.1em) } ] show link: underline let fieldname(name) = [ #set align(right + horizon) #set text(font: fonts.text) #name ] let cell = rect.with( width: 100%, radius: 6pt, stroke: none ) let fieldvalue(value) = [ #set align(left + horizon) #set text( font: fonts.text, weight: "medium", size: 11pt ) #cell(value) ] set page(header: align(center)[ #grid( columns: (1.5fr, 1fr, 1fr, 1fr), rows: (12pt,0pt), gutter: 1em, fieldvalue(subject), fieldvalue(title), fieldvalue(name), fieldvalue(stdid) ) ]) align(center)[ #block(text(weight: "semibold", 30pt, subject)) #v(20pt, weak: true) #block(text(weight: "medium", 24pt, title)) #v(20pt, weak: true) #set text(13pt) #text(time) ] set align(left + top) set par(justify: true, first-line-indent: 0pt, leading: line_height) set math.vec(delim: "[") set math.mat(delim: "[") show par: set block(spacing: line_height) body }
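A usage sketch for the `homework` template above; the import path and field values are illustrative assumptions.

```typ
#import "templates/homework.typ": homework   // hypothetical relative path
#show: homework.with(
  subject: "Linear Algebra",   // invented example values
  title: "Homework 2",
  name: "Jane Doe",
  stdid: "12345678",
  time: "October 2023",
)

= Problem 1
Solution text goes here.
```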
https://github.com/Leadlegend/Curriculum-Vitae
https://raw.githubusercontent.com/Leadlegend/Curriculum-Vitae/main/modules/skills.typ
typst
Apache License 2.0
#import "../template.typ": * #cvSection("Skills") /* #cvSkill( type: [Languages], info: [English #hBar() French #hBar() Chinese] ) */ #cvSkill( type: [Languages], info: [Python #hBar() C/C++ #hBar() Java #hBar() HTML/CSS #hBar() Bash #hBar() SQL] ) #cvSkill( type: [Developer Tools], info: [Docker #hBar() Git #hBar() Google Cloud Platform #hBar() $L^A T_E X$] )
https://github.com/polarkac/MTG-Stories
https://raw.githubusercontent.com/polarkac/MTG-Stories/master/stories/050%20-%20Phyrexia%3A%20All%20Will%20Be%20One/003_Episode%201%3A%20Uncontrolled%20Descent.typ
typst
#import "@local/mtgstory:0.2.0": conf #show: doc => conf( "Episode 1: Uncontrolled Descent", set_name: "Phyrexia: All Will Be One", story_date: datetime(day: 13, month: 01, year: 2023), author: "<NAME>", doc ) Kaito couldn't have answered if someone had asked him what he was expecting to find in New Phyrexia. The information they had going in was too scant in some places, and no one still living had ever witnessed a fully compleated plane. They had their intel and their recon and everything they could have to prepare them for the incursion, but he still didn't know what he'd been expecting—only, roughly, what he hadn't been. #figure(image("003_Episode 1: Uncontrolled Descent/01.jpg", width: 100%), caption: [Art by: <NAME>], supplement: none, numbering: none) He certainly hadn't been expecting a feeling like slamming into a wall of electrostatic wind—not enough to do actual damage, but enough to disorient, to distract, and inevitably, to steal consciousness away. And now that it had happened, he certainly hadn't been expecting New Phyrexia to look like one of the nicer tourist beaches of Kamigawa. What he could see of New Phyrexia was all pristine sand without a sign that anything more dangerous than sunbathing had ever happened here. It was pleasant. So pleasant. New Phyrexia wasn't a peril, it was a paradise, and he should just relax and let it wash over him like a wave from the welcoming sea~ The crashing of that sea rang in his ears as he closed his eyes and sank deeper into the sand. Part of him knew Phyrexia would recognize his presence soon and react the way any dangerous beast reacted to an intruder. A small speck of unaddled coherence at the edge of his consciousness screamed at him to wake up, wake up, #emph[snap out of it] ! Phyrexia was a danger. He wouldn't #emph[be] here if Phyrexia wasn't a danger. Kamigawa was under threat, and he had to do whatever was within his power to protect everything he'd ever cared about. His friends, his plane, his sister~ he came here to save them all. But the sand was warm and inviting, and he couldn't find it in himself to move until small, strong hands grabbed him by the shoulders and jerked him into a sitting position. They felt familiar, like hands that he should know. They also felt like an attack, and so he thrashed, trying to pull away. The small, screaming corner of his mind screamed even louder, trying to remind him that fighting back should have been his #emph[first] thought, his #emph[first] reaction to even a whisper of hostile action, but no: only futile thrashing seemed to suit. One of those small, strong hands released his shoulder, and he was able to pull briefly free, ready to sink back into peace and pleasure, before it slammed into his cheek, striking him just below the eye so hard that he heard the crack as much as he felt it. He recoiled, eyes jerking open, and for the first time, he realized that what he'd taken for the sound of waves was actually the sound of metal clashing on metal, spells impacting their targets, and grunts of exertion. Someone screamed, and he knew, without question, that before the blow he would have heard it as some kind of seabird flying overhead—if he'd been able to hear it at all. "There," said the Wanderer with some satisfaction, releasing his other shoulder and shaking the impact out of her hand, knuckles reddened but otherwise unscathed. "I was wondering when you were going to join us." "Join—?" Kaito paused, thoughts flickering back to the wall of static wind. 
The wall he had remembered as pleasant, even peaceful, only a moment before. But it hadn't been, had it? It had been~ it had been~ it had been something he couldn't remember, except for the sound of screams. Some of them might even have been his own. He grabbed instinctively for his sword, body suddenly flooded with the adrenaline that should have been there all along, and froze as he realized his equipment was gone. No sword: no small, friendly spirit emulating a tanuki drone in form and function. Phyrexia had slapped him down when he should have been untouchable and stripped him bare in the same instant. His eyes flicked back to the Wanderer, just in time to catch her momentarily disappearing, guttering out like a dying candle. "No," he said, shaking his head fiercely. "No. You need more time. #emph[I] need more time. You can't go before you tell me what I missed." "A—barrier," she said. "We didn't expect it, and it seems to have—blocked my ability to anchor myself. Can't—stay here. Losing grasp. Have to—tell you—" A look of profound frustration crossed her face, and she turned, shouting at a point slightly beyond Kaito's right shoulder, "Nahiri! Stop—toying with that—thing!" Kaito turned, loath as he was to take his eyes off the Wanderer when she was so close to vanishing, and beheld Nahiri, sword in hand and cheeks faintly flushed with exertion, the heat of her blood showing through the slatey tone of her skin. She was dancing—no, #emph[fighting] with a figure that looked to have been shaped from liquid metal spliced with panels of wiry cabling, like a fever dream of mechanical poetry escaped from the inventor's bench and turned against the world. It seemed impossible that anyone, even the lithomancer, could fight this construction and win. #figure(image("003_Episode 1: Uncontrolled Descent/02.jpg", width: 100%), caption: [Art by: <NAME>], supplement: none, numbering: none) Then the air flashed around her, igniting with a crash as loud as thunder, as Nahiri called the glittering metallic sand of New Phyrexia to join her dance. It rose grain by grain to swirl around her, a storm even deadlier than her hail of shaped stone blades, and crashed together on the fighting figure, overwhelming it as sand invaded exposed machinery and nasal passages alike, putting down Nahiri's opponent in the blink of an eye. As it fell, Nahiri was there, stepping forward and driving her primary blade home, through the center of the creature. She twisted once, and the shape beneath the mounded sand was still. "#emph[Nahiri] ," snapped the Wanderer, voice strong enough that for a moment Kaito dared to hope she was stabilizing. He turned back to her, and his heart sank. She was still fading in and out, close to being pulled back into the Blind Eternities. It must have been taking immense power of will to stay even this long. Nahiri trotted across the metal sands as easily as if she were walking on solid ground, pausing to bob her head to Kaito before focusing on the Wanderer. "You called?" The Wanderer frowned. "Scrambled longer—than—need to explain—what he—missed," she said, words spaced oddly as she flickered too far out of phase to be heard. "Right," said Nahiri. She focused on Kaito. "I don't know whether they knew we were coming or whether they're just paranoid monsters, but we slammed into some sort of planar shield when we breached New Phyrexia. We should have been fine. We pretty obviously weren't. I don't know where most of our team wound up. The three of us crashed down here. Sand got you?" 
Numbly, Kaito nodded. "Got me at first, too," said Nahiri. "Luckily for me, this whole place is made of metal—not the normal kind, but close enough for my purposes, even if this stuff would rather harm us than help us. It's a passive weapon. It'll still kill you if you let it. I shook myself out of it, found the Wanderer standing over you, shifting in and out of the plane. Not sure she'll be hanging on much longer." "What was that #emph[thing] you were fighting?" asked Kaito, not wanting to think about the possibility of losing the Wanderer to the Blind Eternities for even a little while. "One of the locals," said Nahiri, shrugging. "Fast. Pretty lethal. No real challenge." "You're not hurt?" "Just a scratch. Nothing I can't walk off." She reached up and touched the back of her neck with her free hand, fingers coming away damp with blood—not smeared, as they would have been had she been direly injured. "My blood's still red. No oil. I'll be fine." She held up her bloody fingers for his inspection, smiling faintly. Behind him, the Wanderer's eyes widened, and she flickered faster, apparently gathering her strength for another exclamation. Nahiri lowered her hand. "Come on," she said. "I don't know where we are, but we need to meet the others on the Furnace Layer, and we don't want to hang around anywhere Phyrexia wants us to be. Let's get moving before this place comes up with defenses beyond a few foot soldiers and some hypnotic sand." "I lost my equipment," said Kaito. "Is it in the sand?" He shook his head as he looked around. "I don't #emph[think] so," he said. "If my drone were here, she'd be digging her way back to me. You're the metal detector, not me. Do you feel any Kamigawa steel near us?" "Sorry. Just Phyrexian metals," said Nahiri. "We'll find it," Kaito said. "And we'll find the others. Do you know which way to go?" "This way," said Nahiri, and started walking. "If we stayed on our original trajectory as we fell, the next landing zone is in this direction. If we didn't, then we're just lost in Phyrexia, and you should start praying to whatever you believe in." "How did you orient yourself so quickly?" asked Kaito, trying to slow her down enough for him to help the Wanderer across the sand. Not that she would normally have needed any assistance, but with as uncertain as her grip on this plane was, he wanted to do anything he could to make this easier on her. "I've had practice," Nahiri replied. "I saw explosions coming from over that way. Things got chaotic over there." She had a grim satisfaction in her tone. It was hard to tell whether she was proud of their companions for wrecking things, envious that she hadn't been given the same opportunity, or pleased that she'd been able to finish her own fight without any major difficulties. Nahiri could be confusing that way. He didn't know her well enough to tease her meaning out of everything she said yet, and wasn't sure, under the circumstances, that he was going to get the chance. They trudged across the sand—sand that wasn't anything of the sort when Kaito looked at it more closely; what he'd been taking for a seashore was an infinite desert of particulate metal, pieces of Mirrodin ground into fine dust by Phyrexian power. The Wanderer paced beside him, flicking and silent, clearly spending all her energy on remaining in tune with the plane. He glanced once again to Nahiri. "Nothing here is what it looks like," she said, voice brusque. "You can't trust anything Phyrexian. 
It's all lying to you, all the time, whether it knows it or not. Keep moving." Kaito kept moving. The desert stretched out ahead of them, extending to the distant base of a massive, incomprehensible monument built according to some twisted parody of geometry. They pressed on into the shadow of the titanic monument, a tiny trio of attackers moving through a hostile land, and nothing else moved, and they were alone, and the oppressive weight of Phyrexia was all around them, and they were never going to be alone again. The landscape grew more and more ordered as they pressed on, terrible in its alien symmetries. Vast constructs of gleaming metal cast their shadows across the shining ground, celebrating unthinkable victories, glistening in spots with exposed flesh that made Kaito's skin crawl. Were they leftover structures from Mirrodin, or the sleeping forms of Phyrexian goliaths? Some mysteries were better left unsolved. At least Mirrodin's five suns still shone, dim through the dense fog. The group rounded the terminus of a low wall that seemed to have been forged from half-melted bone mixed with silver and stopped at the sight of a stone statue hanging suspended between two iron pillars in a tangle of steel-sheened cables. It depicted an elf, short and muscular, and so perfectly carved that Kaito could have sworn he saw it breathe. It looked entirely out of place in the Phyrexian tangle of metal and bone. Nahiri hissed sharply. Kaito glanced at her, confused. "That stone," she said. "That's a Zendikari hedron. Either Phyrexia has reached Zendikar, or something else is going on here." The Wanderer pointed to the figure. Kaito followed her finger. Why would a statue be wearing #emph[clothes] ? More, why would a statue be #emph[armed] ? A bronze bracer holding a double-bladed sword was strapped to its left arm. "He's one of ours," said Nahiri abruptly. She started to move forward. Against his own better judgement, Kaito put a hand on her arm. She stopped. "On Kamigawa, this would be a trap," he said. She nodded slow understanding. "If it is, we take the bait," she said. Kaito started looking for things he could use as projectiles. Nahiri's metal knives would have been ideal, but he wasn't sure he could wrest a single ingot away from the ancient lithomancer even if he had wanted to. The debris under the statue would have to do. Kaito reached out telekinetically, pulling a cloud of metal slivers and shrapnel into the air around him. It was nothing compared to his blade or Himoto, his tanuki, but it was infinitely better than going into a fight unarmed. Not that they knew this was going to be a fight. The statue might be nothing, and so far, they hadn't been attacked. Carefully, the trio moved toward the statue. They were almost there when the cables holding it up writhed into sudden activity, like a nest of snakes waking from hibernation. Some of them unsnarled themselves completely and reared, increasing the impression of serpentine awareness. Kaito tensed, preparing to strike with his array of makeshift arrows. The Wanderer raised a hand, motioning him into stillness. He stopped, still tense but not yet striking, to watch Nahiri move forward with cautious grace. The cables twisted to track her motion. The statue opened its eyes and started to struggle as the cables drew tighter. "Definitely one of ours," said Nahiri. "He looks uninjured. We should be able to cut him free." "So we attack?" Kaito looked to the Wanderer. 
She nodded assent, and he unleashed the confused fury he'd been carrying since the beach in a hail of shrapnel, raining crude razors down on the nest of cables in a twisting swarm of pirouetting, swirling slashes. The cables responded, lashing out at the cloud, the clash forming a discordant symphony of cracking, exploding metals. In the meantime, Nahiri spun into action, her own blaze of knives darting forward to continue what Kaito's makeshift armory had begun, slicing and cutting away at the cable creature with an artist's precision. The statue dipped lower and lower as the taut metallic sinews holding him were cut away, until, with a loud snapping sound, the last one broke and dumped him to the ground. The Wanderer rushed forward and knelt beside him, feeling for a pulse. The stone man responded by swinging a mighty, if disoriented, haymaker at her. His fist passed through the white-haired woman's body like she was a ghost, leaving her frowning disapprovingly. "She's not quite here," said Kaito, following the Wanderer's footsteps to offer the stone man his hands. "Please don't hit her again." "What—" The former statue let himself be pulled to his feet and looked frantically around, finally focusing on Nahiri, who was applying a bandage from Kaito's med pack to the back of her neck, pressing the magically adhesive edges firmly down. "What #emph[happened] ?" The Wanderer, who had been silent since calling Nahiri from her fight, swallowed and clearly gathered her strength. "Hit a—barrier," she managed, voice flickering in volume like she was fluxing rapidly near to far. "Everyone—split apart. Trying—find others." Nahiri looked over at them. "Are we going to have to do this every time we find someone?" she asked. "Because it's going to get real old if we do." The statue laughed, looking heartened by her sniping. "We may be lost in a hostile plane, but some things remain the same; heroes clash when first they meet." The stone drained from his skin, replaced by a light tan. He offered the Wanderer a polite bow. "I am <NAME>, Prince of Kaldheim. I thank you for your counsel." She opened her mouth, but no sound came out. A look of frustration crossed her face. "The Wanderer isn't stabilizing," said Kaito. "I don't know how she's managed to hold on for this long, but if we don't rest, we're going to lose her soon." "She'll be back," said Nahiri. "But will it matter if we don't wait for her?" Nahiri didn't have an answer for that. She looked from Kaito to the Wanderer and repeated, "We have to keep moving." As a group, the four of them resumed their trek through the blasted wastes of New Phyrexia. There was beauty in the grim monuments that stood in the distance, but having seen the living cables holding Tyvar, Kaito was all too aware that everything they passed was a construction of this bitter plane, and not grown or nurtured by the plane's own nature. Anything could become a threat at any moment. The Wanderer continued to flicker and did not speak again. She stayed close to Kaito, looking around them with evident worry. Something was clearly bothering her—he wished he had some way to help her, but they couldn't afford to stop long enough for him to try. #figure(image("003_Episode 1: Uncontrolled Descent/03.jpg", width: 100%), caption: [Art by: <NAME>], supplement: none, numbering: none) On they went, until on the horizon a small, ramshackle assemblage of tents and lean-tos appeared, small figures visible moving between them. Nahiri and Tyvar tensed. 
Kaito, more concerned about getting the Wanderer to a place of rest, motioned for them to be calm. The group continued moving until the figures came into clearer view: they were Mirrans. Most were human, with bronze skin and gold armor, flashes of white fabric visible between the plates. Leonin also moved among them, comforting feline figures. Soft gold glinted from what little exposed skin could be seen around their armor. All of them moved with the natural grace of the organic, rather than the strange gait of the compleated, and Kaito exhaled. Safety. Such as it could be found on this plane, it was there, ahead of them. He turned to the Wanderer, intending to say something to boost her spirits and bolster her strength. His exhale became a sigh as he saw that she was gone. She had been able to hold on long enough to see her childhood friend out of the initial danger, and no longer than that. "She'll be back," said Kaito, as much to himself as to the others. "She always comes back." "Take heart, friend," said Tyvar, clapping him on the shoulder. "We have far yet to go." "Yes, but~ I wanted us to make it here together," said Kaito, and he started walking again. Together, the three of them approached the camp. A wiry human woman with short-cropped red hair and fair skin devoid of metal ornamentation came out to meet them, a staff topped with a gleaming light held low by her side, posing no immediate threat, but ready to become one. #figure(image("003_Episode 1: Uncontrolled Descent/04.jpg", width: 100%), caption: [Art by: <NAME>], supplement: none, numbering: none) "You aren't Phyrexian," she said, voice sharp. "You're the ones Koth told us were coming. I'm Melira. I'm a friend, and a healer. Are any of you hurt? Do you require aid?" "No," said Tyvar, voice bright in the cool, still air. "We came following Karn and the Gatewatch's call, but we were lost on arrival, and you're the first friendly faces we've seen. Are there others like us here?" "Ah," said the woman, understanding. "I'd heard rumors Ele<NAME> was setting up some sort of defense barrier against people like you. Guess she got it up and running. The rest of your people should be gathering two layers down, in the Furnace, assuming they made it that far." She began moving away from the small encampment, gesturing for the three of them to follow. "This is the Monumental Facade," she said. "When the Phyrexians took Mirrodin, they built a shell around our plane, trapping those of us who'd survived to keep fighting underneath. We wouldn't be allowed to see the suns from our original home any longer. This is where they send their toys to fight each other to the death, but we came up to find you. You would have had a harder journey if we hadn't been here, drawing fire and trying to make sure nothing got away to report our location." So, Phyrexia didn't have surveillance everywhere? Kaito nodded, taking this as some of the first good news they'd received since arriving. "Mirrodin—the #emph[real] Mirrodin—is beneath us," Melira continued. She stopped at the center of an oddly flat patch of ground, looking at each of them in turn before finally focusing on Nahiri. "You're the lithomancer they said was coming, yeah?" "I am," said Nahiri, knives shifting in the air around her. "Why?" "It'll help, that's all," said Melira, and she slammed the butt of her staff down on the center of the clear patch.
There was a momentary pause, long enough for Melira to look annoyed and glance over her shoulder like she was waiting for something, and then the ground dropped out from under their feet as a roughly ten-foot square of what Melira called the Monumental Facade crumpled inward. The explosive charges had been excellently set. Kaito had to admire them, even as he realized he was falling. This was a new and unsettling development. Above them, the thin shell of the plane looked like a shattered plate of black metal. Beneath them, the landscape rushed up to make their acquaintance, no more than a hundred feet below. Nahiri scowled at a smiling Melira, who didn't look the least bit concerned, and the chunks of falling earth beneath them glowed dully hot as the lithomancer caught hold of them and slowed their descent, creating a shallow shell they could ride the rest of the way to the ground without being injured. Melira actually laughed at that. Kaito blinked. "Why are you laughing? We could have all been killed!" "Koth said you were powerful mages come to save the plane," said Melira. "Well, the Facade's shell breaks all the time, with or without our help. If you couldn't handle a little fall, you weren't going to succeed anyway. Though this is better than I would have hoped. Once we touch down, we'll be near Lowlight—we can make for the lacuna and head into the Furnace Layer to meet the rest of the survivors." Kaito didn't like that word. "Survivors" felt like a premonition, and with the Wanderer already gone, it wasn't something he wanted to think about. Still, he schooled his expression to neutrality. "We're very grateful for your help," he said, and glanced to Nahiri, waiting for her to say that she'd been injured when she and the Wanderer had found him in the sand. She did nothing of the sort, keeping all her focus on guiding them to a landing. It had only been a scratch, after all. Better not to break her concentration for a scratch when it was nothing that required healing. Tyvar had other questions. He waved a hand, indicating the land around them. "We descend farther? Is this not the Furnace Layer?" "No," said Melira. "The Phyrexians call this Mirrex. They can't even allow us the grace of our true name. I told you the real Mirrodin was beneath us. This is all that remains of our home." "I see," said Tyvar, subdued. "The main force of our assault teams will be gathered in Lowlight, ready to aid you in your efforts," said Melira. "There's no price too great for us to pay for a free Mirrodin. This was a beautiful land once. Fate allowing, it will be again." "For Mirrodin, and the Multiverse," Kaito said, and Melira smiled at him in brief unity before moving to look over the edge of Nahiri's makeshift platform. Mirrodin—what remained of it—was a wasteland beneath them, withered from lack of light, without even the alien beauty of the surface. If Phyrexia had done this to break the spirit of the resistance, they had probably come closer than anyone wanted to believe. Nahiri guided their makeshift conveyance to a stop on the surface, looking to Melira. "Is the whole place like this?" she asked. Melira nodded. "It is. You keep going down, and there's always some new horrible surprise waiting for you." She hopped off the piece of shell to the ground, which was actual stone here, interspersed with more of those metallic hexes. "At least they're predictable about it. Everything wants to kill or compleat you. No exceptions." "Not even you?" asked Nahiri. "Me?" said Melira. "I'm immune. 
It's why the Resistance lets me move around without a guard, and why Koth had me watching for you. Come on. Lowlight's this way." She began moving briskly across the wasteland, leaving the Planeswalkers to follow her toward the low, battered shape of a Mirran camp. Once they reached the border, she led them straight through to a low wall of what looked like razored glass, gesturing to it broadly. "We took the lacuna when Koth said you were coming," she said. "It'll take us down to the Furnace Layer. We would have released it soon, assuming no one was coming." "Then we go down," said Nahiri. Melira looked halfway amused. "Have any of you used one of these before?" "No," said Kaito. "They're fun," she said. "They play games with gravity internally, so you don't fall beyond that first jump. Starting off is always harder than continuing." Melira strode toward the lacuna, easily stepping up the series of crates that had been stacked next to the wall, and leapt off. The Planeswalkers followed her. Upon climbing the same crates and looking down, they saw her standing on the floor of a tunnel of some sort, dropping into the depths of Phyrexia, lit from within by a pale, sourceless illumination. She looked back over her shoulder at them. "Well?" she asked. "Are you doing this?" Nahiri leapt without hesitation, and Kaito jumped after her. There was a moment of sickening disorientation, and then he was standing on the wall of the tunnel. Looking back, Tyvar was the one who looked like he was positioned in defiance of gravity, something the big man clearly realized, because he laughed and jumped into the lacuna. "Onward, friends," he said, and strode forward. Kaito paced him, and in short order, the pair had passed Nahiri, descending into Phyrexian soil. Melira hung back slightly with Nahiri, glancing at the bandage on the back of the other woman's neck, but not asking—not yet. #v(0.35em) #line(length: 100%, stroke: rgb(90%, 90%, 90%)) #v(0.35em) Nahiri didn't feel right. She was familiar with her body and the way it was meant to come together, bones and tissues laced like stones in good soil, and right now, something felt awry. The cut at the back of her neck, the small, inconsequential injury, throbbed, intruding on her awareness more than anything so minor had the right to. She hung back a little, allowing Melira to pass her, before she reached behind herself to feel the bandage she'd taken from Kaito's pack. The gauze was oddly bunched, as if something had pressed against it from below. Peeling back the bandage, she touched the surface beneath with delicate fingers and found no injury, only smooth skin and a short, slick protrusion that had no business growing there, as if her bones had decided to reshape themselves. She pulled her hand away with a hiss of dismay, somehow unsurprised to see that they gleamed with the same glistening oil as had tipped the spears of the Phyrexians. She was infected. She was already lost. #figure(image("003_Episode 1: Uncontrolled Descent/05.jpg", width: 100%), caption: [Art by: PINDURSKI], supplement: none, numbering: none) She knew she should tell her companions—but how? And what good would it do any of them to know? They couldn't kill her, and if they tried, she would fight back, regardless of her condition. She couldn't leave, or she would carry the taint away from this doomed and dying plane to infect another. The Mirran was supposedly a healer, but even a healer couldn't stop this—could they? 
No, it was best to get them as far as she possibly could before she succumbed and became something that was easier for them to destroy. Pressing the bandage back down, she re-covered the wound, and pressed onward. #v(0.35em) #line(length: 100%, stroke: rgb(90%, 90%, 90%)) #v(0.35em) The Mirran camp, small as it had been, was razed and gone when the white-haired woman in the wide-brimmed hat appeared, looking warily around, sword at the ready. Nothing moved or motioned to attack her. "Kaito!" she cried. "Kaito, are you here?" Nothing answered. A patch of ground had fallen inward not far away, and the Wanderer ran toward it, recognizing it for what it was. She peered down into the depths and saw no sign of her companions; only rubble on the distant Mirran ground. They were gone. She had returned from the Blind Eternities too late, and they were lost. "I could have warned them," she moaned. "They have no idea what they're walking into. We were naive to think this could be so easily achieved." She straightened. Her time on this plane would be brief. If she was going to see them again, she would. Until that happened, all she could do was wait for her departure and hope for their safety. It wouldn't be enough. It had to be enough. It was all she had.
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/break-continue-00.typ
typst
Other
// Test break.
#let var = 0
#let error = false
#for i in range(10) {
  var += i
  if i > 5 {
    break
    error = true
  }
}

#test(var, 21)
#test(error, false)
https://github.com/HollowNumber/DDU-Rapport-Template
https://raw.githubusercontent.com/HollowNumber/DDU-Rapport-Template/main/src/lib/recursive_count.typ
typst
#let recursive_count(_body) = { let r(cont) = { let _C = 0 if type(cont) == content { for key in cont.fields().keys() { if key == "children" { for _child in cont.fields().at("children") { let resp = r(_child) _C += resp } } else if key == "body" { _C += r(cont.fields().at("body")) } else if key == "text" { _C += cont.fields().at("text").len() }else if key == "child" { _C += r(cont.at("child")) } else if key == "block" { if cont.fields().keys().contains("text") { _C += cont.fields().at("text").len() } } else if key == "caption" { _C += r(cont.fields().at("body")) } else if key == "label" { _C += r(cont.fields().at("body")) } else if key == "supplement" { _C += r(cont.fields().at("body")) } else if ("func", "double", "key", "keys", "update", "base").contains(key) { // we can skip those } else if key == "t" { // math output - idk if I should count it } else if key == "b" { // math output - idk if I should count it } else if key == "path" { // image } else if key == "data" { } else if key == "accent" { // return [#cont] } else if key == "num" { // return [#cont] } else if key == "denom" { } else if key == "dest" { // return [#cont] } else if key == "level" { // return [#cont] } } } else if type(cont) == array { for item in cont { _C += r(item) } } return _C } return r(_body) }
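The counter returns a plain integer, so it can be dropped straight into markup. A minimal usage sketch (not part of the repo; the import path is an assumption):

#import "lib/recursive_count.typ": recursive_count // hypothetical path

// Sums the lengths of the text fields found while walking the content tree.
#let sample = [Some *bold* text and a #emph[nested] element.]
Character count: #recursive_count(sample)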
https://github.com/tilman151/pypst
https://raw.githubusercontent.com/tilman151/pypst/main/docs/examples/document/my-document.typ
typst
MIT License
#import "@preview/unify:0.6.0": qty = My Document You can add a paragraph by simply passing a string. Each element is separated in the rendered Typst code by a blank line. You can all any builtin function. #lorem(50) == Using Imported Functions You can use the function imported earlier. It works #qty("100", "%"). == Including Lists You can include bullet point lists: - You can also include lists. - This is the second item. - This is the third item. You can also include numbered lists: + You can also include lists. + This is the second item. + This is the third item. Lists can also be nested: - This is the first item. - This is the first nested item. - This is the second nested item. - This is the second item. == Dynamically Generate Elements You can dynamically generate elements by using a loop - This is item 1. - This is item 2. - This is item 3. == Including Figures You can include figures: #figure( image("img.png"), caption: "This is a placeholder image." )
https://github.com/DawodGAMIETTE/ENSEA_template-Typst
https://raw.githubusercontent.com/DawodGAMIETTE/ENSEA_template-Typst/master/src/template.typ
typst
// The project function defines how your document looks. // It takes your content and some metadata and formats it. // Go ahead and customize it to your liking! #let project(title: "", objective: [], authors: (), body) = { // Set the document's author and title set document(author: authors, title: title) // Configure the page settings including paper size, margins, numbering, and header set page(paper: "a4", margin: (top: 100pt), numbering: "— 1/1 —", number-align: center, header: stack(dir: ltr, image("media/logo-ENSEA.jpg", width: 10%), align(right)[*#title* #linebreak() #authors.map(strong).join(", ", last: " et ") #linebreak() Promotion 2025])) // Set the text font, size, language, and region set text(font: "New Computer Modern", size: 12pt, lang: "en", region: "gb") /* for French => lang: "fr" and region: "fr" for other countries: lang: https://en.wikipedia.org/wiki/ISO_639 region: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 */ // Set paragraph justification set par(justify: true) // Configure heading numbering and spacing set heading(numbering: "1.1.") show heading: set block(spacing: 1em) // Set list indentation and marker style set list(indent: 15pt, marker: [--]) // Configure math equation numbering and supplement set math.equation(numbering: "(1)", supplement: [Eq.]) // Style figure captions to be italicized show figure.caption: emph // Align figure captions based on their width show figure.caption: it => { layout(size => style(styles => [ #let text-size = measure(it.supplement + it.separator + it.body, styles) #let my-align #if text-size.width < size.width { my-align = center } else { my-align = left } #align(my-align, it) ])) } // Disable paragraph justification for raw blocks eg. codelst package show raw.where(block: true): set par(justify: false) // Center align the title block align(center)[ #block(text(weight: 700, 1.75em, title)) ] linebreak() heading(outlined: false, numbering: none, text(0.85em, smallcaps[objectives])) objective body }
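Because `project` takes the document body as its trailing parameter, it is normally applied through a show rule. A minimal usage sketch (not from the repo; the file path, title, objective, and author names are invented):

#import "template.typ": project // hypothetical path

#show: project.with(
  title: "Rapport de TP",
  objective: [Summarise the goal of the lab in a few sentences.],
  authors: ("Alice Martin", "Benoit Dupont"),
)

= Introduction
#lorem(30)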
https://github.com/dyc3/good-typst-template
https://raw.githubusercontent.com/dyc3/good-typst-template/main/README.md
markdown
The Unlicense
# good-typst-template

My personal typst template repo that includes CI, mermaid/plantuml figures, and other useful bits.

## Setup

1. Install [typst](https://typst.app)
2. Install [mermaid-cli](https://github.com/mermaid-js/mermaid-cli) (requires node.js)

   ```
   npm install -g @mermaid-js/[email protected]
   ```

Note: Running in WSL may require some additional setup for mermaid to work. See: https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-puppeteer-on-wsl-windows-subsystem-for-linux

## Usage

If you have `make` installed, you can run these:

```bash
# build the document
make

# rerender all figures
make figures
```

Otherwise, you can run the commands in the `makefile` manually.

```bash
# produces main.pdf
typst compile main.typ

# rerenders all figures
./scripts/render-figures.sh
```
https://github.com/Toniolo-Marco/git-for-dummies
https://raw.githubusercontent.com/Toniolo-Marco/git-for-dummies/main/slides/theory/branch.typ
typst
#import "@preview/touying:0.5.2": * #import themes.university: * #import "@preview/numbly:0.1.0": numbly #import "@preview/fletcher:0.5.1" as fletcher: node, edge #let fletcher-diagram = touying-reducer.with(reduce: fletcher.diagram, cover: fletcher.hide) #import "../components/gh-button.typ": gh_button #import "../components/git-graph.typ": branch_indicator, commit_node, connect_nodes, branch Branches are used to work on different features or bugfixes separate from the main branch (`main` or `master`). Through the use of branches in addition to maintaining proper project organization, it allows us to work on multiple features in parallel without interfering with the work of other team members. Branches can be created, renamed, moved, merged (_merge_) and deleted. Merge as you might guess is a key operation, which allows us to merge features developed in two different branches into one, or to bring changes from one branch into the main branch. --- #align(center,[ #stack(dir: ttb,spacing: 15%, [The most common workflow:], scale(100%)[ #set text(10pt) #fletcher-diagram( node-stroke: .1em, node-fill: none, spacing: 4em, mark-scale: 50%, branch( name:"main", color:blue, start:(0,0), length:7), edge((7,0),(8,0),"--",stroke:2pt+blue), //... other commits // develop branch connect_nodes((1,0),(2,1),orange), branch( name:"develop", color:orange, start:(1,1), length:5), connect_nodes((6,1),(7,0),orange), // feature branch connect_nodes((3,1),(4,2),yellow), branch( name:"feature", color:yellow, start:(3,2)), connect_nodes((4,2), (5,1),yellow), // 2nd feature branch connect_nodes((2,1),(3,3),teal), branch( name:"2nd feature", color:teal, start:(2,3), length:3), connect_nodes((5,3), (6,1),teal), ) ], [ All developed features are merged with _develop_, later when you are sure that the code is stable and ready for production, you can merge develop into _main_.] ) ])
https://github.com/ohmycloud/computer-science-notes
https://raw.githubusercontent.com/ohmycloud/computer-science-notes/main/Misc/mimic_chinese_lorem.typ
typst
Sign on the dotted line: #box(width: 1fr, repeat[。])

#set text(10pt)
#v(8pt, weak: true)
#align(right)[等光落下啊]

#show outline.entry.where(
  level: 1
): it => {
  v(12pt, weak: true)
  strong(it)
}

#outline(
  indent: auto,
  fill: box(width: 1fr, repeat[-])
)

= Apache Spark

Spark 在流式处理中的应用

= Apache Flink

Flink 在流式处理中的应用

= Apache AirFlow

我不明白

== 定时任务

#lorem(10) #lorem(10)

#lorem(20) #lorem(20)

#let chinese(start: "\u{4E00}", end: "\u{9FA5}") = {
  let characters = ()
  for i in range("\u{FE10}".to-unicode(), "\u{FE1F}".to-unicode()) {
    characters.push(str.from-unicode(i))
  }
  for i in range(start.to-unicode(), end.to-unicode()) {
    characters.push(str.from-unicode(i))
  }
  characters
}

#let lorem(number) = {
  for (idx, value) in chinese().enumerate() {
    if idx == number { return }
    value
  }
}

= 随机数生成器

#lorem(30)

= 需要随机数生成器

#lorem(80)
https://github.com/Myriad-Dreamin/typst.ts
https://raw.githubusercontent.com/Myriad-Dreamin/typst.ts/main/fuzzers/corpora/layout/block-spacing_00.typ
typst
Apache License 2.0
#import "/contrib/templates/std-tests/preset.typ": * #show: test-page #set block(spacing: 10pt) Hello There #block(spacing: 20pt)[Further down]
https://github.com/SWATEngineering/Docs
https://raw.githubusercontent.com/SWATEngineering/Docs/main/src/2_RTB/VerbaliInterni/VerbaleInterno_231114/meta.typ
typst
MIT License
#let data_incontro = "14-11-2023" #let inizio_incontro = "16:00" #let fine_incontro = "18:00" #let luogo_incontro = "Discord"
https://github.com/Origami404/kaoyan-shuxueyi
https://raw.githubusercontent.com/Origami404/kaoyan-shuxueyi/main/微积分/02-极限与导数.typ
typst
#import "../template.typ": sectionline, gray_table, colored = 极限与导数 == 公式 === 等价无穷小 #let 等价 = math.class( "relation", $" " ~ " "$ ) $ x 等价 sin x 等价 tan x 等价 arcsin x 等价 arctan x \ \ x 等价 ln(1 + x) 等价 e^x - 1 \ \ (1 + x)^alpha 等价 1 - alpha x \ 1 - cos x 等价 1/2 x^2 $ === 洛必达 #set list(marker: ([⤥], [›])) 只有 $0\/0$ 或 $c\/infinity$ 的时候能用, 求出来的结果如果存在就是对的, 如果不存在并不代表极限不存在. - $infinity \/ infinity$: 取倒数再洛, 或直接抓最高阶 - $0 dot infinity$: 无穷项取倒数变成 $0\/0$ - $infinity - infinity$: 通分变成 $0\/0$ - $0^0$ 或 $infinity^0$: 取对数 === $n$ 阶导 $ [a^x]^((n)) = a^x colored((ln a)^n) \ [sin x | cos x]^((n)) = [sin | cos](x + colored(n pi/2)) \ [x^a]^((n)) = a (a - 1) dots.c (a - (n - 1)) " " x^(a - n) \ [ln x]^((n + 1)) = [1 / x]^((n)) = (-1)^n n! " " x^(-(n + 1)) \ $ 只需要把 $x$ 替换成 $k x + b$ 并乘以 $k^n$ 就可以得到 $k x + b$ 版本的 $n$ 阶导. === 泰勒展开 有了 $n$ 阶导后, $n$ 阶泰勒就容易推了. 但常见函数的二阶麦克劳林和其无穷小余项还是有必要记的, 可以用来对付一些洛毕达对付不了的,分子分母上有加减的情况: $ e^x &approx 1 + x + x^2 / 2 + o(x^2) \ ln(1 + x) &approx x - x^2 / 2 + o(x^2) \ (1 + x)^alpha &approx 1 + alpha x + (alpha (alpha - 1)) / 2 x^2 + o(x^2) $ 三角系的麦克劳林一般无穷小余项会多一阶, 因为刚好那下一阶的导数是 $0$. $ sin x &approx x - x^3 / 6 + colored(o(x^4)) quad quad arcsin &approx x colored(+) x^3 / 6 + colored(o(x^4)) \ cos x &approx 1 - x^2 / 2 + colored(o(x^3)) quad quad & \ tan x &approx x + x^3 / 3 + colored(o(x^4)) quad quad arctan &approx x colored(-) x^3 / 3 + colored(o(x^4)) \ $ == 定义与概念 #set list(marker: ([★], [⤥], [›])) === 极限和无穷小 - *函数单点极限存在* $<=>$ 左右极限存在且相等 - *数列极限存在* - 放缩夹逼 - 单调有界 - 奇偶项拆分 - 形式为黎曼定积分定义式的数列 - *$A$ 是 $B$ 的无穷小* $eq.def$ $lim A/B = 0$ === 连续和可导 - *函数单点连续* - 极限存在且等于函数值 - 可导 $=>$ 连续 - *函数可导* - 极限 $lim_(x -> x_0) (f(x) - f(x_0)) / (x - x_0)$ 存在 - 左右导数 (上述极限的左右极限) 存在且相等 - *函数可微* - 存在常数 $A$ 使得 $Delta x -> 0$ 时 $Delta y = A Delta x + o(x)$ - 等价于 函数可导 - *函数区间连续* - 开区间: 处处连续 - 闭区间: 开区间连续, 左端点右连续, 右端点左连续 - *函数间断点*: 不连续的点 - 第一类: 左右极限存在 - 可去: 左 $=$ 右 (函数在该点无定义) - 跳跃: 左 $!=$ 右 - 第二类: 左右极限至少一个不存在 - 无穷: 有一个是无穷大 - 震荡: 两个都不是无穷大, 或者单纯不存在 === 应用 - *切线* - $y = f'(x_0) (x - x_0) + f(x_0)$ - 法线: 与切线垂直的线, 斜率是 $- 1 / k$ - *渐进线* - 垂直: $lim_(x -> a^+ "/" a^-) f(x) = plus.minus infinity$ #h(5em) $=>$ #h(1em) $x = a$ - 水平: $lim_(x -> + infinity "/" - infinity) f(y) = a$ #h(5.4em) $=>$ #h(1em) $y = a$ - 倾斜: $lim_(x -> + infinity "/" - infinity) [f(x) - (k x + b)] = 0$ #h(0.3em) $=>$ #h(1em) $y = k x + b$ - *单调性* - 同济的单调性是严格单调性 - 对任意两点 $x_1 > x_2 => f(x_1) > f(x_2)$ - 导数恒大于等于零, 但不在任一子区间上为零 - *凹凸性* - 同济的凹凸是图像向下凹叫凹, 和国际相反. 凹是 $f'' > 0$, 凸是 $f'' < 0$ - 对任意两点, 凹函数 $f((x_1 + x_2) / 2) < (f(x_1) + f(x_2)) / 2$ - *极值点* - 第一充分: $f'(x_0) = 0$, 且左右邻域中导数正负不同 - 第二充分: $f'(x_0) = 0$, $f''(x_0) != 0$ - *拐点*: 一阶导极值点 == 定理 #pagebreak()
https://github.com/mrcinv/nummat-typst
https://raw.githubusercontent.com/mrcinv/nummat-typst/master/domace_03.typ
typst
= 3. domača naloga == Navodila <navodila> Zahtevana števila izračunajte na #strong[10 decimalk] \(z relativno natančnostjo $bold(10^(minus 10))$) Uporabite lahko le osnovne operacije, vgrajene osnovne matematične funkcije `exp`, `sin`, `cos`, …, osnovne operacije z matrikami in razcepe matrik. Vse ostale algoritme morate implementirati sami. Namen te naloge ni, da na internetu poiščete optimalen algoritem in ga implementirate, ampak da uporabite znanje, ki smo ga pridobili pri tem predmetu, čeprav na koncu rešitev morda ne bo optimalna. Kljub temu pazite na #strong[časovno in prostorsko zahtevnost], saj bo od tega odvisna tudi ocena. Izberite #strong[eno] izmed nalog. Domačo nalogo lahko delate skupaj s kolegi, vendar morate v tem primeru rešiti toliko različnih nalog, kot je študentov v skupini. Če uporabljate drug programski jezik, ravno tako kodi dodajte osnovno dokumentacijo in teste. == Težje naloge <težje-naloge> === Ničle Airijeve funkcije <ničle-airijeve-funkcije> Airyjeva funkcija je dana kot rešitev začetnega problema $ A i prime.double lr((x)) minus x thin A i lr((x)) eq 0 comma quad A i lr((0)) eq frac(1, 3^(2 / 3) Gamma lr((2 / 3))) thin A i prime lr((0)) eq minus frac(1, 3^(1 / 3) Gamma lr((1 / 3))) dot.basic $ Poiščite čim več ničel funkcije $A i$ na 10 decimalnih mest natančno. Ni dovoljeno uporabiti vgrajene funkcijo za reševanje diferencialnih enačb. Lahko pa uporabite Airyjevo funkcijo `airyai` iz paketa `SpecialFunctions.jl`, da preverite ali ste res dobili pravo ničlo. ==== Namig <namig> Za računanje vrednosti $y lr((x))$ lahko uporabite Magnusovo metodo reda 4 za reševanje enačb oblike $ y prime lr((x)) eq A lr((x)) y comma $ pri kateri nov približek $bold(Y)_(k plus 1)$ dobimo takole: $ A_1 & eq & A lr((x_k plus lr((1 / 2 minus sqrt(3) / 6)) h))\ A_2 & eq & A lr((x_k plus lr((1 / 2 plus sqrt(3) / 6)) h))\ sigma_(k plus 1) & eq & h / 2 lr((A_1 plus A_2)) minus sqrt(3) / 12 h^2 lr([A_1 comma A_2])\ bold(Y)_(k plus 1) & eq & exp lr((sigma_(k plus 1))) bold(Y)_k dot.basic $ Izraz $lr([A comma B])$ je komutator dveh matrik in ga izračunamo kot $lr([A comma B]) eq A B minus B A$. Eksponentno funkcijo na matriki \($exp lr((sigma_(k plus 1)))$) pa v programskem jeziku julia dobite z ukazom `exp`. === Dolžina implicinto podane krivulje <dolžina-implicinto-podane-krivulje> Poiščite približek za dolžino krivulje, ki je dana implicitno z enačbama $ F_1 lr((x comma y comma z)) & eq x^4 plus y^2 slash 2 plus z^2 eq 12\ F_2 lr((x comma y comma z)) & eq x^2 plus y^2 minus 4 z^2 eq 8 dot.basic $ Krivuljo lahko poiščete kot rešitev diferencialne enačbe $ dot(bold(x)) lr((t)) eq nabla F_1 times nabla F_2 dot.basic $ === Perioda limitnega cikla <perioda-limitnega-cikla> Poiščite periodo limitnega cikla za diferencialno enačbo $ x prime.double lr((t)) minus 4 lr((1 minus x^2)) x prime lr((t)) plus x eq 0 $ na 10 decimalk natančno. === Obhod lune <obhod-lune> <NAME> pošljite iz Zemljine orbite na tir z vrnitvijo brez potiska \(free-return trajectory), ki obkroži Luno in se vrne nazaj v Zemljino orbito. Rešujte sistem diferencialnih enačb, ki ga dobimo v koordinatnem sistemu, v katerem Zemlja in Luna mirujeta \(omejen krožni problem treh teles). Naloge ni potrebno reševati na 10 decimalk. ==== Omejen krožni problem treh teles <omejen-krožni-problem-treh-teles> Označimo z $M$ maso Zemlje in z $m$ maso Lune. Ker je masa sonde zanemarljiva, Zemlja in Luna krožita okrog skupnega masnega središča. Enačbe gibanja zapišemo v vrtečem koordinatnem sistemu, kjer masi $M$ in $m$ mirujeta. 
Označimo $ mu eq frac(m, M plus m) quad upright(" ter ") quad mu^(‾) eq 1 minus mu eq frac(M, M plus m) upright(". ") $ V brezdimenzijskih koordinatah \(dolžinska enota je kar razdalja med masama $M$ in $m$) postavimo maso $M$ v točko $lr((minus mu comma 0 comma 0))$, maso $m$ pa v točko $lr((mu^(‾) comma 0 comma 0))$. Označimo z $R$ in $r$ oddaljenost satelita s položajem $lr((x comma y comma z))$ od mas $M$ in $m$, tj. $ R & eq R lr((x comma y comma z)) eq sqrt(lr((x plus mu))^2 plus y^2 plus z^2) comma\ r & eq r lr((x comma y comma z)) eq sqrt(lr((x minus mu^(‾)))^2 plus y^2 plus z^2) dot.basic $ Enačbe gibanja sonde so potem: $ x^(̈) & eq x plus 2 dot(y) minus mu^(‾) / R^3 lr((x plus mu)) minus mu / r^3 lr((x minus mu^(‾))) comma\ y^(̈) & eq y minus 2 dot(x) minus mu^(‾) / R^3 y minus mu / r^3 y comma\ z^(̈) & eq minus mu^(‾) / R^3 z minus mu / r^3 z dot.basic $ === Perioda geostacionarne orbite Oblika planeta Zemlja ni čisto pravilna krogla. Zato tudi gravitacijsko polje ne deluje v vseh smereh enako. Gravitacijsko polje lahko zapišemo kot odvod gravitacijskega potenciala $ F_g(bold(r)) = m dot gradient V(bold(r)), $ kjer je $V(bold(r))$ skalarna funkcija položaja $bold(r)$. #link("https://en.wikipedia.org/wiki/Gravity_of_Earth")[Zemljina gravitacija] #link("https://en.wikipedia.org/wiki/Geopotential_model")[Zemljin gravitacijski potencial]. == Lažja naloga \(ocena največ 9) <lažja-naloga-ocena-največ-9> Naloga je namenjena tistim, ki jih je strah eksperimentiranja ali pa za to preprosto nimajo interesa ali časa. === Matematično nihalo <matematično-nihalo> Kotni odmik $theta lr((t))$ \(v radianih) pri nedušenem nihanju nitnega nihala opišemo z diferencialno enačbo $ g / l sin lr((theta lr((t)))) plus theta^(prime prime) lr((t)) eq 0 comma quad theta lr((0)) eq theta_0 comma med theta^prime lr((0)) eq theta_0^prime comma $ kjer je $g eq 9.80665 m slash s^2$ težni pospešek in $l$ dolžina nihala. Napišite funkcijo `nihalo`, ki računa odmik nihala ob določenem času. Enačbo drugega reda prevedite na sistem prvega reda in računajte z metodo Runge-Kutta četrtega reda: $ k_1 & eq & h thin f lr((x_n comma y_n))\ k_2 & eq & h thin f lr((x_n plus h slash 2 comma y_n plus k_1 slash 2))\ k_3 & eq & h thin f lr((x_n plus h slash 2 comma y_n plus k_2 slash 2))\ k_4 & eq & h thin f lr((x_n plus h comma y_n plus k_3))\ y_(n plus 1) & eq & y_n plus lr((k_1 plus 2 k_2 plus 2 k_3 plus k_4)) slash 6 dot.basic $ Klic funkcije naj bo oblike `odmik=nihalo(l,t,theta0,dtheta0,n)` - kjer je `odmik` enak odmiku nihala ob času `t`, - dolžina nihala je `l`, - začetni odmik \(odmik ob času $0$) je `theta0` - in začetna kotna hitrost \($theta prime lr((0))$) je `dtheta0`, - interval $lr([0 comma t])$ razdelimo na `n` podintervalov enake dolžine. Primerjajte rešitev z nihanjem harmoničnega nihala. Za razliko od harmoničnega nihala \(sinusno nihanje), je pri matematičnem nihalu nihajni čas odvisen od začetnih pogojev \(energije). Narišite graf, ki predstavlja, kako se nihajni čas spreminja z energijo nihala.
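For orientation, the RK4 update above can be sketched directly in Typst's scripting layer (the assignment itself expects Julia or a similar language; this is only an illustrative sketch that reuses the required `nihalo(l, t, theta0, dtheta0, n)` interface):

#let nihalo(l, t, theta0, dtheta0, n) = {
  let g = 9.80665
  // State y = (theta, theta'); f returns (theta', theta'') for the pendulum ODE.
  let f(y) = (y.at(1), -(g / l) * calc.sin(y.at(0)))
  // add(a, b, s) computes the component-wise a + s * b.
  let add(a, b, s) = (a.at(0) + s * b.at(0), a.at(1) + s * b.at(1))
  let h = t / n
  let y = (theta0, dtheta0)
  for _ in range(n) {
    let k1 = f(y)
    let k2 = f(add(y, k1, h / 2))
    let k3 = f(add(y, k2, h / 2))
    let k4 = f(add(y, k3, h))
    let slope = (
      k1.at(0) + 2 * k2.at(0) + 2 * k3.at(0) + k4.at(0),
      k1.at(1) + 2 * k2.at(1) + 2 * k3.at(1) + k4.at(1),
    )
    y = add(y, slope, h / 6)
  }
  return y.at(0) // the angular displacement theta(t)
}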
https://github.com/Gekkio/gb-ctr
https://raw.githubusercontent.com/Gekkio/gb-ctr/main/chapter/peripherals/ppu.typ
typst
Creative Commons Attribution Share Alike 4.0 International
#import "../../common.typ": * == PPU (Picture Processing Unit) #reg-figure( caption: [#hex("FF40") - LCDC - PPU control register] )[ #reg-table( [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [LCD_EN], [WIN_MAP], [WIN_EN], [TILE_SEL], [BG_MAP], [OBJ_SIZE], [OBJ_EN], [BG_EN], [bit 7], [6], [5], [4], [3], [2], [1], [bit 0] ) ] #reg-figure( caption: [#hex("FF41") - STAT - PPU status register] )[ #reg-table( [U], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R-0], [R-0], [R-0], unimpl-bit(), [INTR_LYC], [INTR_M2], [INTR_M1], [INTR_M0], [LYC_STAT], table.cell(colspan: 2)[LCD_MODE\<1:0\>], [bit 7], [6], [5], [4], [3], [2], [1], [bit 0] ) ] #reg-figure( caption: [#hex("FF42") - SCY - Vertical scroll register] )[ #reg-table( [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], table.cell(colspan: 8)[SCY\<7:0\>], [bit 7], [6], [5], [4], [3], [2], [1], [bit 0] ) ] #reg-figure( caption: [#hex("FF43") - SCX - Horizontal scroll register] )[ #reg-table( [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], table.cell(colspan: 8)[SCX\<7:0\>], [bit 7], [6], [5], [4], [3], [2], [1], [bit 0] ) ] #reg-figure( caption: [#hex("FF44") - LY - Scanline register] )[ #reg-table( [R-0], [R-0], [R-0], [R-0], [R-0], [R-0], [R-0], [R-0], table.cell(colspan: 8)[LY\<7:0\>], [bit 7], [6], [5], [4], [3], [2], [1], [bit 0] ) ] #reg-figure( caption: [#hex("FF45") - LYC - Scanline compare register] )[ #reg-table( [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], [R/W-0], table.cell(colspan: 8)[LYC\<7:0\>], [bit 7], [6], [5], [4], [3], [2], [1], [bit 0] ) ]
https://github.com/DashieTM/ost-5semester
https://raw.githubusercontent.com/DashieTM/ost-5semester/main/template.typ
typst
/* page setup */ #let conf(author: "<NAME>", title, subtitle, doc) = { set document(title: title, author: author) set align(center) set par(justify: true) align(center + horizon, [ #text(17pt, title) ]) line(length: 100%) align(center + horizon, [ #text(10pt, author) ]) line(start: (0%, 0%), end: (75%, 0%)) align(center + horizon, [ #text(9pt, subtitle) ]) pagebreak(weak: false) outline(title: "Table of Contents", indent: true, depth: 3) pagebreak(weak: false) set page(paper: "a4", margin: (x: 1.5cm, top: 50pt), header: [ #smallcaps(author) #h(1fr) #emph(title) ], header-ascent: 100% - 30pt, footer: [ #align(center, [#counter(page).display(both: false)]) ], footer-descent: 100% - 25pt) counter(page).update(1) set align(left) show raw.where(block: true): content => block( width: 100%, fill: luma(240), stroke: 1pt + maroon, radius: 3pt, inset: 5pt, clip: false, { let numbers = true let stepnumber = 1 let numberfirstline = false let numberstyle = auto let firstnumber = 1 let highlight = none let (columns, align, make_row) = { if numbers { // line numbering requested if type(numberstyle) == "auto" { numberstyle = text.with(style: "italic", slashed-zero: true, size: .6em) } ( (auto, 1fr), (right + horizon, left), e => { let (i, l) = e let n = i + firstnumber let n_str = if (calc.rem(n, stepnumber) == 0) or (numberfirstline and i == 0) { numberstyle(str(n)) } else { none } (n_str + h(.5em), raw(lang: content.lang, l)) }, ) } else { ((1fr,), (left,), e => { let (i, l) = e raw(lang: content.lang, l) }) } } table( stroke: none, columns: columns, rows: (auto,), gutter: 0pt, inset: 2pt, align: (col, _) => align.at(col), ..content .text .split("\n") .enumerate() .map(make_row) .flatten() .map(c => if c.has("text") and c.text == "" { v(1em) } else { c }), ) }, ) doc }
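A minimal way to apply the template (my sketch, not from the repo; the import path, title, and subtitle are invented):

#import "template.typ": conf // hypothetical path

#show: conf.with(
  "Software Engineering Notes",   // title (invented)
  "Week 1: course overview",      // subtitle (invented)
)

= First Topic
#lorem(40)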
https://github.com/zeropoolnetwork/sharded-storage-docs
https://raw.githubusercontent.com/zeropoolnetwork/sharded-storage-docs/main/presentation/trilemma/trilemma.typ
typst
MIT License
#import "@preview/touying:0.4.1": * #import "@preview/fletcher:0.4.5" as fletcher: diagram, node, edge #import "university.typ" #import fletcher.shapes: ellipse #set page(width: auto, height: auto, margin: 5mm, fill: white) #set text(font: "Liberation Sans") #set align(center) #let s = university.register(aspect-ratio: "16-9", lang: "en") #let s = (s.methods.info)( self: s, title: [Solving Vitalik's Trilemma with zk-driven DA and Storage], author: [<NAME>], institution: [ZeroPool], ) #let (init, slides, touying-outline, alert, speaker-note) = utils.methods(s) #show: init #show strong: alert #let (slide, empty-slide, title-slide, focus-slide, matrix-slide) = utils.slides(s) #show: slides == Vitalik's trilemma #v(20pt) #diagram( spacing: 40pt, cell-size: (8mm, 10mm), edge-stroke: 1pt, edge-corner-radius: 5pt, mark-scale: 70%, node((0,0), text("Scalability", fill: white, weight: "bold"), fill: blue.darken(45%), shape: ellipse, width: 230pt, height: 80pt), node((1,0), text("Security", fill: white, weight: "bold"), fill: blue.darken(45%), shape: ellipse, width: 230pt, height: 80pt), node((2,0), text("Decentralization", fill: white, weight: "bold"), fill: blue.darken(45%), shape: ellipse, width: 230pt, height: 80pt), ) #pause #v(40pt) Select two of the three #image("../../assets/cat1.svg", height: 160pt) == Why do we need to solve the trilemma? #v(40pt) #align(left)[ - Mass adoption: scalability and decentralization, keeping the security - Transition from Web2 to Web3 ] #place(bottom+right, dx:-30pt, image("../../assets/cat2.svg", height: 300pt)) == Historical approaches to solve the trilemma #v(60pt) Multichain Ecosystem Plasma Optimistic Rollup ZK Rollup ZK Validium Data Availability Layer == Why rollups are not enough? #pause Rollups scale the computation but do not scale the data #pause #image("../../assets/data_floats_up.svg", width: 35%) Data floats up to the L1 #pause #place(top+left, dx:145pt, dy:200pt, text(size:1.5em, fill: blue.darken(60%), weight: "bold", "We need recursive rollups")) == Storage-centric approach #align(left)[ Long-term storage solutions: - Filecoin - Arweave - Ethstorage ] #pause #place(bottom+right, dy:-40pt, align(left, [ Improvements to be implemented: - RS codes for data sharding - whole-chain provable zk proof ])) == RS codes #slide(repeat:4, self => [ #let (uncover, only, alternatives) = utils.methods(self) #alternatives[ #image("../../assets/rs1.png", height:280pt) ][ #image("../../assets/rs2.png", height:280pt) ][ #image("../../assets/rs3.png", height:280pt) ][ #image("../../assets/rs1.png", height:280pt) ] ],[ #v(20pt) $circle.stroked.tiny$ Encode the data as the polynomial #pause $circle.stroked.tiny$ Blowup the data #pause $circle.stroked.tiny$ Forget part of the data #pause $circle.stroked.tiny$ Recover the source data $circle.stroked.tiny$ RS codes are $tilde 10$ times more efficient, than replication, with better security guarantees. ]) == RS codes and SNARKs Commitment to RS encoded shard could be easily proved and verified in SNARK with the following polynomial equation: $ F(x, x^M) - F(x, y_0) = (x^M - y_0) dot Q(x), $ where F(x,y) is a bivariate polynomial, representing the source data $ F(g^i, h^j) = "data"_(M j + i),$ and $F(x, y_0)$ is a polynomial, representing the encoded data of the shard. 
== zk-driven data-centric rollup #slide(self => [ #image("../../assets/rollup-zkda.svg", width: 340pt) ],[ $circle.stroked.tiny$ DA and Storage contract could be proved and verified in zkSNARK #pause $circle.stroked.tiny$ The proof could be merged with the rollup proof #pause $circle.stroked.tiny$ Full proof of both state transition and data availability $circle.stroked.tiny$ The rollup does not require any data stored outside of it ]) == Some parameters of data-centric rollup #table( columns: (auto, auto), inset: 6pt, stroke: 0.7pt, align: horizon, [Storage cost], [~ 0.15 USD per 1 GB per year #super[1]], [Soundness], [110 bits #super[2]], [Capacity], [>1 PB] ) #v(1.5em) // vertical space #super[1] based on the Hetzner storage node, less for optimized rigs #super[2] the probability of forgetting any chunk of data is less than $2^(-110)$ if half of the network is honest #pause #v(20pt) #text(size:1.5em, fill: blue.darken(60%), weight: "bold", "Rollups recursion: unlocked") == Could we reach more scalability? #slide(repeat:2, self => [ #let (uncover, only, alternatives) = utils.methods(self) #alternatives[ #align(center)[ #image("../../assets/cat4.svg", width: 250pt) ] ][#align(center)[ Yes #image("../../assets/architecture.svg", width: 540pt) ]] ]) == Thank you for your attention! #grid(columns: 3, gutter:18pt, [ #image("../../assets/article-sharded-storage-1.svg", width: 180pt) #link("https://zeropool.network/research/blockchain-sharded-storage-web2-costs-and-web3-security-with-shamir-secret-sharing")[Blockchain Sharded Storage: Web2 Costs and Web3 Security with Shamir Secret Sharing] ], [ #image("../../assets/article-sharded-storage-2.svg", width: 180pt) #link("https://zeropool.network/research/minimal-fully-recursive-zkda-rollup-with-sharded-storage")[Minimal fully recursive zkDA rollup with sharded storage] ], [ #image("../../assets/article-sharded-storage-3.svg", width: 180pt) #link("https://storage.zeropool.network/")[storage.zeropool.network Toy model of data-centric rollup, storing its blocks inside itself] ], [ ])
https://github.com/leyan/cetzpenguins
https://raw.githubusercontent.com/leyan/cetzpenguins/main/src/penguins.typ
typst
MIT License
#import "@preview/cetz:0.2.2" #import cetz.draw: * #let anchor-coords(anchor-name) = { get-ctx(ctx => { let round(a) = a.map(v => calc.round(v,digits:4)) let (ctx,a) = cetz.coordinate.resolve(ctx,anchor-name) let(x,y,z)=a circle((x,y),fill:red,stroke:none,radius:0.02) line(stroke:0.1pt+red,anchor-name,(rel:(-0.1,0.1),to:anchor-name)) content((rel:(0.1,0.1),to:anchor-name), text(1pt,[#anchor-name: #round(a)])) }) } #let penguinInternal(color:none,body-color:none,head-color:none,eyes:none,left-eye:none,right-eye:none)= { let penguin-blue = rgb(3,14,29) let penguin-black = rgb(23,19,19) let penguin-yellow = rgb(252,187,21) let penguin-white = rgb(248,248,248) //default colors let body-color-value = penguin-blue let head-color-value = penguin-blue let right-wing-color-value = penguin-blue let left-wing-color-value = penguin-blue let belly-color-value = penguin-white let hair-color-values = (penguin-blue,penguin-blue,penguin-blue,penguin-blue,penguin-blue) let left-foot-color-value = penguin-yellow let right-foot-color-value = penguin-yellow let bill-color-value = penguin-yellow let eyes-color-value = penguin-black let left-eye-color-value = eyes-color-value let right-eye-color-value = eyes-color-value let eyes-secondary-color-value = penguin-white let left-eye-secondary-color-value = eyes-secondary-color-value let right-eye-secondary-color-value = eyes-secondary-color-value let eyes-shape-value = "normal" let left-eye-shape-value = eyes-shape-value let right-eye-shape-value = eyes-shape-value //Customization parameters if color != none { body-color-value = color head-color-value = color right-wing-color-value = color left-wing-color-value = color hair-color-values = (color,color,color,color,color) } if body-color != none { body-color-value = body-color } if head-color != none { head-color-value = head-color } if eyes != none { eyes-color-value = eyes.at("color",default:eyes-color-value) left-eye-color-value = eyes-color-value right-eye-color-value = eyes-color-value eyes-shape-value = eyes.at("shape",default:eyes-shape-value) left-eye-shape-value = eyes-shape-value right-eye-shape-value = eyes-shape-value } if left-eye != none { left-eye-color-value = left-eye.at("color",default:left-eye-color-value) left-eye-shape-value = left-eye.at("shape",default:left-eye-shape-value) } if right-eye != none { right-eye-color-value = right-eye.at("color",default:right-eye-color-value) right-eye-shape-value = right-eye.at("shape",default:right-eye-shape-value) } let penguin-half-width = 0.9375 //\pingu@w@half let penguin-half-height = 1.275 // \pingu@side@h@half let penguin-head-half-height = 0.9 * penguin-half-width // \pingu@head@h@half let penguin-bend = 0.125 //\pingu@bend let penguin-white-offset = 0.1 let penguin-lower-offset = 0.25 //\pingu@lower@off let penguin-outer-offset = 0.25 //\pingu@outer@off let penguin-foot-outer-width = 0.45 //\pingu@foot@outer@w let penguin-foot-outer-height = 0.25 //\pingu@foot@outer@h let penguin-foot-inner-width = 0.55 //\pingu@foot@inner@w let penguin-foot-inner-height = 0.115//\pingu@foot@inner@h let penguin-foot-single-width = 0.105 //\pingu@foot@single@w let penguin-foot-single-height = 0.2 //\pingu@foot@single@h let penguin-basic-feet-bend = 45deg let penguin-side-half-size = 1.275 //\\pingu@side@h@half let penguin-head-connection-x-offset = 0.39375 // .75*.525 \pingu@headcon@x let penguin-head-connection-y-offset = 0.46735 // .719*.65 \pingu@headcon@y let penguin-angle-head-left = 7deg let penguin-angle-head-right = 93deg let penguin-eye-shift = 
0.08//\pingu@eye@shift let eye-base-angle = 38.5deg group(name:"body",{ // BODY OUTLINE merge-path(fill:body-color-value,stroke:none, { arc-through((0,penguin-outer-offset),(rel:(-0.0283,-penguin-outer-offset -0.5125),update:false),(0,- penguin-side-half-size ), name:"right") arc((),start: 180deg, stop: 270deg,radius:(penguin-foot-outer-width,penguin-foot-outer-height),name:"waist-to-foot-right") arc((),start: 270deg, stop: 332deg,radius:(penguin-foot-inner-width,penguin-foot-inner-height),name:"foot-inner-right") arc((),start: 208deg, stop: 270deg,radius:(penguin-foot-inner-width,penguin-foot-inner-height),name:"foot-inner-left") arc((),start: 270deg, stop: 360deg,radius:(penguin-foot-outer-width,penguin-foot-outer-height),name:"waist-to-foot-left") arc-through((),(rel:(0.0283,penguin-outer-offset+0.5125),update:false),(rel:(0,penguin-side-half-size + penguin-outer-offset)), name:"left") line((),(0,penguin-outer-offset),name:"center-horizontal") }) anchor("wings-side-right","right.start") anchor("wings-side-left","left.end") anchor("waist-right","right.77.5%") anchor("waist-right-middle","right.50%") anchor("waist-left-middle","left.50%") anchor("foot-right","waist-to-foot-right.end") anchor("bottom-center","foot-inner-right.end") anchor("foot-left","foot-inner-left.end") anchor("head-center","center-horizontal.mid") // HEAD //line("head-center",(rel:(0,penguin-head-half-height)),name:"head-middle") circle(fill:head-color-value,stroke:none,"head-center",radius:(penguin-half-width,penguin-head-half-height),name: "head") //arc((),start: 90deg, stop: 450deg,radius:(penguin-half-width,penguin-head-half-height)) anchor("head-top","head.north") anchor("head-topleft","head.7.3%") anchor("head-topright","head.92.7%") // BELLY merge-path(fill:belly-color-value,stroke:none, { line(stroke:yellow,(penguin-half-width,-1),(rel:(-penguin-half-width+0.1,penguin-lower-offset)),name:"start-belly") line((),(rel:(0,1.05 - penguin-side-half-size - penguin-outer-offset)),name:"belly-left") //arc-through((),(rel:(-0.01,(1.05 - penguin-side-half-size - penguin-outer-offset)/2),update:false),(rel:(0,1.05 - penguin-side-half-size - penguin-outer-offset)),name:"belly-left") arc((),start: 180deg, stop: 270deg,radius:(0.4,0.2)) arc((),start: 270deg, stop: 331deg,radius:(0.5,0.085)) arc((),start: 207deg, stop: 270deg,radius:(0.5,0.085)) arc((),start: 270deg, stop: 360deg,radius:(0.4,0.2)) line((),(rel:(0,-1.05 + penguin-side-half-size + penguin-outer-offset))) arc((),start:0deg,stop:180deg,radius:(penguin-half-width - 0.096,penguin-half-width - 0.1)) }) anchor("belly-back","start-belly.start") anchor("belly-test","start-belly.end") anchor("belly-left","belly-left.end") anchor("eye-back-left",(rel:(0.275,0.25 - penguin-outer-offset),to:"head-center")) anchor("eye-back-right",(rel:(-0.275,0.25 - penguin-outer-offset),to:"head-center")) anchor("head-back-con-left",(rel:(penguin-head-connection-x-offset, -penguin-head-connection-y-offset),to:"eye-back-left")) anchor("head-back-con-right",(rel:(-penguin-head-connection-x-offset, -penguin-head-connection-y-offset),to:"eye-back-right")) //FACE SHAPE circle(fill:belly-color-value,stroke:none,(rel:(0, - 0.025),to:"eye-back-right"),radius:(0.525,0.625),name:"face-right") circle(fill:belly-color-value,stroke:none,(rel:(0, - 0.025),to:"eye-back-left"),radius:(0.525,0.625),name:"face-left") }) let draw-eye-shiny(origin) = { } let eye-left-position = (rel:( penguin-eye-shift,0.075),to:"body.eye-back-left") let eye-right-position = 
(rel:(-penguin-eye-shift,0.075),to:"body.eye-back-right") //EYES group(name:"left-eye",{ anchor("eye-left","body.eye-back-left") if left-eye-shape-value=="normal" { circle(fill:left-eye-color-value,stroke:none,eye-left-position,radius:(0.1225,0.1365)) } else if left-eye-shape-value=="shiny" { circle(fill:left-eye-color-value,stroke:none,eye-left-position,radius:(0.22,0.26),name: "eye-shape") circle(fill:left-eye-secondary-color-value,stroke:none,radius:(0.08,0.1),(rel:(angle:eye-base-angle,radius:(0.085,0.1)),to:"eye-shape.center")) circle(fill:left-eye-secondary-color-value,stroke:none,radius:(0.025,0.035),(rel:(angle:eye-base-angle+180deg,radius:(0.12,0.14)),to:"eye-shape.center")) } else if left-eye-shape-value=="wink" { group(name:"left-inner-eye",{ rotate(-4deg,origin:eye-left-position) line(stroke:none, eye-left-position, (rel:(angle:174deg,radius:0.14),to:()), (rel:(0,-0.065),to:()),name:"start-line") merge-path(fill:left-eye-color-value,stroke:none, { arc((),start:174deg,stop:6deg,radius:(0.14,0.14)) arc((),start:-6deg,stop:-174deg,radius:(0.01,0.008)) arc((),start:6deg,stop:174deg,radius:(0.12,0.065)) arc((),start:-6deg,stop:-174deg,radius:(0.01,0.008)) }) }) } else if left-eye-shape-value=="none" { }else { panic("Unknown left-eye shape: ",left-eye-shape-value) } }) group(name:"right-eye",{ if right-eye-shape-value=="normal" { circle(fill:right-eye-color-value,stroke:none,eye-right-position,radius:(0.1225,0.1365)) } else if right-eye-shape-value=="shiny" { circle(fill:right-eye-color-value,stroke:none,eye-right-position,radius:(0.22,0.26),name: "eye-shape") circle(fill:right-eye-secondary-color-value,stroke:none,radius:(0.08,0.1),(rel:(angle:eye-base-angle,radius:(0.085,0.1)),to:"eye-shape.center")) circle(fill:right-eye-secondary-color-value,stroke:none,radius:(0.025,0.035),(rel:(angle:eye-base-angle+180deg,radius:(0.12,0.14)),to:"eye-shape.center")) } else if right-eye-shape-value=="wink" { rotate(4deg,origin:eye-right-position) line(stroke:none, eye-right-position, (rel:(angle:174deg,radius:0.14),to:()), (rel:(0,-0.065),to:())) merge-path(fill:right-eye-color-value,stroke:none, { arc((),start:174deg,stop:6deg,radius:(0.14,0.14)) arc((),start:-6deg,stop:-174deg,radius:(0.01,0.008)) arc((),start:6deg,stop:174deg,radius:(0.12,0.065)) arc((),start:-6deg,stop:-174deg,radius:(0.01,0.008)) }) } else if right-eye-shape-value=="none" { }else { panic("Unknown right-eye shape: ",right-eye-shape-value) } }) //WINGS on-layer(-1,{ group(name:"wings",{ anchor("left-start",(rel:(0,-0.65),to:"body.wings-side-left")) anchor("right-start",(rel:(0,-0.65),to:"body.wings-side-right")) rotate(15deg,origin:"right-start") circle(fill:left-wing-color-value,stroke:none,radius:(0.195,0.9),"left-start",name:"left-wing") circle(fill:none,stroke:none,radius:(0.145,0.75),"left-start",name:"left-wing-internal") anchor("left-wing-tip","left-wing.south") anchor("left-wing","left-wing-internal.south") rotate(-15deg,origin:"right-start") rotate(-15deg,origin:"right-start") circle(fill:right-wing-color-value,stroke:none,radius:(0.195,0.9),"right-start",name:"right-wing") circle(fill:none,stroke:none,radius:(0.145,0.75),"right-start",name:"right-wing-internal") anchor("right-wing-tip","right-wing.south") anchor("right-wing","right-wing-internal.south") rotate(15deg,origin:"right-start") }) }) //HAIR on-layer(-1,{ merge-path(fill:hair-color-values.at(0),stroke:none, { line((rel:(0,-0.1),to:"body.head-top"),(rel:(0.015,0))) arc((),start:0deg,stop:90deg,radius:(0.125,0.135)) 
arc((),start:90deg,stop:0deg,radius:(0.035,0.075)) }) merge-path(fill:hair-color-values.at(1),stroke:none, { line((rel:(0,-0.1),to:"body.head-top"),(rel:(0.033,0))) arc((),start:0deg,stop:90deg,radius:(0.125,0.175)) arc((),start:90deg,stop:0deg,radius:(0.035,0.1)) }) merge-path(fill:hair-color-values.at(2),stroke:none, { line((rel:(0,-0.1),to:"body.head-top"),(rel:(-0.033,0))) arc((),start:180deg,stop:90deg,radius:(0.17,0.215)) arc((),start:90deg,stop:180deg,radius:(0.075,0.18)) }) merge-path(fill:hair-color-values.at(3),stroke:none, { line((rel:(0,-0.1),to:"body.head-top"),(rel:(0.025,0))) arc((),start:180deg,stop:90deg,radius:(0.125,0.175)) arc((),start:90deg,stop:180deg,radius:(0.035,0.125)) }) merge-path(fill:hair-color-values.at(4),stroke:none, { line((rel:(0,-0.1),to:"body.head-top"),(rel:(0.044,0))) arc((),start:180deg,stop:90deg,radius:(0.125,0.135)) arc((),start:90deg,stop:180deg,radius:(0.035,0.075)) }) }) //FEET on-layer(-1,{ group(name:"left-foot",{ group(name:"first-toe",{ anchor("left-foot-first",(rel:(-0.5*penguin-foot-single-height,0),to:"body.foot-left",)) rotate( 19deg,origin:"left-foot-first") circle(fill:left-foot-color-value,stroke:none,radius:(penguin-foot-single-width,penguin-foot-single-height),"left-foot-first") }) group(name:"second-toe",{ rotate(penguin-basic-feet-bend,origin:"body.foot-left") circle(fill:left-foot-color-value,stroke:none,radius:(penguin-foot-single-width,penguin-foot-single-height),"body.foot-left") }) group(name:"third-toe",{ anchor("left-foot-third",(rel:(0.5*penguin-foot-single-height,0),to:"body.foot-left",)) rotate(penguin-basic-feet-bend + 25deg,origin:"left-foot-third") circle(fill:left-foot-color-value,stroke:none,radius:(penguin-foot-single-width,penguin-foot-single-height),"left-foot-third") }) }) group(name:"right-foot",{ group(name:"first-toe",{ anchor("right-foot-first",(rel:(-0.5*penguin-foot-single-height,0),to:"body.foot-right",)) rotate(- penguin-basic-feet-bend - 25deg,origin:"right-foot-first") circle(fill:right-foot-color-value,stroke:none,radius:(penguin-foot-single-width,penguin-foot-single-height),"right-foot-first") }) group(name:"second-toe",{ rotate(- penguin-basic-feet-bend,origin:"body.foot-right") circle(fill:right-foot-color-value,stroke:none,radius:(penguin-foot-single-width,penguin-foot-single-height),"body.foot-right") }) group(name:"third-toe",{ anchor("right-foot-third",(rel:(0.5*penguin-foot-single-height,0),to:"body.foot-right",)) rotate(- penguin-basic-feet-bend + 25deg,origin:"right-foot-third") circle(fill:right-foot-color-value,stroke:none,radius:(penguin-foot-single-width,penguin-foot-single-height),"right-foot-third") }) }) }) //BILL group(name:"bill",{ merge-path(fill:bill-color-value,stroke:none, { anchor("base",(rel:(0,- penguin-outer-offset -0.005),to: "body.head-center")) line("base",(rel:(-0.19,0))) arc((),start:180deg,stop:360deg,radius:(0.19,0.225),name:"bill-bottom") line((),(rel:(-0.02,0.005))) arc-through((),(rel:(0,0.02),to:"base"),(rel:(-0.17,0.005),to:"base")) line((),(rel:(-0.19,0),to:"base")) anchor("bottom","bill-bottom.mid") }) }) // anchor-coords("wings.right-start") // anchor-coords("wings.right-wing-tip") // anchor-coords("wings.right-wing") // anchor-coords("wings.left-start") // anchor-coords("wings.left-wing-tip") // anchor-coords("wings.left-wing") // anchor-coords("body.wings-side-left") // anchor-coords("body.wings-side-right") // anchor-coords("body.right.end") // anchor-coords("body.waist-right-middle") // anchor-coords("body.waist-right") // anchor-coords("body.foot-right") 
// anchor-coords("body.bottom-center") // anchor-coords("body.foot-left") // anchor-coords("body.left.start") // anchor-coords("body.left.end") // anchor-coords("body.head-center") // anchor-coords("bill.base") // anchor-coords("bill.bottom") // anchor-coords("body.head-top") // anchor-coords("body.head-topright") // anchor-coords("body.head-topleft") // anchor-coords("body.belly-back") // anchor-coords("body.belly-test") // anchor-coords("body.belly-left") // anchor-coords("body.eye-back-left") // anchor-coords("left-eye.eye-left") // anchor-coords("body.eye-back-right") // anchor-coords("body.head-back-con-left") // anchor-coords("body.head-back-con-right") } #let penguin(width:2.4702cm,..style)= { let unit-width = width/2.4702 //The total width of the base penguin cetz.canvas(length:unit-width,{ penguinInternal(..style) }) }
https://github.com/Cyclone-Robosub/ocean-report-crs
https://raw.githubusercontent.com/Cyclone-Robosub/ocean-report-crs/main/0.1.0/README.md
markdown
# ocean-report-crs

This is the official Typst template for Cyclone RoboSub (CRS) @ UC Davis. The organization and logo name found at the bottom right of the document can be modified to fit other organizations.

## Usage

You can use this template in the Typst web app by clicking "Start from template" on the dashboard and searching for `ocean-waves`. Alternatively, you can use the CLI to kick this project off using the command

```shell
typst init @preview/ocean-report-crs:0.1.0
```

Typst will create a new directory with all the files needed to get you started.

## Configuration

- `title`: The title of your document.
- `subtitle`: The subtitle of your document. Can be used to list the authors.
- `date`: The date shown in the top right.
  - By default, this date updates to reflect the current date. Add this parameter with your preferred date to change it.
- `org`: Your organization. Affects the document author and the text in the bottom-right corner.
  - By default, this is set to "Cyclone RoboSub @ UC Davis". Add this parameter with your own organization's name to change it.
- `logo`: Path to the logo image file.
  - By default, this is set to the Cyclone RoboSub logo. Add this parameter with the path to your own logo image to change it.

## Example

```typst
#import "@preview/ocean-report-crs:0.1.0": *

#show: report.with(
  title: "This is the Title",
  subtitle: "This is the Subtitle",
  // date: "Don't want the auto date? Add your own date here!",
  // org: "Add your own org here!",
  // logo: "Add the path to your own logo file here!",
)

// Add your content below!

= Heading
#lorem(100)

== Sub Heading
#lorem(50)
```

## Future Works

- Add unique table settings
https://github.com/gumelarme/nuist-master-thesis-proposal
https://raw.githubusercontent.com/gumelarme/nuist-master-thesis-proposal/main/pages/report.typ
typst
#import "/strings/zh.typ" as lang #text(size: 1.5em, lang.section-1) #let title-sources = list( marker: $ballot$, ..lang.title-sources, ) #show table.cell: it => { if it.x > 0 { return text(size: 0.9em, it) } return it } #let table-len = 7 #let table-rows = 4 #block( width: 100%, height: 100%, table( align: horizon + center, // tt rd ts note tt opt pc rows: (5em, 5em, auto, auto, auto, 2em, 1fr), // header title type options notes columns: (auto,) + (table-len - 2) * (2fr, ) + (1fr, ), stroke: 0.5pt + black, lang.th-thesis-title, table.cell(colspan: table-len - 1, []), lang.th-research-dir, table.cell(colspan: table-len - 1, []), table.cell(rowspan: 2, lang.th-title-source), table.cell(rowspan: 2, align: horizon + left, colspan: table-len - 2, title-sources), lang.th-notes, [], table.cell(rowspan: 2, lang.th-title-type), lang.tt-engineering, lang.tt-applied, lang.tt-theoritical, lang.tt-interdis, table.cell(rowspan: 2, lang.tt-other), table.cell(rowspan: 2, []), // Empty column below the title types [ ], [ ], [ ], [ ], table.cell(colspan: table-len , align: top + left, lang.th-proposal-content) ), )
https://github.com/Greacko/typyst_library
https://raw.githubusercontent.com/Greacko/typyst_library/main/library/boxes.typ
typst
// Full-width rounded boxes with a light gray fill and a colored border,
// plus short aliases `bb` and `rb`.
#let bluebox(content) = {
  rect(
    stroke: blue + 1pt,
    content,
    radius: 3pt,
    fill: color.linear-rgb(87.14%, 86.32%, 86.32%),
    width: 100%,
    height: auto
  )
}

#let bb(content) = { bluebox(content) }

#let redbox(content) = {
  rect(
    stroke: red + 1pt,
    content,
    radius: 3pt,
    fill: color.linear-rgb(87.14%, 86.32%, 86.32%),
    width: 100%,
    height: auto
  )
}

#let rb(content) = { redbox(content) }
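A minimal usage sketch of the helpers above; the import path is hypothetical and depends on where `boxes.typ` lives in your project.

```typst
// Hypothetical import path; adjust to wherever boxes.typ is stored.
#import "library/boxes.typ": bluebox, bb, redbox, rb

#bluebox[A full-width note with a blue border.]
#rb[The same box style with a red border, via the short alias.]
```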
https://github.com/LDemetrios/Typst4k
https://raw.githubusercontent.com/LDemetrios/Typst4k/master/src/test/resources/suite/layout/flow/flow.typ
typst
--- flow-fr ---
#set page(height: 2cm)
#set text(white)
#rect(fill: forest)[
  #v(1fr)
  #h(1fr)
  Hi you!
]

--- issue-flow-overlarge-frames ---
// In this bug, the first line of the second paragraph was on its page alone and
// the rest moved down. The reason was that the second block resulted in
// overlarge frames because the region wasn't finished properly.
#set page(height: 70pt)
#block(lines(3))
#block(lines(5))

--- issue-flow-trailing-leading ---
// In this bug, the first part of the paragraph moved down to the second page
// because trailing leading wasn't trimmed, resulting in an overlarge frame.
#set page(height: 60pt)
#v(19pt)
#block[
  But, soft! what light through yonder window breaks? It is the east, and
  Juliet is the sun.
]

--- issue-flow-weak-spacing ---
// In this bug, there was a bit of space below the heading because weak spacing
// directly before a layout-induced column or page break wasn't trimmed.
#set page(height: 60pt)
#rect(inset: 0pt, columns(2)[
  Text
  #v(12pt)
  Hi
  #v(10pt, weak: true)
  At column break.
])

--- issue-flow-frame-placement ---
// In this bug, a frame intended for the second region ended up in the first.
#set page(height: 105pt)
#block(lorem(20))

--- issue-flow-layout-index-out-of-bounds ---
// This bug caused an index-out-of-bounds panic when laying out paragraphs that
// needed multiple reorderings.
#set page(height: 200pt)
#lines(10)
#figure(placement: auto, block(height: 100%))
#lines(3)
#lines(3)

--- issue-3641-float-loop ---
// Flow layout should terminate!
#set page(height: 40pt)
= Heading
#lines(2)

--- issue-3355-metadata-weak-spacing ---
#set page(height: 50pt)
#block(width: 100%, height: 30pt, fill: aqua)
#metadata(none)
#v(10pt, weak: true)
Hi

--- issue-3866-block-migration ---
#set page(height: 120pt)
#set text(costs: (widow: 0%, orphan: 0%))
#v(50pt)
#columns(2)[
  #lines(6)
  #block(rect(width: 80%, height: 80pt), breakable: false)
  #lines(6)
]

--- issue-5024-spill-backlog ---
#set page(columns: 2, height: 50pt)
#columns(2)[Hello]
https://github.com/Enter-tainer/typstyle
https://raw.githubusercontent.com/Enter-tainer/typstyle/master/tests/assets/unit/math/long.typ
typst
Apache License 2.0
#let aaaaaaaaaaaaaaaaaaaaa = 0 #let bbbbbbbbbbbbbbbbbbbbbb = 0 #let CCCCCCCCCCCCCCCCCCCCCCCCC = 0 $ aaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbb + CCCCCCCCCCCCCCCCCCCCCCCCC $ $ [aaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbb] + CCCCCCCCCCCCCCCCCCCCCCCCC $ $ [aaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbb] + CCCCCCCCCCCCCCCCCCCCCCCCC $
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/silky-report-insa/0.2.0/README.md
markdown
Apache License 2.0
# INSA - Typst Template

Typst template for full documents and reports for the French engineering school INSA.

# Examples

## "TP" report

By default, the template initializes with the `insa-report` show rule, with parameters that you must fill in by yourself. Here is an example of a filled template:

```typst
#import "@preview/silky-report-insa:0.2.0": *
#show: doc => insa-report(
  id: 3,
  pre-title: "STPI 2",
  title: "Interférences et diffraction",
  authors: [
    *<NAME>*
    *<NAME>*
    <NAME>
  ],
  date: "11/04/2023",
  doc)

= Introduction
Le but de ce TP est d'interpréter les figures de diffraction observées avec différents objets diffractants et d'en déduire les dimensions de ces objets.

= Partie théorique - Phénomène d'interférence
== Diffraction par une fente double
Lors du passage de la lumière par une fente double de largeur $a$ et de distance $b$ entre les centres des fentes...
```

## Internship report

If you want to make an internship report, you will need to use another show rule: `insa-stage`. Here is an example:

```typst
#import "@preview/silky-report-insa:0.2.0": *
#show: doc => insa-stage(
  "<NAME>",
  "INFO",
  "2023-2024",
  "Real-time virtual interaction with deformable structure",
  "Sapienza University of Rome",
  image("logo-example.png"),
  "<NAME>",
  "<NAME>",
  [
    Résumé du stage en français.
  ],
  [
    Summary of the internship in english.
  ],
  doc
)

= Introduction
Présentation de l'entreprise, tout ça tout ça.
#pagebreak()

= Travail réalisé
== Première partie
Blabla

== Seconde partie
Bleble
#pagebreak()

= Conclusion
Conclusion random
#pagebreak()

= Annexes
```

## Blank templates

If you do not want the preformatted output with "TP x", the title and date in the header, etc., you can simply use the `insa-document` show rule and customize everything by yourself.

### Blank template types

The graphic charter provides 3 different document types, which are translated in this Typst template under these names:

- **`light`**, which uses little color and can be printed easily. Has 3 spots to write on the cover: `cover-top-left`, `cover-middle-left` and `cover-bottom-right`.
- **`colored`**, which is beautiful but consumes a lot of ink to print. Only has 1 spot to write on the cover: `cover-top-left`.
- **`pfe`**, which is primarily used for internship reports. Has 4 spots to write on both the front and back covers: `cover-top-left`, `cover-middle-left`, `cover-bottom-right` and `back-cover`.

The document type must be the first argument of the `insa-document` function. Here is an example:

```typst
#import "@preview/silky-report-insa:0.2.0": *
#show: doc => insa-document(
  "light",
  cover-top-left: [*Document important*],
  cover-middle-left: [
    NOM Prénom
    Département INFO
  ],
  cover-bottom-right: "uwu",
  page-header: "En-tête au pif",
  doc
)
```

# Fonts

The graphic charter recommends the fonts **League Spartan** for headings and **Source Serif** for regular text. To have the best look, you should install those fonts.

To behave correctly on computers without those specific fonts installed, this template will automatically fall back to other similar fonts:

- **League Spartan** -> **Arial** (approved by INSA's graphic charter, by default in Windows) -> **Liberation Sans** (by default in most Linux)
- **Source Serif** -> **Source Serif 4** (downloadable for free) -> **Georgia** (approved by the graphic charter) -> **Linux Libertine** (default Typst font)

## Note on variable fonts

If you want to install those fonts on your computer, Typst might not recognize them if you install their _Variable_ versions. You should install the static versions (**League Spartan Bold** and most versions of **Source Serif**). Keep an eye on [the issue in the Typst bug tracker](https://github.com/typst/typst/issues/185) to see when variable fonts will be supported!

# Notes

This template is being developed by <NAME> from the INSA de Rennes in [this repository](https://github.com/SkytAsul/INSA-Typst-Template). For now it includes assets from the INSA de Rennes graphic charter, but users from other INSAs can open an issue on the repository with the correct assets for their INSA. If you have any other feature request, open an issue on the repository as well.

# License

The Typst template is licensed under the [MIT license](https://github.com/SkytAsul/INSA-Typst-Template/blob/main/LICENSE). This does *not* apply to the image assets. Those image files are property of Groupe INSA and INSA Rennes.
https://github.com/stephane-klein/typst-sklein-resume-poc
https://raw.githubusercontent.com/stephane-klein/typst-sklein-resume-poc/main/resume.typ
typst
#let headerFont = ("Roboto") #import "@preview/fontawesome:0.1.0": * #import "template.typ": * // Configuration #let firstName = "Stéphane" #let lastName = "Klein" #let personalInfo = ( email: "<EMAIL>", phone: "+33 6 61 48 76 04", github: "stephane-klein", linkedin: "stephanekleinfrommetz", homepage: "sklein.xyz", location: "Paris" ) #let colors = ( subtlegray: rgb("#ededee"), lightgray: rgb("#343a40"), darkgray: rgb("#212529"), ) #let awesomeColors = ( skyblue: rgb("#0395DE"), red: rgb("#DC3522"), nephritis: rgb("#27AE60"), concrete: rgb("#95A5A6"), darknight: rgb("#131A28"), ) #let themeColor = awesomeColors.red // Layout #let fontList = ("Source Sans Pro", "Font Awesome 6 Brands", "Font Awesome 6 Free") #set text( font: fontList, weight: "regular", size: 9pt, ) #set align(left) #set page( paper: "a4", margin: ( left: 1.4cm, right: 1.4cm, top: .8cm, bottom: .4cm, ), header: context { if counter(page).get().first() == 1 [ #pad( y: -20pt, [ #set text( size: 6pt, fill: rgb("#aaa") ) #align( right, [ Ce document date du 26 mars 2024\ Dernière version disponible sur\ https://cv.sklein.xyz\ ] ) ] ) ] }, footer: pad( y: -10pt, [ #set text( size: 8pt, fill: rgb("#aaa") ) #align( right, counter(page).display("1 sur 1", both:true) ) ] ) ) #show footnote.entry: it => { set par(hanging-indent: 0.7em) set text(fill: rgb("#aaa")) let loc = it.note.location() numbering( "1: ", ..counter(footnote).at(loc), ) it.note.body } // Helper utility #let hBar() = [ #h(5pt) | #h(5pt) ] // Section 1 : Header #table( columns: 1fr, inset: 0pt, stroke: none, row-gutter: 6mm, align: center, // Section 1.1 : Firstname Lastname [ #text( font: headerFont, size: 32pt, weight: "light", fill: colors.darkgray, firstName ) #h(5pt) #text( font: headerFont, size: 32pt, weight: "bold", lastName ) ], // Section 1.2 : header info [ #text( size: 10pt, fill: themeColor, box({ // Email fa-envelope() h(5pt) link("mailto:" + personalInfo.email)[#personalInfo.email] hBar() // Phone fa-phone() h(5pt) [#personalInfo.phone] hBar() // Homepage fa-home() h(5pt) [#personalInfo.homepage] hBar() // Location fa-location-dot() h(5pt) [#personalInfo.location] }) ) ], // Section 1.3 : quote [ #text( size: 10pt, weight: "medium", style: "italic", fill: themeColor, [ Software Craftsman avec plus de 20 ans d'expérience pouvant être Individual Contributor ou Manager ] ) ] ) #v(16pt) #Section( title: ("Qui", " suis-je ?"), content: ([ Je suis un artisan développeur de plus de 20 ans d'expérience. De 2019 à 2023, j'ai occupé le poste de CTO dans une startup parisienne, que j'ai rejoint juste après son financement initial. Avec mes collègues, nous avons élargi l'équipe tech de 1 à 16 personnes au cours de deux levées de fonds successives d'environ 5 M€ et 20 M€. Depuis septembre 2023, je travaille sur le développement d'une application SaaS que j'ai lancé avec trois co-fondateurs. ],) ) #Section( title: ("Mes", " objectifs professionnels"), content: ([ Depuis avril 2024, je suis à la recherche de missions freelance de 4 à 8 jours par mois.\ À partir de septembre 2024, il est possible que je sois à la recherche de mission de 16 jours par mois, en freelance ou en tant que salarié. ],) ) #Section( title: ("Pro", "jets susceptibles de m'intéresser"), content: ([ Je suis ouvert aux missions de Contributeur Individuel#footnote[Individual Contributor : https://www.urbandictionary.com/define.php?term=Individual+Contributor] comme de management, au sein de toutes petites structures ou des ETI. 
],) ) #Section( title: ("Typ", "es de projets qui ne m'intéressent pas"), content: ([ Actuellement, les secteurs tels que les DeFi, l'AdTech et les jeux vidéos ne m'intéressent pas.\ Les projets d'applications mobiles natives ne sont pas non plus dans mes recherches actuelles, bien que ce soit un domaine qui m'intéresse, je n'ai pas encore développé de compétences spécifiques dans ce secteur. Je ne souhaite pas manager une équipe non francophone. Le management demande de la finesse et de la subtilité en communication, en raison de mon niveau d'anglais oral, je ne me sens pas en mesure de gérer efficacement une équipe non francophone. ],) ) // Section 3 : Areas of Expertise #Section( title: ("Dom", "aine d'expertise"), content: ([ *Frontend Development :* JavaScript (front), ReactJS, Svelte, SvelteKit, HTML/CSS avancé, TailwindCSS, Yjs *Backend Development :* Go, Python, JavaScript (back), GraphQL, REST, gRPC, Playwright, Écriture basique de script Bash, Singer (ETL) *Database :* SQL, PostgreSQL avancé, PL/pgSQL, SQLite, Redis, Clickhouse, Neo4j (Cypher), Metabase *DevOps :* Docker, Vagrant, Postfix, Nginx, Ansible, Terraform, Grafana, Prometheus, Loki, Kubernetes, Debian/Ubuntu/RedHat, Baremetal, AWS, Scaleway, OVH, Vector (data pipeline) *Software Engineering :* Git, Scrum, TDD, Lean, GitOps ],) ) #Section( title: ("Exp", "érience professionnelle"), content: yaml("resume.yaml").experiences.map(element => [ #CvEntry( jobTitle: element.jobTitle, date: element.date, companyName: element.companyName, location: element.location, description: [ - #lorem(40) - #lorem(30) ] ) ] ) )
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/ascii-ipa/1.1.0/ascii-ipa.typ
typst
Apache License 2.0
#import "src/lib.typ": branner, praat, sil, xsampa, phonetic, phnt, phonemic, phnm, orthographic, orth, prosodic, prsd
https://github.com/OrangeX4/typst-pinit
https://raw.githubusercontent.com/OrangeX4/typst-pinit/main/lib.typ
typst
MIT License
#import "simple-arrow.typ": simple-arrow, double-arrow #import "pinit-fletcher.typ": pinit-fletcher-edge #import "pinit-core.typ": * // ----------------------------------------------- // Libs // ----------------------------------------------- /// Draw a rectangular shape on the page **containing all pins** with optional extended width and height. /// /// - `dx`: [`length`] &mdash; Offset X relative to the min-left of pins. /// - `dy`: [`length`] &mdash; Offset Y relative to the min-top of pins. /// - `extended-width`: [`length`] &mdash; Optional extended width of the rectangular shape. /// - `extended-height`: [`length`] &mdash; Optional extended height of the rectangular shape. /// - `pin1`: [`pin`] &mdash; One of these pins. /// - `pin2`: [`pin`] &mdash; One of these pins. /// - `pin3`: [`pin`] &mdash; One of these pins, optionally. /// - `...args`: Additional named arguments or settings for [`rect`](https://typst.app/docs/reference/visualize/rect/), like `fill`, `stroke` and `radius`. #let pinit-rect( dx: 0em, dy: -1em, extended-width: 0em, extended-height: 1.4em, ..args, ) = { pinit( ..args.pos(), callback: (..positions) => { positions = positions.pos() let min-x = calc.min(..positions.map(loc => loc.x)) let max-x = calc.max(..positions.map(loc => loc.x)) let min-y = calc.min(..positions.map(loc => loc.y)) let max-y = calc.max(..positions.map(loc => loc.y)) absolute-place( dx: min-x + dx, dy: min-y + dy, rect( width: max-x - min-x + extended-width, height: max-y - min-y + extended-height, ..args.named(), ), ) }, ) } /// Highlight a specific area on the page with a filled color and optional radius and stroke. It is just a simply styled `pinit-rect`. /// // - `fill`: [`color`] &mdash; The fill color for the highlighted area. // - `radius`: [`length`] &mdash; Optional radius for the highlight. // - `stroke`: [`stroke`] &mdash; Optional stroke width for the highlight. // - `dx`: [`length`] &mdash; Offset X relative to the min-left of pins. // - `dy`: [`length`] &mdash; Offset Y relative to the min-top of pins. // - `extended-width`: [`length`] &mdash; Optional extended width of the rectangular shape. // - `extended-height`: [`length`] &mdash; Optional extended height of the rectangular shape. // - `pin1`: [`pin`] &mdash; One of these pins. // - `pin2`: [`pin`] &mdash; One of these pins. // - `pin3`: [`pin`] &mdash; One of these pins, optionally. // - `...args`: Additional arguments or settings for [`pinit-rect`](#pinit-rect). #let pinit-highlight( fill: rgb(255, 0, 0, 20), radius: 5pt, stroke: 0pt, dx: 0em, dy: -1em, extended-width: 0em, extended-height: 1.4em, ..args, ) = { pinit-rect( fill: fill, radius: radius, stroke: stroke, dx: dx, dy: dy, extended-width: extended-width, extended-height: extended-height, ..args, ) } /// Draw a line on the page between two specified pins with an optional stroke. /// /// - `stroke`: [`stroke`] &mdash; The stroke for the line. /// - `start-dx`: [`length`] &mdash; Offset X relative to the start pin. /// - `start-dy`: [`length`] &mdash; Offset Y relative to the start pin. /// - `end-dx`: [`length`] &mdash; Offset X relative to the end pin. /// - `end-dy`: [`length`] &mdash; Offset Y relative to the end pin. /// - `start`: [`pin`] &mdash; The start pin. /// - `end`: [`pin`] &mdash; The end pin. 
#let pinit-line( stroke: 1pt, start-dx: 0pt, start-dy: 0pt, end-dx: 0pt, end-dy: 0pt, start, end, ) = { pinit( start, end, callback: (start-pos, end-pos) => { absolute-place( line( stroke: stroke, start: ( start-pos.x + start-dx, start-pos.y + start-dy, ), end: ( end-pos.x + end-dx, end-pos.y + end-dy, ), ), ) }, ) } /// Draw an line from a specified pin to a point on the page with optional settings. /// /// - `stroke`: [`stroke`] &mdash; The stroke for the line. /// - `pin-dx`: [`length`] &mdash; Offset X of arrow start relative to the pin. /// - `pin-dy`: [`length`] &mdash; Offset Y of arrow start relative to the pin. /// - `body-dx`: [`length`] &mdash; Offset X of arrow end relative to the body. /// - `body-dy`: [`length`] &mdash; Offset Y of arrow end relative to the body. /// - `offset-dx`: [`length`] &mdash; Offset X relative to the pin. /// - `offset-dy`: [`length`] &mdash; Offset Y relative to the pin. /// - `pin-name`: [`pin`] &mdash; The name of the pin to start from. /// - `body`: [`content`] &mdash; The content to draw the arrow to. #let pinit-line-to( pin-dx: 5pt, pin-dy: 5pt, body-dx: 5pt, body-dy: 5pt, offset-dx: 35pt, offset-dy: 35pt, pin-name, body, ..args, ) = { pinit-line(pin-name, pin-name, start-dx: pin-dx, start-dy: pin-dy, end-dx: offset-dx, end-dy: offset-dy, ..args) pinit-place(pin-name, body, dx: offset-dx + body-dx, dy: offset-dy + body-dy) } /// Draw an arrow between two specified pins with optional settings. /// /// - `start-dx`: [`length`] &mdash; Offset X relative to the start pin. /// - `start-dy`: [`length`] &mdash; Offset Y relative to the start pin. /// - `end-dx`: [`length`] &mdash; Offset X relative to the end pin. /// - `end-dy`: [`length`] &mdash; Offset Y relative to the end pin. /// - `start`: [`pin`] &mdash; The start pin. /// - `end`: [`pin`] &mdash; The end pin. /// - `...args`: Additional arguments or settings for [`simple-arrow`](#simple-arrow), like `fill`, `stroke` and `thickness`. #let pinit-arrow( start-dx: 0pt, start-dy: 0pt, end-dx: 0pt, end-dy: 0pt, start, end, ..args, ) = { pinit( start, end, callback: (start-pos, end-pos) => { absolute-place( simple-arrow( start: ( start-pos.x + start-dx, start-pos.y + start-dy, ), end: ( end-pos.x + end-dx, end-pos.y + end-dy, ), ..args, ), ) }, ) } /// Draw an double arrow between two specified pins with optional settings. /// /// - `start-dx`: [`length`] &mdash; Offset X relative to the start pin. /// - `start-dy`: [`length`] &mdash; Offset Y relative to the start pin. /// - `end-dx`: [`length`] &mdash; Offset X relative to the end pin. /// - `end-dy`: [`length`] &mdash; Offset Y relative to the end pin. /// - `start`: [`pin`] &mdash; The start pin. /// - `end`: [`pin`] &mdash; The end pin. /// - `...args`: Additional arguments or settings for [`double-arrow`](#double-arrow), like `fill`, `stroke` and `thickness`. #let pinit-double-arrow( start-dx: 0pt, start-dy: 0pt, end-dx: 0pt, end-dy: 0pt, start, end, ..args, ) = { pinit( start, end, callback: (start-pos, end-pos) => { absolute-place( double-arrow( start: ( start-pos.x + start-dx, start-pos.y + start-dy, ), end: ( end-pos.x + end-dx, end-pos.y + end-dy, ), ..args, ), ) }, ) } /// Draw an arrow from a specified pin to a point on the page with optional settings. /// - `pin-dx`: [`length`] &mdash; Offset X of arrow start relative to the pin. /// - `pin-dy`: [`length`] &mdash; Offset Y of arrow start relative to the pin. /// - `body-dx`: [`length`] &mdash; Offset X of arrow end relative to the body. 
/// - `body-dy`: [`length`] &mdash; Offset Y of arrow end relative to the body. /// - `offset-dx`: [`length`] &mdash; Offset X relative to the pin. /// - `offset-dy`: [`length`] &mdash; Offset Y relative to the pin. /// - `double`: [`bool`] &mdash; Draw a double arrow, default is `false`. /// - `pin-name`: [`pin`] &mdash; The name of the pin to start from. /// - `body`: [`content`] &mdash; The content to draw the arrow to. /// - `...args`: Additional arguments or settings for [`simple-arrow`](#simple-arrow), like `fill`, `stroke` and `thickness`. #let pinit-point-to( pin-dx: 5pt, pin-dy: 5pt, body-dx: 5pt, body-dy: 5pt, offset-dx: 35pt, offset-dy: 35pt, double: false, pin-name, body, ..args, ) = { let arrow-fn = if double { pinit-double-arrow } else { pinit-arrow } arrow-fn(pin-name, pin-name, start-dx: pin-dx, start-dy: pin-dy, end-dx: offset-dx, end-dy: offset-dy, ..args) pinit-place(pin-name, body, dx: offset-dx + body-dx, dy: offset-dy + body-dy) } /// Draw an arrow from a point on the page to a specified pin with optional settings. /// /// - `pin-dx`: [`length`] &mdash; Offset X relative to the pin. /// - `pin-dy`: [`length`] &mdash; Offset Y relative to the pin. /// - `body-dx`: [`length`] &mdash; Offset X relative to the body. /// - `body-dy`: [`length`] &mdash; Offset Y relative to the body. /// - `offset-dx`: [`length`] &mdash; Offset X relative to the left edge of the page. /// - `offset-dy`: [`length`] &mdash; Offset Y relative to the top edge of the page. /// - `double`: [`bool`] &mdash; Draw a double arrow, default is `false`. /// - `pin-name`: [`pin`] &mdash; The name of the pin that the arrow to. /// - `body`: [`content`] &mdash; The content to draw the arrow from. /// - `...args`: Additional arguments or settings for [`simple-arrow`](#simple-arrow), like `fill`, `stroke` and `thickness`. #let pinit-point-from( pin-dx: 5pt, pin-dy: 5pt, body-dx: 5pt, body-dy: 5pt, offset-dx: 35pt, offset-dy: 35pt, double: false, pin-name, body, ..args, ) = { let arrow-fn = if double { pinit-double-arrow } else { pinit-arrow } arrow-fn(pin-name, pin-name, start-dx: offset-dx, start-dy: offset-dy, end-dx: pin-dx, end-dy: pin-dy, ..args) pinit-place(pin-name, body, dx: offset-dx + body-dx, dy: offset-dy + body-dy) }
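A minimal usage sketch of the helpers above, assuming the `pin` marker function is provided by `pinit-core.typ` (wildcard-imported at the top of this file), as in the published pinit package.

```typst
// Minimal sketch; `pin` is assumed to come from "pinit-core.typ" via the
// wildcard import at the top of lib.typ.
#import "lib.typ": *

A simple #pin(1)highlighted phrase#pin(2) in running text.

// Shade the area spanned by pins 1 and 2, then point at pin 2 with an arrow.
#pinit-highlight(1, 2)
#pinit-point-to(2, [a remark about the phrase])
```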
https://github.com/mxsdev/lee-top-man-solutions
https://raw.githubusercontent.com/mxsdev/lee-top-man-solutions/main/solutions.typ
typst
MIT License
#import "@preview/ctheorems:1.1.2": * #show: thmrules.with(qed-symbol: $square$) #set page(width: 16cm, height: auto, margin: 1.5cm) #set heading(numbering: "1-1") #set enum(numbering: "(a)") #let problem = thmbox("theorem", "Problem", fill: rgb("#ffeeee")).with(numbering: "1-1") #let proof = thmproof("proof", "Proof") #let interior = $op("Int")$ #outline() = Introduction = Topological Spaces #problem(number: "2-5")[ For each of the following properties, give an example consisting of two subsets $X, Y subset.eq RR^2$, both considered as topological spaces with their Euclidian topologies, together with a map $f : X -> Y$ that has the indicated property. + $f$ is open but neither closed nor continuous. + $f$ is closed but neither open nor continuous. + $f$ is continuous but neither open nor closed. + $f$ is continuous and open but not closed. + $f$ is continuous and closed but not open. + $f$ is open and closed but not continuous. ] #proof[ Define the following topological spaces: - $D$ is the discrete topology on ${x, y}$ - $T$ is the trivial topology on ${x, y}$ - $X$ is the one-point topology on the set ${x, y, z}$ on $x$ Then we can proceed like so: + Consider the inclusion map $f : T -> X$. This map is not continuous because the preimage $f^(-1)({x}) = {x}$ of the open set in $X$ is not open in $T$. The map is also not closed because the image $f({x, y}) = {x, y}$ is not closed in $X$. The image of the lone non-null open set $f({x, y}) = {x, y}$ is open, however, implying that the map is open. + Consider the map $f : T -> X$ mapping $x |-> y$ and $y |-> z$. This map is not continuous because the preimage of the open set ${y}$ in $X$ is not open in $T$. The map is also not open because the image $f({x, y}) = {y, z}$ is not open in $X$. The image of the lone non-null closed set $f({x, y}) = {y, z}$ is closed, however, implying that the map is closed. + Consider the identity map $f : D -> T$. This map is continuous (since any map from a discrete space is continuous), but neither open nor closed, since the image of the clopen set ${x}$ in $D$ is neither closed nor open in $T$. + Consider the constant map $f : D -> X$ sending all elements of $T$ to $x$. This map is continuous since any map from a discrete space is continuous. The image of any set in $D$ is either $emptyset$ or ${x}$, both of which are open in $X$. In particular, the image of any open set is open. But the image of the closed set ${x}$ in $D$ is ${x}$, which is not closed in $X$. + Consider the constant map $f : D -> X$ sending all elements of $T$ to $y$. Similar to (d), this map is continuous, but the image of any set in $T$ is either $emptyset$ or ${y}$, both of which are closed. Similar to the reasoning in (d), this map is closed but not open. + Consider the identity map $f : T -> D$. This map is open and closed, since the image of the lone non-null clopen set ${x, y}$ is clopen in $D$, but it is also not continuous because the preimage $f^(-1)({x}) = {x}$ of the open set in $D$ is not open in $T$. ] #problem(number: "2-6")[ Suppose $X$ and $Y$ are topological spaces, and $f : X -> Y$ is a continuous map. + $f$ is continuous if and only if $f(overline(A)) subset.eq overline(f(A))$ for all $A subset.eq X$. + $f$ is closed if and only if $f(overline(A)) supset.eq overline(f(A))$ for all $A subset.eq X$. + $f$ is continuous if and only if $f^(-1)(interior B) subset.eq interior f^(-1)(B)$ for all $B subset.eq Y$. + $f$ is open if and only if $f^(-1)(interior B) supset.eq interior f^(-1)(B)$ for all $B subset.eq Y$. 
] #proof[ We will use repeatedly the following set-theoretic identities that hold for any function $f : X -> Y$ and subsets $A subset.eq X$, $B subset.eq Y$: - $f(A) subset.eq B <==> A subset.eq f^(-1)(B)$ (adjunction identity) - $f^(-1)(f(A)) supset.eq A$ (unit identity) - $f(f^(-1)(B)) subset.eq B$ (co-unit identity) + ($==>$) By the "unit identity" above, we have, $ A subset.eq f^(-1)(f(A)) subset.eq f^(-1)(overline(f(A))) $ From which it follows that, $ overline(A) subset.eq overline(f^(-1)(overline(f(A)))) = f^(-1)(overline(f(A))) $ Where the last equality follows from the fact that $f$ is continuous, so that $f^(-1)(overline(f(A)))$ is closed. Then this is equivalent to, $ f(overline(A)) subset.eq overline(f(A)) $ By the "adjunction identity" above. // Suppose $f$ is continuous and $x in overline(A)$. We wish to show that $f(x) in overline(f(A))$. Suppose $U$ is a neighborhood of $f(x)$, so that by continuity $V := f^(-1)(U)$ is a nhood of $x$. Since $x in overline(A)$, there is some $y in A sect f^(-1)(U)$. Then $f(y) in f(A)$ and $f(y) in U$ so that $U$ intersects $f(A)$. ($<==$) Suppose $f(overline(A)) subset.eq overline(f(A))$ for all $A subset.eq X$, and $U$ is a closed set in $Y$. We wish to show that $f^(-1)(U)$ is closed in $X$. First note that by the "co-unit identity" above: $ f(f^(-1)(U)) subset.eq U $ So, $ overline(f(f^(-1)(U))) subset.eq overline(U) = U $ Where the last equality follows from $U$ closed. Then by the assumption, $ f(overline(f^(-1)(U))) subset.eq overline(f(f^(-1)(U))) subset.eq U $ So that $overline(f^(-1)(U)) subset.eq f^(-1)(U)$, implying that $f^(-1)(U)$ is closed. + ($==>$) Suppose $f$ is closed. Then $f(overline(A))$ is closed since $overline(A)$ is closed so $overline(f(overline(A))) subset.eq f(overline(A))$. Then $A subset.eq overline(A) ==> f(A) subset.eq f(overline(A)) ==> overline(f(A)) subset.eq overline(f(overline(A)))$, and thus, $ overline(f(A)) subset.eq overline(f(overline(A))) subset.eq f(overline(A)) $ ($<==$) Suppose $overline(f(A)) subset.eq f(overline(A))$ for all $A subset.eq X$ and $U$ is closed in X. Since $U$ is closed $overline(U) = U$, so $overline(f(U)) subset.eq f(overline(U)) = f(U)$. It follows that $f(U)$ is closed. + First note that, $ &f^(-1)(interior B) subset.eq interior f^(-1)(B) quad forall B subset.eq Y \ <==>& X - interior f^(-1)(B) subset.eq X - f^(-1)(interior B) = f^(-1)(X - interior B) quad forall B subset.eq Y \ <==>& overline(X - f^(-1)(B)) subset.eq f^(-1)(overline(X - B)) quad forall B subset.eq Y \ <==>& overline(f^(-1)(X - B)) subset.eq f^(-1)(overline(X - B)) quad forall B subset.eq Y \ <==>& overline(f^(-1)(B)) subset.eq f^(-1)(overline(B)) quad forall B subset.eq Y $ Next we can show that, $ overline(f^(-1)(B)) subset.eq f^(-1)(overline(B)) quad forall B subset.eq Y <==> f(overline(A)) subset.eq overline(f(A)) quad forall A subset.eq X $ Which is equivalent to, $ f(overline(f^(-1)(B))) subset.eq overline(B) quad forall B subset.eq Y <==> overline(A) subset.eq f^(-1)(overline(f(A))) quad forall A subset.eq X $ Since for any subsets $A subset.eq X$ and $B subset.eq Y$ we have $f(A) subset.eq B <==> A subset.eq f^(-1)(B)$. 
To show this we have: $ (==>) quad &overline(f(A)) supset.eq f(overline(f^(-1)(f(A)))) supset.eq f(overline(A)) \ (<==) quad &overline(f^(-1)(B)) subset.eq f^(-1)(overline(f(f^(-1)(B)))) subset.eq f^(-1)(overline(B)) $ Where in either case, the leftmost inclusion follows from substitution of either $overline(f(A))$ or $overline(f^(-1)(B))$ into the antecedent, and the rightmost inclusion follows from the "(co-)unit" identities. Therefore the subset condition in (c) is equivalent to the one in (a). + ($==>$) We have, $ &interior f^(-1)(B) subset.eq f^(-1)(B)\ ==> &f(interior f^(-1)(B)) subset.eq f(f^(-1)(B)) subset.eq B $ But since $f$ is open by assumption, that makes $f(interior f^(-1)(B))$ a subset not just of $B$ but of $interior B$. So: $ &f(interior f^(-1)(B)) subset.eq interior B \ ==> &interior f^(-1)(B) subset.eq f^(-1)(interior B) $ ($<==$) Let $U$ be an open set in $X$. Then, $ interior f^(-1)(f(U)) subset.eq f^(-1)(f(U)) subset.eq U $ So, $ interior f(U) subset.eq f(interior f^(-1)(f(U))) subset.eq f(U) $ Where the leftmost inclusion follows from the assumption applied to $f(U) subset.eq Y$. It follows that $f(U)$ is open. ]
https://github.com/Shambuwu/stage-docs
https://raw.githubusercontent.com/Shambuwu/stage-docs/main/Verantwoordingsrapport_Levi_Leuwol.typ
typst
#align( center, text( size: 1.2em, [ *Verkenning van Voedselrecepten en Ingrediënten* \ Een Verantwoordingsrapport over de Ontwikkeling van een Intuïtieve Datavisualisatietool \ ], ) ) #align( center, figure( image("bijlagen/OIG.png", width: 400pt ) ) ) #let date = datetime( year: 2023, month: 6, day: 30 ) #place( bottom + left, text[ *Student:* <NAME> \ *Studentnummer:* 405538 \ *Datum:* #date.display() \ *Onderwerp:* Verantwoordingsrapport \ *Opleiding:* HBO-ICT \ *Studiejaar:* 3 \ ] ) #place( bottom + right, image("bijlagen/logo.png", width: 175pt) ) #pagebreak() #set text( font: "Linux Libertine", ) = Voorwoord Voor u ligt het verantwoordingsrapport dat mijn persoonlijke groei en competentieontwikkeling weergeeft. Ik presenteer in dit rapport mijn ervaringen, verworven kennis en vaardigheden. Graag wil ik van deze gelegenheid gebruik maken om mijn waardering en dankbaarheid uit te spreken naar mijn medestudent <NAME>. De voortdurende ondersteuning, samenwerking en motivatie gedurende dit traject waren van onschatbare waarde voor het behalen van de gestelde doelen. Samen hebben we kennis gedeeld en elkaar geïnspireerd om het beste uit onszelf te halen. Zonder zijn waardevolle bijdrage zou dit verantwoordingsrapport niet compleet zijn. ~ Le<NAME>, #date.display() #pagebreak() #set heading(numbering: "1.1") #show heading: it => { set block(below: 10pt) set text(weight: "bold") align(left, it) } #outline( title: [ *Inhoudsopgave* ], ) #set page( numbering: "1 / 1", number-align: right, ) = Samenvatting In dit verslag wordt de ontwikkeling en implementatie van een datavisualisatietool voor de voedingsindustrie weergegeven. Het doel van het project was om een gebruiksvriendelijke webapplicatie te ontwerpen en ontwikkelen waarmee gebruikers recepten en ingrediënten kunnen zoeken, filteren, visualiseren en alternatieve ingrediënten kunnen identificeren. Het ontwerpproces omvatte een grondige analyse van de vereisten, het opstellen van gedetailleerde functionele specificaties en het ontwikkelen van een passende architectuur. De tool biedt gebruikers een intuïtieve zoek- en filterfunctionaliteit op basis van verschillende criteria, zoals naam, type en voedingswaarde. Daarnaast kunnen gebruikers de gegevens visualiseren en de relaties tussen de knooppunten identificeren. Een vervangingsfunctie stelt gebruikers in staat om alternatieve ingrediënten te vergelijken op basis van voedingswaarde, smaak en vergelijkbare recepten. Tijdens de uitvoering van het project zijn er enkele uitdagingen opgetreden, met name in de samenwerking en het nakomen van de planning, wat resulteerde in een gebrek aan overzicht en evaluatie. Om dit in de toekomst te verbeteren, worden aanbevelingen gedaan voor een gestructureerdere aanpak met duidelijke doelen en mijlpalen. Het uiteindelijke resultaat van het project is een functionele datavisualisatietool die waardevolle inzichten biedt aan de voedingsindustrie. De tool kan bijdragen aan het verduurzamen van recepten, het identificeren van alternatieven en het versnellen van productinnovatie. Het succesvolle ontwikkelingsproces van deze datavisualisatietool benadrukt het belang van een goed inzicht in voedingsingrediënten en het vermogen om recepten aan te passen. Door het gebruik van deze tool kunnen mkb-bedrijven in de voedingsindustrie traditionele recepten transformeren naar plantaardige alternatieven zonder concessies te doen aan smaak en textuur. #pagebreak() = Inleiding == Context en achtergrond Tijdens de stageperiode is er gewerkt aan een datavisualisatietool. 
Het project is in opdracht van <NAME> Tenwise gedaan. Tenwise is gespecialiseerd in data-analyse. Op basis van publieke databronnen heeft Tenwise een netwerk van voedselrecepten gebouwd. Op basis van AI kan er met de relaties voorspeld worden welke recepten vervangen kunnen worden door gezondere en goedkopere alternatieven. Tenwise heeft niet alleen behoefte bij het analyseren van dit netwerk met AI, maar ook bij het interactief verkennen van de data via een webinterface. == Aanleiding Bedrijven in de voedingsindustrie staan voor de uitdaging om recepten, processen en producten voortdurend aan te passen aan de veranderende markt en nieuwe wetgeving. Inzicht in de eigenschappen van voedingsingrediënten is essentieel om deze aanpassingen succesvol te kunnen maken. Het ontwikkelen van recepten met uitsluitend plantaardige ingrediënten, terwijl traditioneel dierlijke ingrediënten worden gebruikt, vormt een specifieke uitdaging. Om dit probleem aan te pakken, hebben voedingsontwikkelaars Exter en Euroma samengewerkt met het Kenniscentrum Biobased Economy van de Hanzehogeschool Groningen, onder leiding van lector <NAME> en docent-onderzoeker <NAME>. Met de steun van een KIEM-subsidie hebben zij zich gericht op data science voor de voedingsindustrie. Door gebruik te maken van tekst-mining en algoritmen hebben ze meer dan één miljoen recepten, bijna net zoveel ingrediënten, 1668 moleculen en 35 miljoen wetenschappelijke papers geïndexeerd. Hierdoor ontstond een netwerkstructuur waarin de relaties tussen ingrediënten, moleculen, recepten en beschrijvingen zichtbaar werden. Dit biedt voedingsontwikkelaars snel inzicht in de samenstelling, bereidingswijze, smaak en textuur van recepten, waardoor ze kunnen zoeken naar goedkopere en duurzame alternatieven. == Opdracht en doelstellingen De opdracht die is toevertrouwd door de opdrachtgever is het ontwikkelen van een intuïtieve webapplicatie die gebruikers helpt bij het verkennen van voedselrecepten en ingrediënten. De focus ligt hierbij op het oplossen van de volgende problemen: beperkte kennis over queries, gebrek aan overzichtelijke informatie en het ontbreken van contextuele informatie. De doelstel van de opdracht is tweedelig. Ten eerste wordt er gestreven naar een gebruiksvriendelijke en intuïtieve zoekfunctionaliteit. Dit omvat het implementeren van zoektechnieken en -functies, zodat gebruikers zonder verstand van de Cypher query-taal zoekopdrachten kunnen uitvoeren. Daarnaast dienen gebruikers voorzien te worden van overzichtelijke informatie over recepten en ingrediënten, zodat ze snel en gemakkelijk relevante informatie kunnen vinden. Ook is de prestatie van de webapplicatie een belangrijk aandachtspunt. De webapplicatie moet snel en responsief zijn, zodat gebruikers snel en efficiënt kunnen zoeken. Ten tweede moet er contextuele informatie geleverd worden, die gebruikers helpt om de data beter te begrijpen. Dit omvat het presenteren van aanvullende informatie over de ingrediënten, zoals vervangingsmogelijkheden, voedingswaarde en smaak. Met deze contextuele informatie kunnen gebruikers betere beslissing nemen, de juiste conclusies trekken en hun recepten aanpassen op basis van hun behoeften en voorkeuren. == Probleemstelling Het verkennen van een dataset kan lastig zijn voor gebruikers zonder verstand van de achterliggende technologieën. Zonder een intuïtieve en gebruiksvriendelijke tool kunnen zij moeite hebben met het vinden van specifieke recepten en het identificeren van alternatieve ingrediënten. 
Challenges that can arise here include: - *Limited search options:* Users without knowledge of the Cypher query language may find it difficult to perform targeted searches. They may lack knowledge of the right search terms, filter options or query syntax. This can result in an overwhelming number of unrelated results. - *Lack of well-organised information:* An intuitive tool can help users find the right information. For example, it can give an overview of the most common ingredients in a recipe, or a list of alternative ingredients that users can use to adapt a recipe. - *Lack of contextual information:* Users may struggle to understand the context of a recipe. For example, they may not know which ingredients occur most often in a particular recipe, or which ingredients are best replaced by an alternative. Given the complexity and the challenges that users without query experience may face when exploring food recipes and identifying alternative ingredients, users run the risk of finding incorrect information or drawing the wrong conclusions. This can lead to misconceptions about the data and to poor decisions.
// == Purpose of this report
// The purpose of this report is to give a detailed overview of the competences that were developed and applied during the internship. The report serves as evidence of the student's professional development and of the ability to apply the knowledge and skills gained in the HBO-ICT programme in practice.
// In addition, the report provides insight into how the internship contributes to the student's personal growth and skills. The techniques and methods applied during the internship are described, along with how they contribute to the student's professional development.
// This report documents the professional development and growth of the student within the HBO-ICT programme and serves as a reflection on the internship experience.
== Overview of the structure
This report uses the following structure to cover the various aspects of the internship:
#box(height: 225pt, columns(2)[ #set par(justify: true)
+ *Introduction:* The introduction describes the context and background of the internship and explains the problem statement of the internship project.
+ *Project approach and risks:* This chapter describes the project approach and the risks of the project, and explains how the project goals were operationalised.
+ *HBO-i competences:* This chapter explains how the various HBO-i competences were applied during the internship.
+ *Conclusion and recommendations:* This chapter draws a conclusion about the internship experience and the various competences.
+ *Reflection:* This chapter reflects on the internship experience and the competences developed during the internship.
+ *References:* This chapter lists the references and sources used while writing the report.
+ *Glossary:* This chapter defines the key terms used in the report.
] )
#pagebreak()
= Project approach and risks
== Operationalising the goals
The following requirements were drawn up to operationalise the goals of the assignment: - Implement search functionality that allows users to search for recipes and ingredients based on various criteria, such as name, type and nutritional value. - Implement filter functionality that allows users to filter recipes and ingredients based on various criteria, such as name, type and nutritional value. - Implement a visualisation feature that allows users to visualise the data and identify the relationships between the nodes. - Implement a substitution feature that allows users to identify and compare alternative ingredients. Similar ingredients are shown based on nutritional value, taste and the number of shared recipes. - To guarantee the performance of the web application, it must be fast and responsive. This means that the web application must respond to user interaction within an acceptable time. The web application should meet the Google PageSpeed Insights#footnote[https://developers.google.com/speed/docs/insights/v5/about] guidelines for a good user experience.
== Supporting research
Google PageSpeed Insights#footnote[https://developers.google.com/speed/docs/insights/v5/about#categories] provides insight into the performance criteria that matter for a good user experience. In this case, the Largest Contentful Paint (LCP) is the most relevant. This metric measures the load time of the largest visible element on a web page. For the data visualisation tool, the LCP refers to the load time of the graph displayed on the page. The LCP must complete within 2.5 seconds to guarantee a good user experience. The Interaction to Next Paint (INP) is also an important metric for the web application. It measures the time between a user interaction and the subsequent visual update of the page. For the data visualisation tool, the INP indicates how quickly the page responds to user interactions such as changing the graph view, filtering data and running searches. The INP must stay below 0.2 seconds to guarantee a good user experience. Based on these metrics, the performance of the web application can be operationalised. The performance of the web application must meet the following requirements: - The LCP (the load time of the graph) must be within 2.5 seconds. - The INP (the response time of the page) must be less than 0.2 seconds. These requirements safeguard the performance of the web application.
== Risks
Several risks can arise while carrying out this project. It is important to identify these risks and take measures to control them. For this purpose, a list of possible risks has been drawn up. The severity of each risk is rated on a scale of 1 to 5, where 1 stands for a low risk and 5 for a high risk. The following risks have been identified:
#table( columns: (auto, 1fr, auto), [*Risk*], [*Description*], [*Severity*], [Technical complexity], [Developing the functionality can be technically complex. It can be challenging to implement all desired functionality within the allotted time.], [4], [Performance problems], [Performance problems can have a significant impact on the user experience. It is important to keep an eye on the stated performance criteria.], [4], [Insufficient collaboration and communication], [A lack of collaboration and communication can have several negative consequences.], [3], [Unintended access to the application], [The project contains no user accounts or login functionality, which means the application is accessible to everyone. This can lead to unintended access to the application.], [2] )
// #pagebreak()
// = Product description
// This section describes the web application I developed during the internship. The product is an intuitive and user-friendly visualisation tool that supports users without query experience in exploring graph data.
// == Interactive visualisations
// One of the key features of the product is the ability to generate interactive visualisations. Through the web application, users can choose various settings, such as the size of the visualisation, the colours of the nodes and relationships, and the layout of the visualisation. The visualisation is then generated based on the selected settings. This visual representation of the data can help users understand the data better and identify the relationships between the nodes.
// == Searching and filtering
// The web application offers powerful search and filter options that let users look up specific information and filter relevant results, for example by ingredient type, recipe name or ingredient name. This allows users to quickly find specific results, identify connected nodes and analyse the relationships between the nodes. These search and filter options let users explore the data and find the information they need.
// == Intuitive user interface
#pagebreak()
= HBO-i competences
== Analysis
As part of the project, the Analysis competence was applied. Work was done on identifying core elements and customer and user needs, and on analysing user experiences. An important aspect of the analytical approach was identifying the core elements of the assignment, in collaboration with the client, to get a clear picture of the goals and requirements. These core elements were then translated into a concrete technical solution, with functionality and usability at the centre. The work proceeded iteratively with the client. Through regular feedback sessions, the client's core values and requirements were continuously evaluated and analysed. This iterative process ensured that the design matches the client's wishes and needs as closely as possible. In addition, the project was continuously evaluated from the user's perspective. User tests and user feedback sessions were organised to evaluate the project's progress. During the user tests, users were asked to use the web application and give feedback on its functionality and usability. The users' input was used to make adjustments and optimisations to the design and functionality of the web application.
Finally, extensive research was also carried out into various technologies for database integration. One of the important aspects of the application was being able to work effectively and efficiently with graph data. Several frameworks were analysed for integrating a graph database. One of the technologies analysed in depth is Neo4j-OGM. This library provides an abstraction layer on top of the Neo4j database, enabling object-oriented interaction with the database. During the analysis of Neo4j-OGM, its functionality, performance and support were examined. Comparable libraries and technologies were also examined to better understand the advantages and disadvantages. Based on this analysis, Neo4j-OGM was chosen for integrating the Neo4j database.
== Advice
As part of the project, the Advice competence was applied by providing effective advice on interaction design and on realising the desired functionality. - *Advice on the design:* Throughout the project, advice was given on the design of the web application. Active advice was given on the goals of each iteration of the project, taking the client's wishes and needs into account. The advice concerned the design of the specific features to be tackled in each iteration, to ensure a structured and successful development process. - *Advice on the user experience:* Advice was given on the user experience of the web application. The various interactions users can perform were examined, along with how these interactions affect the user experience, and advice was given on how these interactions could be optimised to improve the user experience. - *Advice on technology trends:* Based on extensive research into technology trends and developments, advice was given on the technologies best suited for developing the web application. The advantages and disadvantages of various technologies were weighed, as well as how they could be applied to realise the desired functionality. - *Advice on the architecture:* Various components of the architecture were considered, such as the frontend, backend and database. Advice was given on the architecture of the application and how it could be optimised to realise the desired functionality. By actively advising on the design, the user experience, technology trends and the architecture of the application, an important contribution was made to the development of the web application. The advice contributed to realising the desired functionality and safeguarding the usability of the web application.
== Design
As part of the project, the Design competence was applied by creating a user-friendly and intuitive design for the web application. The following aspects were addressed while designing the application: - *Client-server database architecture:* Based on the advice mentioned above, a client-server database architecture was chosen. This means that the database resides on a central server, and the client accesses the database through the server. The client-server architecture was chosen to safeguard the performance of the web application. The server can work with the database more efficiently through additional abstraction layers. This reduces the load on the client and makes the web application faster and more responsive. - *Neo4j-OGM:* Neo4j-OGM was chosen as the abstraction layer on top of the Neo4j database, enabling object-oriented interaction with the database. Neo4j-OGM was chosen to reduce the complexity of the database interactions and to improve the performance of the web application. // - *Docker:* Docker is used to containerise the web application, meaning it runs in an isolated environment. Because it runs in an isolated environment, it is easier to deploy and distribute, so the web application can easily be installed and deployed on different systems. - *Scalability:* The web application was designed with scalability in mind. This means it is flexible in terms of scalability and extensibility. It is designed so that new features and components can easily be integrated, while large amounts of data can still be processed efficiently. - *UX design:* A user-friendly and intuitive design was created for the web application. The various interactions users can perform were examined, along with how these interactions affect the user experience. During the design process there was close collaboration with the client to safeguard the desired functionality and usability. The aspects mentioned above are important parts of the design of the web application. By implementing them, a user-friendly and intuitive design was created for the web application.
== Realisation
As part of the project, the Realisation competence was applied by developing the web application. Work was done on implementing the desired functionality and safeguarding the usability of the web application. During the project, the desired functionality was actively implemented in the web application. Programming and development skills were applied to implement the required features, including realising, configuring and integrating various components. Throughout the project, a lot of attention was also paid to optimising the web application and resolving bugs and technical issues. Through systematic testing and debugging, the aim was a stable and well-functioning application.
== Project-based working
For the Project-based working competence, a structured and organised approach was pursued throughout the project. This included setting clear goals, planning tasks and collaborating with the team and the client. Some examples of the project-based approach are: - *Goals:* Clear goals were formulated at the start of the project, which served as a tool for evaluating the project's progress. These goals were aimed at realising the desired functionality and meeting the client's requirements. - *Agile approach:* The work was done in an Agile way, using the Scrum method. This means working in short iterations, with a working product delivered at the end of each iteration. Scrum was chosen to safeguard the progress of the project and to realise the desired functionality. Working Agile made it possible to deal flexibly with changing requirements and new insights. This allowed the project to adapt to the client's changing needs, so adjustments could be made quickly and effectively. Finally, an important aspect of Scrum is transparency and collaboration. There was close collaboration with the team and the client to identify any challenges and to make decisions together. This contributed to a positive and effective collaboration. In conclusion, a structured and organised approach was maintained throughout the project, which contributed to a successful and effective development of the web application.
== Research
For the Research competence, a thorough and systematic approach was applied to collect and analyse relevant information about the performance of the web application. The research was aimed at gaining insight into the performance of the web application, identifying optimisation opportunities and implementing suitable technologies. Some examples of the research are: - *Performance analysis:* Extensive research was done into the performance of the application, evaluating aspects such as response times, load times and scalability. This included running tests and measurements to analyse the application's performance in different scenarios. Based on this analysis, bottlenecks and optimisation opportunities were identified. - *Caching techniques:* Research was done into caching techniques to improve the performance of the application. Several caching methods were investigated and implemented to store and retrieve frequently used data and query results efficiently. - *Database optimisation:* Research was done into optimisation opportunities for the graph database. Several techniques and configurations were evaluated and applied to improve database performance, including optimising queries and adding indexes. The research resulted in better responsiveness of the database. The research methods applied contributed to a better understanding of the performance aspects of the application and led to its successful optimisation.
#pagebreak()
= Conclusion and recommendations
== Conclusion
During the internship, a data visualisation tool was developed that lets users search, filter and visualise recipes and ingredients. The implementation of search, filter and visualisation functionality contributed to an intuitive and user-friendly web application. The web application gives users the ability to explore the data and find the information they need. By following a structured project approach and working closely with the client, a successful and effective development of the web application was ensured. The data visualisation tool lets users find recipes and ingredients based on various criteria, such as name and type. In addition, this data can be visualised so that users can identify the relationships between the nodes.
== Recommendations
Based on the project carried out and the results obtained, the following recommendations are made for further improvements and for the future development of the data visualisation tool: - *Improving the user experience:* Although the application is responsive and fast, further improvements can still be made to optimise the user experience. Various techniques could be applied, such as implementing a lazy-loading mechanism, meaning the visualisation is loaded based on the visible nodes. This could considerably improve the performance of the application. - *Use of the Neo4j-PHP-OGM package:* Because a fork of the Neo4j-PHP-OGM package is used, it is important to be aware of the risks. Since the fork is maintained by a smaller number of users, there may be limitations in terms of active development, bug fixes and security updates. It is therefore important to keep an eye on the development of the Neo4j-PHP-OGM package and to evaluate whether using the fork is still the best option.
#pagebreak()
= Reflection
In this chapter I would like to look back on the execution of the assignment and reflect on the product that was developed. With the knowledge I have gained, I want to highlight how this experience has shaped me. During the execution of the assignment I evaluated the product thoroughly. I am satisfied with the functionality and the user-friendly design of the web application. The search and filter functionality lets users look up specific information and filter relevant results, and the visualisation feature is powerful and intuitive. However, there are also points for improvement, mainly in project-based working. I noticed that it was sometimes difficult to evaluate the progress of the project and to set the right priorities. This led to some challenges, such as missing the deadline for implementing the substitution functionality. The challenge arose from the way my project partner and I worked together. Although we collaborated closely to realise the desired functionality, this also resulted in a lack of overview and evaluation. Our continuous awareness of the project's progress led to poor planning and limited documentation, since we were already aware of each other's developments. To improve this in the future, I want to pay more attention to creating a clear project plan and to setting clear goals and milestones earlier. This will put us in a better position to monitor progress and to identify any bottlenecks in time. In addition, I want to aim for a more structured approach to documenting our work, so that there is a more complete overview of how the project unfolded and of the decisions taken. All in all, I look back on a valuable experience. Carrying out this assignment has taught me a great deal of technical knowledge and skills. I am determined to keep developing this knowledge and these skills and to strive to deliver high-quality products.
#pagebreak()
= References
#align(left, table( columns: (auto, auto, auto), rows: (auto, auto, auto), align: left, inset: 10pt, stroke: none, [*Item*], [*Author*], [*Location*], [*Source code*], [], [], [Client], [<NAME>, <NAME>], [broncode -> frontend], [Server], [<NAME>, <NAME>], [broncode -> backend], [], [], [], [*Documents*], [], [], [Analysis: analysis report and insights], [<NAME>], [documenten -> analyserapport.pdf], [Advice: advisory report and recommendations], [<NAME>], [documenten -> adviesrapport.pdf], [Design: design document and rationale], [<NAME>], [documenten -> ontwerprapport.pdf], [Realisation: implementation report and execution], [<NAME>], [documenten -> implementatierapport.pdf], [Project-based working: project plan and evaluation], [<NAME>], [documenten -> projectrapport.pdf], [Research: research report and results], [<NAME>], [documenten -> onderzoeksrapport.pdf], [], [], [], [*Appendices*], [], [], [Hanzehogeschool Groningen logo], [Hanzehogeschool Groningen], [https://freebiesupply.com/logos/hanzehogeschool-groningen-logo/], [Title page figure], [DALL-E-2, OpenAI], [bijlagen -> OIG.png], ) )
#pagebreak()
= Glossary
#align(left, table( columns: (auto, auto), rows: (auto, auto), align: left, gutter: 3pt, stroke: none, [*Term*], [*Definition*], [Client-server database model], [An architecture model in which the database resides on a central server, while the client accesses the database over a network connection.], [Client], [A computer or device that is connected to a network and requests services or resources from another computer, known as the server.], [Server], [A computer or device responsible for receiving, processing and delivering services or resources to clients in a client-server model. It serves as a central resource that clients connect to in order to access the desired data or functionality.], [Database], [A structured collection of data that is stored and managed in an organised way.], [Graph database], [A type of database that is optimised for storing, managing and querying graph data.], [Neo4j], [An open-source graph database management system based on the property graph model.], [Symfony], [An open-source PHP framework for developing web applications.], [PHP], [A popular programming language that is widely used for developing web applications.], [Framework], [A software development platform that offers a set of tools, libraries and standard practices to make application development easier and faster.], [OGM (Object Graph Mapping)], [A technique that enables mapping and interaction between object-oriented applications and a graph database.], [Web application], [An application that can be used through a web browser and is accessible via the internet or an intranet.], [User interface (GUI)], [The visual and interactive part of a software application through which users communicate with it and perform tasks.], [Data visualisation], [Visually presenting data so that patterns, trends and insights are easier to understand and communicate.], [Backend], [The part of a software application responsible for data processing, logic and database interaction, usually on the server.], [Frontend], [The part of a software application that presents the user interface and enables user interaction, usually in the web browser.], [Frontend framework], [A software framework specifically designed for developing the user interface of, and interaction with, a web application.], [Backend framework], [A software framework specifically designed for developing the server side of a web application, including data processing and logic.], [API (Application Programming Interface)], [A set of rules and protocols that enables communication and interaction between different software applications.], [MVC (Model-View-Controller)], [A design pattern for structuring a software application in which the functionality is split into three components: the model, the view and the controller.], [Git], [A distributed version control system used to track changes in the source code of software projects.], [Continuous Integration (CI)], [A development practice in which developers regularly integrate their code into a shared repository, triggering automatic build and test processes.], [INP], [Interaction to Next Paint, a metric that measures the time between a user interaction and the subsequent visual update of the page.], [LCP], [Largest Contentful Paint, a metric that measures the load time of the largest visible element on a web page.], [Library], [A collection of pre-written code that developers can use to add specific functionality to their own software projects.], [Package], [An organised collection of code files and functionality that can be reused to make the development of software applications easier.], [Agile], [A flexible approach to software development in which teams collaborate, adapt to change and focus on delivering continuous value to the customer.], [Scrum], [An Agile project management framework in which the development process is divided into short iterative periods, called sprints, during which the team focuses on delivering working and valuable software.], ) )
https://github.com/frectonz/the-pg-book
https://raw.githubusercontent.com/frectonz/the-pg-book/main/book/134.%20control.html.typ
typst
control.html Founder Control Want to start a startup? Get funded by Y Combinator. December 2010

Someone we funded is talking to VCs now, and asked me how common it was for a startup's founders to retain control of the board after a series A round. He said VCs told him this almost never happened.

Ten years ago that was true. In the past, founders rarely kept control of the board through a series A. The traditional series A board consisted of two founders, two VCs, and one independent member. More recently the recipe is often one founder, one VC, and one independent. In either case the founders lose their majority.

But not always. <NAME> kept control of Facebook's board through the series A and still has it today. <NAME> has kept control of Zynga's too. But are these just outliers? How common is it for founders to keep control after an A round? I'd heard of several cases among the companies we've funded, but I wasn't sure how many there were, so I emailed the ycfounders list.

The replies surprised me. In a dozen companies we've funded, the founders still had a majority of the board seats after the series A round.

I feel like we're at a tipping point here. A lot of VCs still act as if founders retaining board control after a series A is unheard-of. A lot of them try to make you feel bad if you even ask — as if you're a noob or a control freak for wanting such a thing. But the founders I heard from aren't noobs or control freaks. Or if they are, they are, like <NAME>, the kind of noobs and control freaks VCs should be trying to fund more of.

Founders retaining control after a series A is clearly heard-of. And barring financial catastrophe, I think in the coming year it will become the norm.

Control of a company is a more complicated matter than simply outvoting other parties in board meetings. Investors usually get vetos over certain big decisions, like selling the company, regardless of how many board seats they have. And board votes are rarely split. Matters are decided in the discussion preceding the vote, not in the vote itself, which is usually unanimous. But if opinion is divided in such discussions, the side that knows it would lose in a vote will tend to be less insistent. That's what board control means in practice. You don't simply get to do whatever you want; the board still has to act in the interest of the shareholders; but if you have a majority of board seats, then your opinion about what's in the interest of the shareholders will tend to prevail.

So while board control is not total control, it's not imaginary either. There's inevitably a difference in how things feel within the company. Which means if it becomes the norm for founders to retain board control after a series A, that will change the way things feel in the whole startup world.

The switch to the new norm may be surprisingly fast, because the startups that can retain control tend to be the best ones. They're the ones that set the trends, both for other startups and for VCs.

A lot of the reason VCs are harsh when negotiating with startups is that they're embarrassed to go back to their partners looking like they got beaten. When they sign a termsheet, they want to be able to brag about the good terms they got. A lot of them don't care that much personally about whether founders keep board control. They just don't want to seem like they had to make concessions. Which means if letting the founders keep control stops being perceived as a concession, it will rapidly become much more common.

Like a lot of changes that have been forced on VCs, this change won't turn out to be as big a problem as they might think. VCs will still be able to convince; they just won't be able to compel. And the startups where they have to resort to compulsion are not the ones that matter anyway. VCs make most of their money from a few big hits, and those aren't them.

Knowing that founders will keep control of the board may even help VCs pick better. If they know they can't fire the founders, they'll have to choose founders they can trust. And that's who they should have been choosing all along.

Thanks to <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME> for reading drafts of this.
https://github.com/tingerrr/typst-test
https://raw.githubusercontent.com/tingerrr/typst-test/main/docs/book/src/reference/tests/annotations.md
markdown
MIT License
# Annotations

Tests may contain annotations at the start of the file. These annotations are placed on the leading doc comment of the file itself.

```typst
/// [ignore]
/// [custom: foo]
///
/// Synopsis:
/// ...

#import "/src/internal.typ": foo
...
```

Annotations may only be placed at the start of the doc comment on individual lines without anything between them (no empty lines or other content).

The following annotations exist:

|Annotation|Description|
|---|---|
|`ignore`|Takes no arguments, marks the test as part of the `ignored` test set, can only be used once.|
|`custom`|Takes a single identifier as an argument, marks the test as part of a custom test set of the given identifier, can be used multiple times.|

A test with an annotation like `[custom: foo]` can be selected with a test set like `custom(foo)`.
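For example, a test that should be skipped by default but still belong to two custom test sets could start like this (the identifiers `regression` and `slow` are only illustrative):

```typst
/// [ignore]
/// [custom: regression]
/// [custom: slow]
///
/// Synopsis:
/// Exercises the fallback path of `foo`.

#import "/src/internal.typ": foo
```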
https://github.com/xsro/xsro.github.io
https://raw.githubusercontent.com/xsro/xsro.github.io/zola/typst/Control-for-Integrator-Systems/lib/operation.typ
typst
// Sign function: returns -1, 0 or 1 depending on the sign of x.
#let sign(x) = {
  if calc.abs(x) == 0 { 0 }
  else if x > 0 { 1 }
  else if x < 0 { -1 }
}

// Signed power: sig(x, q) = sign(x) * |x|^q, i.e. |x|^q with the sign of x preserved.
#let sig(x, q) = {
  if x == 0 { 0 }
  else { sign(x) * calc.pow(calc.abs(x), q) }
}

// Saturation: clamps x to the interval [-th, th].
#let sat(x, th) = {
  if calc.abs(x) > th { sign(x) * th }
  else { x }
}
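// Illustrative self-checks (editorial addition, not part of the original library),
// using Typst's built-in `assert`; they render nothing and only fail if a
// definition above were changed.
#assert(sign(-3.2) == -1)
#assert(sig(-2, 2) == -4)     // signed power: sign(-2) * |-2|^2
#assert(sat(5, 2) == 2)       // clamped to the threshold
#assert(sat(-0.5, 2) == -0.5) // unchanged inside the threshold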
https://github.com/noahjutz/CV
https://raw.githubusercontent.com/noahjutz/CV/main/sidebar/langskill.typ
typst
#let levels = ("A1", "A2", "B1", "B2", "C1", "C2") #let langskill(language, level) = align( horizon, stack( dir: ltr, spacing: 1fr, language, stack( dir: ltr, spacing: 3pt, ..levels.map(l => if level == l { text(font: "Noto Sans Mono", size: 11pt, l) } else { circle(radius: 2pt, stroke: none, fill: gray) } ) ) ) )
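// Illustrative usage (editorial addition, not part of the original file): render two
// language rows; the level string should be one of the entries of `levels` above.
#langskill("German", "C2")
#langskill("English", "B2")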
https://github.com/jgm/typst-hs
https://raw.githubusercontent.com/jgm/typst-hs/main/test/typ/compiler/let-13.typ
typst
Other
// Error: 13-14 not enough elements to destructure #let (a, b, c) = (1, 2)
https://github.com/mem-courses/discrete-mathmatics
https://raw.githubusercontent.com/mem-courses/discrete-mathmatics/main/homework/week8.typ
typst
MIT License
#import "../template.typ": * #import "../functions.typ": * #show: project.with( course: "Discrete Mathmatics", course_fullname: "Discrete Mathematics and Application", course_code: "211B0010", title: "Homework #8: Recurrence Relations", authors: (( name: "<NAME>", email: "<EMAIL>", id: "A10" ),), semester: "Spring-Summer 2024", date: "April 16, 2024", ) = 6.6 Generating Permutations and Combinations #hw("6(f)")[ Find the next larger permutation in lexicographic order after each of these permutations. (f) 23587416 ][ The answer is 23587461. ] #hw("7")[ Use Algorithm 1 to generate the 24 permutations of the first four positive integers in lexicographic order. #align(center, image("./images/6-6 algorithm-1.jpg", width: 90%)) ][ 1234, 1243, 1324, 1342, 1423, 1432; 2134, 2143, 2314, 2341, 2413, 2431; 3124, 3142, 3214, 3241, 3412, 3421; 4123, 4132, 4213, 4231, 4312, 4321. ] #hw("9")[ Use Algorithm 3 to list all the 3-combinations of ${1, 2, 3, 4, 5}$. #align(center, image("./images/6-6 algorithm-3.jpg", width: 90%)) ][ {1,2,3}, {1,2,4}, {1,2,5}, {1,3,4}, {1,3,5}, {1,4,5}, {2,3,4}, {2,3,5}, {2,4,5}, {3,4,5}. ] == 8.1 Applications of Recurrence Relations #hw("8")[ (a) Find a recurrence relation for the number of bit strings of length $n$ that contain three consecutive 0s. (b) What are the initial conditions? (c) How many bit strings of length seven contain three consecutive 0s? ][ #parts( a: [ The recurrence relation is $a_n=a_(n-1) + a_(n-2) + a_(n-3) + 2^(n-3)$ for all $n>=3$, just discuss the last three digit in the string is xx1, x10, 100 or 000. ], b: [ The initial conditions are $a_0=a_1=a_2=0$. ], c: [ $a_3=0+0+0+1=1,space a_4 = 0+0+1+2=3,space a_5=0+1+3+4=8,space a_6=1+3+8+8=20,space a_7=3+8+20+16=47.$ ] ) ] #hw("10")[ (a) Find a recurrence relation for the number of bit strings of length $n$ that contain the string 01. (b) What are the initial conditions? (c) How many bit strings of length seven contain the string 01? ][ (a) The recurrence relation is $a_n = 2 a_(n-1) - a_(n-2) + 2^(n-2)$. (b) The initial conditions are $a_1 = 0$ and $a_2 = 1$. (c) $a_3=2-0+2=4$; $a_4=8-1+4=11$; $a_5=22-4+8=26$; $a_6=52-11+16=57$; $a_7=114-26+32=120$ ] #hw("26")[ (a) Find a recurrence relation for the number of ways to completely cover a $2 times n$ checkerboard with $1 times 2$ dominoes. _Hint_: Consider separately the coverings where the position in the top right corner of the checkerboard is covered by a domino positioned horizontally and where it is covered by a domino positioned vertically. (b) What are the initial conditions for the recurrence relation in part (a)? (c) How many ways are there to completely cover a $2 times 17$ checkerboard with $1 times 2$ dominoes? ][ (a) The recurrence relation is $a_n=a_(n-1)+a_(n-2)$. (b) The initial conditions are $a_1=1$ and $a_2=2$. (c) The sequence ${a}$ is ${1,2,3,5,8, 13,21,34,55,89, 144,233,377,610,987, 1597,2584}$. So the answer is 2584. ] #hw("29")[ Let $S(m, n)$ denote the number of onto functions from a set with $m$ elements to a set with $n$ elements. Show that $S(m, n)$ satisfies the recurrence relation $ S(m,n)=n^m-sum_(k=1)^(n-1) C(n,k) S(m,k) $ whenever $m>=n$ and $n>1$, with the initial condition $S(m,1)=1$. ][ Assume the size of domain is $m$ and the size of codomain is $n$. The number of functions from the domain to the codomain without any restrictions is $n^m$. Then we need to deduce the number of non-onto functions. Consider an non-onto function, it must be a onto function defined on a subset of original codomain. 
Assume the size of such subset is $k$, then we have $C(n,k)$ ways to choose the codomain and $S(m,k)$ ways to conduct such functions. Therefore, the number of onto functions satisfies the recurrence relation $S(m,n)=n^m-sum_(k=1)^(n-1) C(n,k) S(m,k)$. ] #hw("32")[ In the Tower of Hanoi puzzle, suppose our goal is to transfer all $n$ disks from peg 1 to peg 3, but we cannot move a disk directly between pegs 1 and 3. Each move of a disk must be a move involving peg 2. As usual, we cannot place a disk on top of a smaller disk. (a) Find a recurrence relation for the number of moves required to solve the puzzle for $n$ disks with this added restriction. (b) Solve this recurrence relation to find a formula for the number of moves required to solve the puzzle for $n$ disks. (c) How many different arrangements are there of the $n$ disks on three pegs so that no disk is on top of a smaller disk? (d) Show that every allowable arrangement of the $n$ disks occurs in the solution of this variation of the puzzle. ][#parts( a: [ To move $n$ disks from peg 1 to 3, we can: - Step 1: move top $n-1$ disks from peg 1 to peg 3; - Step 2: move the $n$-th disk from peg 1 to peg 2; - Step 3: move top $n-1$ disks from peg 3 to peg 1; - Step 4: move the $n$-th disk from peg 2 to peg 3; - Step 5: move top $n-1$ disks from peg 1 to peg 3. So the recurrence relation is $a_n=3 a_(n-1) + 2$. ], b: [ The initial condition is $a_1=2$. The recurrence relation is equivalent to $ (a_n+1)=3(a_(n-1) + 1) = 3^(n-1) (a_1 + 1) = 3^n $ So we can derived that $a_n=3^n-1$. ], c: [ Assign each disk to a peg, then the order of disks on each peg is uniquely determined by their size. So the number of ways is $3^n$. ], d: [ We take $3^n-1$ moves, so there are $3^n$ arrangements occured in the solution. Just prove that these arrangements are distinct by contridiction. Suppose that there are two identical arrangements, then all step between them can be omitted. This goes against the fact that the number of moves is minimal. So the assumption is wrong, which means that all $3^n$ arrangements are distinct. Thus all $3^n$ arrangements occur in the solution. ] )] == 8.2 Solving Linear Recurrence Relations #hw("2")[ Determine which of these are linear homogeneous recurrence relations with constant coefficients. Also, find the degree of those that are. (a) $a_n=3 a_(n-2)$ (b) $a_n=3$ (c) $a_n=a_(n-1)^2$ (d) $a_n=a_(n-1)+2a_(n-3)$ (e) $a_n=a_(n-1)"/"n$ (f) $a_n=a_(n-1)+a_(n-2)+n+3$ (g) $a_n=4a_(n-2)+5a_(n-4)+9a_(n-7)$ ][ (a) linear, homogeneous, with constant coefficients, with degree 2 (b) linear, not homogeneous, with constant coefficients (c) not linear (d) linear, homogeneous, with constant coefficients, with degree 3 (e) linear, homogeneous, not with constant coefficients, with degree 1 (f) linear, not homogeneous, with constant coefficients, with degree 2 (g) linear, homogeneous, with constant coefficients, with degree 7 ] #hw("4(g)")[ Solve these recurrence relations together with the initial conditions given. (g) $a_(n+2) = -4 a_(n+1) + 5 a_n "for " n >= 0, a_0=2,a_1=8$ ][ Solving the equation $r^2 = - 4r + 5$, we obtain that $r_1=-5, r_2=1$. Therefore, the general solution is $a_n = c_1 (-5)^n + c_2 1^n$. Substitute the initial conditions, we have $-5c_1+c_2=2$ and $25c_1+c_2=8$. Solving this, we obtain $c_1=1"/"5$ and $c_2=3$. So the solution is $a_n=5^(n-1)+3^n$. ] #hw("20")[ Find the general form of the solutions of the recurrence relation $a_n=8 a_(n-2) - 16 a_(n-4)$. 
][ The characteristic polynomial is $(r^4-8r^2+16)=(r-2)^2 (r+2)^2$. So the general form of the solution is $a_n=(c_1 + c_2 n) 2^n + (c_3 + c_4 n) (-2)^n$. ] #hw("30")[ (a) Find all solutions of the recurrence relation $a_n = −5a_(n−1) − 6a_(n−2) + 42 dot 4^n$. (b) Find the solution of this recurrence relation with $a_1 = 56$ and $a_2 = 278$. ][#parts( a: [ Solving the characteristic equation $r^2+5r+6=0$, we obtain $r_1=-2,space r_2=-3$. So the general form of the solution is $ a_n^((h))=alpha (-2)^n + beta (-3)^n $ Substitute $a_n^((p))=c dot 4^n$ into the recurrence relation, we have $ c dot 4^n = -5c dot 4^(n-1) - 6c dot 4^(n-2) + 42 dot 4^n $ which implies that $c=16$. So $a_n^((p))=4^(n+2)$. The general solution of this recurrence relation is $ a_n=alpha (-2)^n + beta (-3)^n + 4^(n+2) $ ], b:[ Substitute the initial conditions, we have $alpha=1$ and $beta=2$, so the solution is $ a_n=(-2)^n + 2 (-3)^n + 4^(n+2) $ ] )] #hw("35")[ Find the solution of the recurrence relation $a_n=4 a_(n-1) - 3 a_(n-2) + 2^n + n + 3$ with $a_0=1$ and $a_1=4$. ][ Solving the characteristic equation $r^2-4r+3=0$, we obtain that $r_1=3,space r_2=1$. Then we have $ a_n^((h)) = alpha_1 dot 3^n + alpha_2 $ Substitute $a_n^((p))=c_1 dot 2^n + c_2 n^n + c_3$ into the recurrence relation, we have $ & c_1 dot 2^n + c_2 n^2 + c_3\ =& 4 (c_1 dot 2^(n-1) + c_2 (n-1)^2 + c_3 (n-1)) \ &- 3 (c_1 dot 2^(n-2) + c_2 (n-2)^2 + c_3 (n-2))\ &+ 2^n + n + 3 $ which implies that $c_1=-4,space c_2=display(-1/4),space c_3=display(-5/2)$. And by substituting initial conditions, we have $alpha_1=display(39/8),space alpha_2=display(1/8)$. So the general solution is $ a_n=39/8 dot 3^n + 1/8 - 4 dot 2^n - 1/4 n^2 - 5/2 n $ ]
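// Editorial addition (illustrative only): the value a_7 = 47 claimed in Section 8.1,
// exercise 8(c), can be double-checked by iterating the recurrence
// a_n = a_(n-1) + a_(n-2) + a_(n-3) + 2^(n-3) with a_0 = a_1 = a_2 = 0 in Typst itself.
#{
  let a = (0, 0, 0)
  for n in range(3, 8) {
    a.push(a.at(n - 1) + a.at(n - 2) + a.at(n - 3) + calc.pow(2, n - 3))
  }
  assert(a.at(7) == 47) // compiles silently when the hand computation is right
}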
https://github.com/japrozs/resume
https://raw.githubusercontent.com/japrozs/resume/master/README.md
markdown
# Resume

My resume template, built with Typst and driven by a configuration file. If you want to customize the data for your own use, you only have to change the [resume.yml](./resume.yml) file. You can also change the layout from the default double-column layout to a single-column layout by customizing [resume.typ](./resume.typ).

The starting code for this template has been taken from https://github.com/jskherman/cv.typ

Fonts used in this template:

1. Linux Libertine
2. New Computer Modern
3. EB Garamond

## Preview

<p align="center"> <img alt="Light" src="assets/resume-double-column.png" width="45%"> &nbsp; &nbsp; &nbsp; &nbsp; <img alt="Dark" src="assets/resume-single-column.png" width="45%"> </p>

### License

```
Copyright 2024-Present <NAME>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
```
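For reference, a config-driven Typst template along these lines typically pulls its data in with Typst's built-in `yaml()` loader. The snippet below is only an illustrative sketch; the real field names are defined by `resume.yml` in this repository and may differ:

```typst
// Illustrative sketch only: the `name` field is hypothetical, check resume.yml
// for the schema this template actually uses.
#let data = yaml("resume.yml")
#align(center, text(size: 20pt, weight: "bold", data.at("name", default: "Your Name")))
```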
https://github.com/EricWay1024/Homological-Algebra-Notes
https://raw.githubusercontent.com/EricWay1024/Homological-Algebra-Notes/master/ha/3-tp.typ
typst
#import "../libs/template.typ": * = Tensor Product of Modules <tp-module> == Existence and Functoriality #let Bil = [$"Bil"$] #definition[ Let $R$ be a ring. Consider #rrm $M$, #lrm $N$ and abelian group $A$. A map $f: M times N -> A$ is called a *balanced product* (or we say it is *$R$-biadditive*) if it satisfies: $ f(x + x', y) = f(x, y) + f(x', y), \ f(x, y + y') = f(x, y) + f(x, y'), \ f(x r , y) = f(x, r y). $ for all $x, x' in M$, $y, y' in N$ and $r in R$. // Denote the set of all balanced products $B : M times N -> A$ as $Bil\(M, N; A)$, which is an abelian group under addition. ] #definition[ The *tensor product* of a #rrm $M$ and a #lrm $N$ is an abelian group $M tpr N$ with a balance product $M times N -> M tpr N$ such that for any balanced product $f: M times N -> A$, there exists a unique group homomorphism $M tpr N -> A$ that makes the diagram commutes: // https://t.yw.je/#N4Igdg9gJgpgziAXAbVABwnAlgFyxMJZABgBpiBdUkANwEMAbAVxiRAFkACPAW3k4ByIAL6l0mXPkIoAjOSq1GLNlxxoAToJFiQGbHgJE5MhfWatEIAIIiFMKAHN4RUADN1EHkjIgcEJABM1GbKlgBC2m4eXohyvv6IQSAMWGAWIFB0cAAW9iDBSukwAB5YcDhwnACEkSDunt7UfkgywhTCQA #align(center, commutative-diagram( node-padding: (50pt, 50pt), node((0, 0), [$M times N$]), node((0, 1), [$M tpr N$]), node((1, 1), [$A$]), arr((0, 0), (1, 1), [$f$]), arr((0, 1), (1, 1), [$exists !$], "dashed"), arr((0, 0), (0, 1), []), )) We might simply say $M tpr N$ is the tensor product of $M$ and $N$. ] #remark[ In other words, $M tpr N$ is an initial object of the category of all balanced products $M times N -> A$ (where a morphism is a group homomorphism $A -> A'$). ] #lemma[The tensor product $M times N -> M tpr N$ exists, up to a unique isomorphism, for any #rrm $M$ and #lrm $N$.] #proof[ // #definition[ // For any set $X$, let $k[X]$ denote the vector space spanned by $X$: $ sum_(x in X) a_x x $ where $a_x = 0$ for almost all $x in X$. // ] Consider the free abelian group $F$ with basis $M times N$, and let $i : M times N -> F$ be the inclusion map. $F$ has a subgroup $I$ generated by all elements of the following forms $ (x+x', y) - (x, y) - (x', y) \ (x, y+y') - (x, y) - (x, y') \ (x r , y) - (x, r y) $ for $x, x' in M$, $y , y' in N$ and $r in R$. Denote $M tpr N := F over I$, and denote the coset $(x, y) + I$ by $x tp y$, and define $ h : M times N &-> M tpr N \ (x, y) &|-> x tp y $ It is clear that $h$ is biadditive. Let $f : M times N -> A$ be a balanced product, then there exists a homomorphism $f': F -> A$ which linearly extends $f$ such that $f = f' oo i$. Now we see that $I subset.eq Ker f'$ and thus $f'$ induces a map $ hat(f) : F over I &-> A \ (x, y) + I &|-> f'(x, y) = f(x, y) $ which is the same as saying $ hat(f) : M tpr N &-> A \ x tp y &|-> f(x, y) $ Thus we see that $hat(f) oo h = f$, and we can conclude that $M tpr N$ is a tensor product of $M$ and $N$. The uniqueness follows from the universal property. // Now we look at $k[V times W]$ (a free vector space). This is large: even if $V$ and $W$ have finite dimensions this vector space has infinite. We look at the subspace spanned by the relations $ M = "span"{(v + v', w) - (v, w) - (v', w) \ (v, w + w') - (v, w) - (v, w') \ c(v, w) - (c v, w) \ c(v, w) - (c w, v)} $ TODO // for all $v, v' in V$ and $w, w' in W$ and $c in k$. Then we have // $ // homk (k[V times W] \/ M, L) tilde.eq "Bilin"(V, W; L) // $ // due to the first isomorphism theorem. We see that if $T in homk (k[V times W] \/ M, L)$ then $T$ is a linear transformation $k[V times W] -> L$ such that $T(M) = 0$. 
For any $(v, w) in V times W$: // $ V times W -> k[V times W] -> k[V times W] \/ M \ // (v, w) |-> (v, w) |-> (v, w) + M $ // We can thus define $V tpk W := k[V times W] \/ M$. ] #lemma[ Let $phi : M -> M'$ and $psi : N -> N'$ be module homomorphisms, then there exists a unique group homomorphism $phi tp psi : M tpr N -> M' tpr N'$, such that $ phi tp psi : m tp n mapsto phi(m) tp psi(n) $ ] #proof[ The function $ f : M times N &-> M' tpr N' \ (m, n) &|-> phi(m) tp psi(n) $ is $R$-biadditive. Therefore, $f$ induces a unique homomorphism $ hat(f): M tpr N &-> M' tpr N' \ m tp n &|-> phi(m) tp psi(n) $ which we write as $phi tp psi$. ] #corollary[ $(phi' oo phi) tp (psi' oo psi) = (phi' tp psi') oo (phi tp psi)$ for any $M ->^phi M' ->^(phi') M''$ and $N ->^psi N' ->^(psi') N''$. ] <tp-composition> #proof[Both send $m tp n$ to $phi'(phi(m)) tp psi'(psi(n))$, but such a homomorphism should be unique. ] #corollary[ Let $M$ be a #rrm and $N$ be a #lrm, then we have functors $ M tpr - : RMod &-> Ab \ B &|-> M tpr B \ (g:B-> B') &mapsto id_M tp g $ $ - tpr N : ModR &-> Ab \ A &|-> A tpr N \ (f: A -> A') &mapsto f tp id_N $ ] == Bimodules and Bilinearity #definition[ Let $R, S$ be rings. An *$R$-$S$-bimodule* is an abelian group $M$ being both a left $R$-module and a right $S$-module, satisfying: $ r(m s) = (r m) s $ for all $m in M, r in R, s in S$. ] #example[ Any #lrm is an $R$-$ZZ$-bimodule, and any #rrm is a $ZZ$-$R$-bimodule. ] #example[ When $R$ is commutative, any $R$-module can be seen as an $R$-$R$-bimodule. ] #proposition[ Let $Q, R, S$ be rings, $M$ be a $Q$-$R$-bimodule, and $N$ be a $R$-$ S$-bimodule. Then $M tpr N$ is a $Q$-$S$-bimodule. ] #proof[ // See @li[Proposition 6.5.9]. Let $q in Q$ and $s in S$. Then $f: m |-> q m$ is a homomorphism $M -> M$ and $g: n |-> n s$ is a homomorphism $N -> N$. Then $f tp id_N$ gives a left multiplication on $M tpr N$ and $id_M tp g$ gives a right multiplication on $M tpr N$, which satisfies $(f tp id_N) oo (id_M tp g) = f tp g = (id_M tp g) oo (f tp id_N)$. ] // In this case we know $M tpr N$ is not merely an abelian group. #definition[ If $R$ is a commutative ring and $M, N, A$ are $R$-modules, a map $f : M times N -> A$ is called *$R$-bilinear* if it is $R$-biadditive and also $ f(r x , y) = f(x, r y) = r f(x, y) $ for all $x in M$, $y in N$ and $ r in R$. ] #proposition[ Let $R$ be a commutative ring and $A, B$ be $R$-modules. Then $A tpr B$ is an $R$-module and $h: A times B -> A tpr B$ is $R$-bilinear. Further, for any $R$-bilinear map $g : A times B -> C$, there exists an $R$-homomorphism $hat(g) : A tpr B -> C$ such that $g = hat(g) oo h$. ] #proof[ // See @rotman[Proposition 2.55]. We view $A$, $B$ as $R$-$R$-bimodules, then we easily see that $A tpr B$ is also an $R$-$R$-bimodule (i.e., an $R$-module) with (left) multiplication given by $(a |-> r a) tp id_B$, hence $r(a tp b) = (r a) tp b = a tp (r b)$ and $h$ is $R$-bilinear. Suppose $g : A times B -> C$ is an $R$-bilinear map. Then $g$ is $R$-biadditive and $g$ induces a $ZZ$-homomorphism $hat(g) : A tpr B -> C$ such that $g = hat(g) oo h$. We only need to show that $hat(g)$ is also an $R$-homomorphism. Let $r in R$. Then $hat(g) (r (a tp b)) = hat(g) ((r a) tp b) = g(r a, b) = r g(a, b) = r hat(g) (a tp b)$. ] // In this section, let $R$ be a commutative ring unless stated otherwise. // #definition[ // Let $M, N, P$ be $R$-modules. 
A map $f : M times N -> P$ is called *bilinear* if the following identities are satisfied: // $ // f(m + m', n) = f(m, n) + f(m', n) \ // f(m, n + n') = f(m, n) + f(m, n') \ // f(r m, n) = r f(m, n) = f(m, r n) // $ // ] // Let $k$ be a field and let $veck$ denote the category of $k$-vector spaces. Let $V, W, L in veck$, denote by $"Bilin"(V, W; L)$ the set of bilinear transformations $V times W -> L$. Let $T in "Bilin"(V, W; L)$, then can we write $T$ in terms of linear algebra? // Denote // $ "Hom"_k (V, W) := "Hom"_veck (V, W) $ as the set of linear transformations $V -> W$, and it is a $k$-vector space (in a natural way). // // We denote the same set by $#underline("Hom") (V, W)$ to emphasise the vector space structure. // We see that // $ // "Bilin"(V, W; L) tilde.eq homk (V, homk (W, L)) tilde.eq homk(V, homk(V, L)) // $ // #remark[This is currying in computer science.] // Is $"Bilin"(V, W; -): veck -> bd("Set")$ _representable_? In other words, is there a $k$-vector space $V times.circle_k W$ with a _natural_ isomorphism $ "Bilin"(V, W; -) tilde.eq homk (V times.circle_k W, -) $ // where naturality means that the isomorphism is compatible with changes in $L$: if there is a linear map $T: L -> L'$ then this isomorphism should be compatible with $T$. // Also equivalent to: is there a vector space $V times.circle_k W$ with a bilinear map $V times W -> V times.circle_k W$ which is universal? We want to find bilinear map $V times W -> V times.circle_k W$ such that for any bilinear map $V times W -> L$, there exists a unique linear map $V times.circle_k W -> L$ such that the diagram commutes: // // #image("imgs/1.png", width: 50%) // // https://t.yw.je/#N4Igdg9gJgpgziAXAbVABwnAlgFyxMJZABgBoBGAXVJADcBDAGwFcYkQA1AAjwFt4uAdRABfUuky58hFOQrU6TVuwAyo8SAzY8BInOIKGLNok480AayGiFMKAHN4RUADMAThF5IyIHBCTkYq4eXog+fkgATEEg7p5RNBGIciCMWGAmIFD0cAAWdiA0RsqmMAAeWHA4cFwAhDYiQA // #align(center, commutative-diagram( // node((1, 0), [$V times W$]), // node((1, 1), [$L$]), // node((0, 1), [$V tpk W$]), // arr((1, 0), (1, 1), []), // arr((1, 0), (0, 1), []), // arr((0, 1), (1, 1), [$exists !$], "dashed"), // )) // Remark: it is also equivalent to asking for an adjoint functor. // For any $(v, w) in V times W$ we want to find $v times.circle w in V tpk W$. This is a bilinear map, so for example: // $ // (v+v') tp w = v tp w + v' tp w // $ // Note: $V tpk W$ is defined up to a unique isomorphism. This follows from the universal property (ref. category theory). // // https://t.yw.je/#N4Igdg9gJgpgziAXAbVABwnAlgFyxMJZABgBoBGAXVJADcBDAGwFcYkQA1AAjwFt4uAdRABfUuky58hFOVLFqdJq3bccaANZDR4kBmx4CROQCZFDFm0ScemgOTaRimFADm8IqABmAJwi8kMhAcCCRyMW8-AMQgkKQTCJBff3iaOMQ5EEYsMCsQK<KEY> // #align(center, commutative-diagram( // node((1, 0), [$V times W$]), // node((0, 1), [$V tpk W$]), // node((2, 1), [$V tpk' W$]), // arr((1, 0), (0, 1), []), // arr((1, 0), (2, 1), []), // arr((2, 1), (0, 1), [$exists !$], curve: 30deg, "dashed"), // arr((0, 1), (2, 1), [$exists !$], curve: 30deg, "dashed"), // )) // #image("imgs/2.png") == Further Properties #proposition[ If $R$ is a ring, $M$ is a #rrm and $N$ is a #lrm, then there is a natural $ZZ$-isomorphism $ tau : M tpr N &-> N tp_(R^op) M \ m tp n &|-> n tp m $ ] #proof[ This follows from the fact that a map $f: M times N -> A$ is $R$-biadditive if and only if the map $g: N times M -> A$ defined by $g(n, m) = f(m, n)$ is $R^op$-biadditive. 
] // #proof[ // Consider map // $ // f: M times N &-> N tp_(R^op) M \ // (m, n)&|-> n tp m // $ // then $f$ is $R$-biadditive so that it induces a unique $ZZ$-homomorphism $ tau : M tpr N &-> N tp_(R^op) M \ m tp n &mapsto n tp m $ // Similarly, we can get a $ZZ$-homomorphism $n tp m |-> m tp n$ which gives an inverse to $tau$, and thus $tau$ is an isomorphism. // ] #corollary[ If $R$ is a commutative ring and $M$, $N$ are $R$-modules, then there is a natural $R$-isomorphism $ tau: M tpr N &-> N tpr M \ m tp n &mapsto n tp m $ ] <r-tpr> #proposition[ Given #rrm $A$, $R$-$S$-bimodule $B$, and left $S$-module $C$, there is an isomorphism $ theta : A tpr (B tp_S C) iso (A tpr B) tp_S C $ given by $a tp (b tp c) |-> (a tp b) tp c$. ] #proof[ @rotman[Proposition 2.57]. They are both solutions to the universal mapping problem of *triadditive functions*, but the solution is unique. ] #corollary[ Let $R$ be a commutative ring, and let $M_1, M_2, ..., M_n$ be $R$-modules. Let $sigma in S_n$ (where $S_n$ is the symmetric group of degree $n$), then $ ( ... (M_1 tpr M_2) tpr ... tpr M_n) iso ( ... (M_sigma(1) tpr M_sigma(2)) tpr ... tpr M_sigma(n)) $ ] #proof[ Notice that both solve the universal mapping problem of $R$-$n$-linear functions. ] #proposition[ Given ring $R$ and #lrm $M$, there is a natural $R$-isomorphism $ phi_M : R tpr M &-> M \ r tp m &|-> r m $ ] <r-tp-m> #proof[ $f : R times M -> M$ defined by $(r, m) mapsto r m$ is $R$-biadditive and thus induces an $R$-homomorphism $phi: R tpr M -> M$ with $r tp m |-> r m$. Now $g : M -> R tpr M$ defined by $g : m mapsto 1 tp m$ satisfies that $phi g$ and $g phi$ are identity maps, so $phi$ is an $R$-isomorphism. ] // $"Bilin"(V, W; -) tilde.eq "Bilin"(W, V; -)$ in a canonical way and hence $V tpk W tilde.eq W tpk V$. Also $k tpk W tilde.eq W$. // $ "Trilin"(V_1, V_2, V_3; L) tilde.eq "Bilin"(V_1 tpk V_2, V_3; L) tilde.eq "Bilin"(V_1, V_2 tpk V_3; L) $ // and hence $ (V_1 tpk V_2) tpk V_3 tilde.eq V_1 tpk (V_2 tpk V_3) $ // It is obviously hard to verify these properties using the definition directly (but good exercise?). // More generally, let $sigma in S_n$ (symmetric group), // $ ( ... (V_1 tpk V_2) tpk ... tpk V_n) tilde.eq ( ... (V_sigma(1) tpk V_sigma(2)) tpk ... tpk V_sigma(n)) $ // Remark: in category theory, we think of things by their properties not their definition. // Remark: we now have $(veck, - tpk -)$ which also preserves liner maps. If we have $T: V-> V'$ and $S: W-> W'$ then $T tpk S: V tpk W -> V' tpk W$. (This is heavily used in quantum computing.) This is a symmetric monoidal category. == Monoidal Categories and $k$-algebras #definition[ A *monoidal category* is a category $cC$ equipped with a bifunctor $tp: cC times cC -> cC$ associative up to a natural isomorphism, and an object $I$ that is both a left and right identity for $tp$ up to a natural isomorphism. ] #proposition[ Let $R$ be a commutative ring, then the category $(RMod, tpr)$ is a monoidal category. In particular, for a field $k$, the category $(veck, tpk)$ is a monoidal category. ] #proof[ The identity for $tpr$ in $RMod$ is clearly given by $R$. 
] #definition[ A *monoid object* in a monoid category $(cC, tp, I)$ is an object $M$ with two morphisms: - $mu: M tp M -> M$ called *multiplication*, - $eta: I -> M$ called *unit*, such that the following diagrams commute: // https://t.yw.je/#N4Igdg9gJgpgziAXAbVABwnAlgFyxMJZABgBpiBdUkANwEMAbAVxiRAAoBZAAhzW84BKXv04gAvqXSZc+QigCM5KrUYs2PPty4iBgiVJAZseAkQBMy6vWatEITaIPSTci6QUqb6+2MkvZMxQyT2s1OwddPxUYKABzeCJQADMAJwgAWyQAFmocCCQAZjDbNgymZxA0zKRLEHyikp8QcsrqrMQyeoLEXNVS+3LdLCgAfT9DdqQuhsQlfubGNAALOjb0jvnZuu8IkfHuXVbxCnEgA #align(center, commutative-diagram( node-padding: (70pt, 50pt), node((0, 0), [$(M tp M) tp M$]), node((0, 1), [$M tp (M tp M)$]), node((0, 2), [$M tp M$]), node((1, 2), [$M$]), node((1, 0), [$M tp M$]), arr((1, 0), (1, 2), [$mu$]), arr((0, 2), (1, 2), [$mu$]), arr((0, 0), (1, 0), [$mu tp id_M$]), arr((0, 0), (0, 1), [$alpha$]), arr((0, 1), (0, 2), [$id_M tp mu$]), )) // https://t.yw.je/#N4Igdg9gJg<KEY>wn<KEY> #align(center, commutative-diagram( node-padding: (70pt, 50pt), node((0, 0), [$I tp M$]), node((0, 1), [$M tp M$]), node((0, 2), [$M tp I$]), node((1, 1), [$M$]), arr((0, 0), (0, 1), [$eta tp id_M$]), arr((0, 2), (0, 1), label-pos:-.8em, [$id_M tp eta$]), arr((0, 1), (1, 1), [$mu$]), arr((0, 0), (1, 1), [$lambda$]), arr((0, 2), (1, 1), label-pos:-0.8em, [$rho$]), )) where $alpha$, $lambda$, $rho$ are natural isomorphisms for the associativity, the left identity and the right identity, respectively. ] #definition[ Let $k$ be a field. A (unital associative) *$k$-algebra* is a monoid object in $(veck, tpk)$. ] #remark[ Let $M$ be a $k$-algebra, then $M$ is a $k$-vector space equipped with bilinear multiplication $mu: M tpk M -> M$ and unit $eta: k -> M$ which sends $1 in k$ to $i(1) in M$, the multiplicative unit. An equivalent definition: $M$ is both a $k$-vector space and a unital ring, where the ring multiplication satisfies $ a (x y) = (a x) y = x( a y) \ $ for all $a in k$ and $x,y in M$. ] // #definition[ // #TODO // A $k$-algebra is a monoid object in $(veck, tpk)$; namely, Associativity of $m$ is shown in commuting diagram: // // #image("imgs/3.png") // ] // == Non-linear version #remark[ $(veck, tpk)$ is a *symmetric monoidal category*, where the tensor product is commutative. ] #remark[ In $Set$, $ hom(X times Y, Z) = hom(X, hom (Y, Z)). $ $Set$ is a *cartesian monoidal category*, where the categorical product is the same as the tensor product. ] // #endlec(1) // == More discussions #remark[ For vector spaces $V, W$ over field $k$, $ dim (V ds W) = dim V + dim W, quad dim (V tp W) = dim V dot dim W. $ An alternative definition of the tensor product: let $V$ be a vector space with basis ${v_i}_(i in I)$ and $W$ with ${w_j}_(j in J)$ and define $V tpk W$ to have basis ${v_i tp w_j}_(i in I, j in J)$. This definition relies on the choice of basis and can be inconvenient when we have to change basis. ] == Tensor-hom Adjunction #proposition[ If $B$ is an $R$-$S$-bimodule and $C$ is a right $S$-module, then $hom_S (B, C)$ is a right $R$-module. ] <hom-module> #proof[ Take any $f in hom_S (B, C)$ and $r in R$, define right multiplication $f r : B -> C$ by $f r(b) = f(r b)$. Then we see that $(f r) (b s) = f (r b s) = f (r b) s = (f r) (b) s$ which indicates that $f r$ still an $S$-homomorphism $B -> C$. ] Then this makes $hom_S (B, -)$ a functor from $ModS$ to $ModR$. #theorem[ Let $R$, $S$ be rings. Let $A$ be a #rrm, $B$ be a $R$-$S$-bimodule, and $C$ be a right $S$-module. 
Then we have a canonical isomorphism $ tau: hom_S (A tpr B, C) bij hom_R (A, hom_S (B, C)) $ where for $f : A tpr B -> C$, $a in A$, and $b in B$, $ tau(f)(a)(b) = f(a tp b) $ ] <tensor-hom> #proof[ $tau$ is a group homomorphism because for any $a in A$ and $b in B$, $ tau(f + g)(a)(b) = (f+g)(a tp b) = f(a tp b) + g(a tp b) = tau(f)(a)(b) + tau(g)(a)(b) $ and hence $tau(f+g) = tau(f) + tau(g)$. $tau$ is injective because if $tau(f) = 0$, then $f(a tp b) = tau(f)(a)(b) = 0$ for all $a in A$ and $b in B$. Thus $f = 0$ since it vanishes on all generators of $A tpr B$. It remains to be shown that $tau$ is surjective. Take any $R$-homomorphism $g : A -> hom_S (B, C)$, define $phi: A times B -> C$ by $phi(a, b) = g(a)(b)$. Now it is easy to check that $phi$ is $R$-biadditive and hence there exists a group homomorphism $hat(phi) : A tpr B -> C$ such that $hat(phi) (a tp b) = phi(a, b) = g(a)(b)$ for all $a in A$ and $b in B$. Therefore $g = tau(hat(phi))$ and $tau$ is surjective. Verifying the naturality of $tau$ is omitted. ] #corollary[ Let $R$, $S$ be rings and let $B$ be a $R$-$S$-bimodule. We have an adjunction $ (- tpr B) tack.l hom_S (B, -) $ where $(- tpr B) : ModR -> ModS$ and $hom_S (B, -) : ModS -> ModR$. ] #corollary[ The functor $(- tpr B) : ModR -> ModS$ preserves colimits. In particular, it preserves cokernels and is thus right exact; it also preserves direct sums. ] <tensor-right-exact> #theorem[ Let $A$ be a left $R$-module, $B$ be a $S$-$R$-bimodule, and $C$ be a left $S$-module, then there is a canonical isomorphism $ hom_S (B tpr A, C) bij hom_R (A, hom_S (B, C)) $ Thus $(B tpr -): RMod -> SMod$ and $hom_S (B, -): SMod -> RMod$ form an adjunction $ (B tpr -) tack.l hom_S (B, -) $ Hence $(B tpr -)$ preserves colimits and in particular is right exact. ] <tensor-right-exact-2> // which is right adjoint to $ - tpr B : Mod hyph S -> Mod hyph R $, // #remark[ // We haven't really defined $ - tpr B $ when $R$ is not commutative but this can be a definition itself, as left adjoint of $Hom_S (B, -)$. // We can define $ M tpr N := Coeq (M tp_ZZ R tp_ZZ N arrows M tp_ZZ N ) $ // This coequaliser basically just makes sure $m r tp n = m tp r n$. // ] == Computations #example[ Let $R$ be a commutative ring. Given $R$-modules $N$ and $M$, suppose we want to calculate $N tp_R M$, then we can pick the relations and generators of $N$: $ R^(ds J) -> R^(ds I) -> N -> 0 $ Consider $R^(ds J) -> R^(ds I)$, this homomorphism between free modules can be represented by a (possibly infinite) matrix $a_(i j) in R$. Therefore, we can write $ N = Coker(R^(ds J) rgt((a_(i j))) R^(ds I)) $ Notice that the same matrix can also act as $M^(ds J) -> M^(ds I) $, hence $ N tpr M = Coker(R^(ds J) rgt((a_(i j))) R^(ds I)) tpr M iso Coker(R^(ds J) tpr M rgt((a_(i j))) R^(ds I) tpr M) \ iso Coker((R tpr M)^(ds J) rgt((a_(i j))) (R tpr M)^(ds I)) iso Coker (M^(ds J) rgt((a_(i j))) M^(ds I)) $ ] // The functor $- tp_R M$ is left adjoint so it commutes with colimits. In particular, it sends cokernels to cokernels and is right exact. It also preserves direct sums. // If we want to calculate $N tp_R M$. // Then we have // $ R^(ds J) tp_R M -> R^(ds I) tp_R M -> N tp_R M -> 0 $ // Note that $ R^(ds J) tp_R M iso (R tp_R M)^(ds J) = M^(ds J) $ and so it becomes // $ M^(ds J) -> M^(ds I) -> N tp_R M -> 0 $ // Assume $R$ is a commutative ring. The functor $- tp_R M$ is left adjoint which implies that it is right exact and commutes with all colimits. 
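A minimal worked instance of this recipe, assuming $R = ZZ$ and the evident one-term presentations: #example[ Take $N = ZZ over (m)$ and $M = ZZ over (n)$ as $ZZ$-modules. A presentation of $N$ is given by the $1 times 1$ matrix $(m)$: $ ZZ rgt((m)) ZZ -> ZZ over (m) -> 0 $ so the recipe above yields $ ZZ over (m) tp_ZZ ZZ over (n) iso Coker (ZZ over (n) rgt((m)) ZZ over (n)) iso ZZ over (gcd(m, n)) $ because the image of multiplication by $m$ on $ZZ over (n)$ is the subgroup generated by $gcd(m, n)$. ]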
#example[ Suppose $I$ is an ideal of $R$ generated by ${x_j}_(j in J)$, then we have #sest $ ses(I, R, R \/ I) $ On the other hand, $ R^(ds J) -> I -> 0 $ and thus $ R^(ds J) rgt((x_j)) R -> R \/ I -> 0 $ Let $M$ be a left $R$-module, then $ (R over I) tpr M iso Coker (M^(ds J) rgt((x_j)) M) = M over I M $ ] <tensor-ideal> // [Check https://math.stackexchange.com/questions/175789/how-to-prove-that-r-i-otimes-r-m-cong-m-im] #example[ The *localisation* of a commutative ring $R$ at an element $x$ is defined as $R[t] over (t x - 1)$, denoted by $R[x^(-1)]$. For an $R$-module $M$, we have $ R[x^(-1)] tpr M iso M[x^(-1)] $ ] #proof[ Notice that $R[t] tpr M iso M[t]$, because $R[t] iso plus.circle.big_(i=1)^(infinity) R$ in $RMod$ and $- tpr M$ preserves direct sums. Applying the previous example over the ring $R[t]$ to the ideal $(t x - 1)$ and the module $M[t]$, together with the associativity of the tensor product, then gives $ R[x^(-1)] tpr M iso M[t] over (t x - 1) M[t] = M[x^(-1)] $ ]
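A quick special case, with $R = ZZ$, $x = 2$ and $M = ZZ over (2)$ chosen only for illustration: #example[ $ ZZ[2^(-1)] tp_ZZ ZZ over (2) iso (ZZ over (2))[2^(-1)] = 0 $ since $2$ is invertible in the localisation but acts as $0$ on $ZZ over (2)$, so every element vanishes. More generally, $R[x^(-1)] tpr M = 0$ whenever $x$ acts nilpotently on $M$. ]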
https://github.com/Enter-tainer/typstyle
https://raw.githubusercontent.com/Enter-tainer/typstyle/master/tests/assets/unit/math/multi-char-var.typ
typst
Apache License 2.0
#let total_order = 123 #let another-order = 456 $ a^#total_order + a^#another-order$
https://github.com/pku-typst/ichigo
https://raw.githubusercontent.com/pku-typst/ichigo/main/src/lib.typ
typst
MIT License
#import "@preview/linguify:0.4.1": load_ftl_data, linguify #import "@preview/numbly:0.1.0": numbly #import "@preview/valkyrie:0.2.1" as z #import "model.typ" as model #import "title.typ": title-content #let languages = ( "zh", "en", ) #let lgf_db = eval(load_ftl_data("./L10n", languages)) #let linguify = linguify.with(from: lgf_db) /// Main document processing function /// /// - doc (content): the whole document /// - course-name (str): the name of the course, must be provided /// - serial-str (str): the serial number of the document, must be provided /// - author-info (content): the author information, default to `[]` /// - author-names (array | str): the array of author names, default to `""` /// /// - title-style (str | none): expected to be `"whole-page"`, `none` or `"simple"`, default to `"whole-page"` /// -> doc #let config( doc, course-name: none, serial-str: none, author-info: [], author-names: "", heading-numberings: (none, none, "(1)", "a."), title-style: "whole-page", theme-name: "simple", ..opt, ) = { let meta = ( course-name: course-name, serial-str: serial-str, author-info: author-info, author-names: author-names, ..opt.named(), ) title-style = z.parse(title-style, model.title-style) meta = z.parse(meta, model.meta-schema) z.parse(theme-name, model.theme-name) let theme = model.get-theme(theme-name) theme = z.parse(theme(meta), model.theme-schema, scope: (theme-name,)) return { // Document metadata set document( title: meta.course-name + "-" + meta.serial-str, author: meta.author-names, ) // heading numbering set heading(numbering: (..n) => { let (..n) = n.pos() let numb = heading-numberings.at( n.len() - 1, default: heading-numberings.last(), ) numb = if numb != none { numbering(numb, n.last()) } else { none } return numb }) // Page header & footer set page( header: (theme.page-setting.header)(), footer: (theme.page-setting.footer)(), ) // Fonts set text(font: theme.fonts.text) show heading: set text(font: theme.fonts.heading) show math.equation: set text(font: theme.fonts.equation) // Title title-content(meta, theme, title-style) if title-style == "whole-page" { counter(page).update(x => x - 1) } doc } } #let top-prob-counter = state("ichigo.top-prob-counter", 0) #let prob( question, solution, title: auto, ) = { if title == auto { title = context [#linguify("problem") #top-prob-counter.get().] } return [ #top-prob-counter.update(x => x + 1) = #title #set heading(offset: 2) #question #set heading(offset: 1) = #linguify("solution") #set heading(offset: 2) #solution ] }
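// A minimal usage sketch, kept in comments: it assumes the package is
// imported from the preview namespace, and the version number, course name,
// serial string and author names below are placeholders rather than values
// taken from this repository.
//
// #import "@preview/ichigo:0.1.0": config, prob
//
// #show: config.with(
//   course-name: "Linear Algebra",
//   serial-str: "HW 3",
//   author-names: "Alice",
//   title-style: "simple",
// )
//
// #prob(
//   [State the rank-nullity theorem for a linear map $T: V -> W$.],
//   [It asserts $dim V = dim ker T + dim im T$.],
// )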
https://github.com/Myriad-Dreamin/tinymist
https://raw.githubusercontent.com/Myriad-Dreamin/tinymist/main/crates/tinymist-query/src/fixtures/def_use/base.typ
typst
Apache License 2.0
// most simple def-use case #let x = 1; #x
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/wordometer/0.1.0/test/tests.typ
typst
Apache License 2.0
#import "/src/lib.typ": * #set page(width: 15cm, height: auto) #show heading.where(level: 1): it => pagebreak(weak: true) + it + v(1em) = Basics #let el = [ One two _three_ four *five* six. == Seven eight #box[Nine #h(1fr) ten eleven $ sqrt(#[don’t mind me]) $ twelve.] Thirteen #text(red)[fourteen] - #highlight[fifteen] - sixteen #box(rotate(-5deg)[seventeen]) - eighteen! ] #rect(el) #word-count-of(el) = More basics #let el = [ #stack( dir: ltr, spacing: 1fr, table(columns: 3, [one], [two], [three #super[four]], [#sub[five] six], [seven]), rotate(180deg)[eight], circle[nine ten], ) #figure(circle(fill: red, [eleven]), caption: [twelve thirteen]) ] #rect(el) #word-count-of(el) #map-tree(x => x, el) = Punctuation #let el = [ "One *two*, three!" #text(red)[Four], five. #rect[Six, *seven*, eight.] ] #rect(el) Raw tree: #map-tree(x => x, el) Stats: #word-count-of(el) = Word edge cases #let f(el) = { let s = word-count-of(el) highlight(el) + [ [Words: #s.words]] } #f[One two three] #f[Acronyms count as O.N.E. word!] #f[Hyphen-words] are one, but #f[En–Dash] are two. #f[Punctuation doesn't count !?] #f[Qu'est-ce que c'est ?] #f[The amount is \$4,599.99!] #f[One (or so).] = Scoped counts #word-count-callback(stats => box(stroke: blue, inset: 1em)[ Guess what, this box contains #stats.words words! Full statistics: #stats ]) #rect[ #show: word-count One two three four. There are #total-words total words and #total-characters characters. ] = Master function #word-count(totals => [ Hello, stats are in! #totals ]) #block(fill: orange.lighten(90%), inset: 1em)[ #show: word-count One two three four. There are #total-words total words and #total-characters characters. ] = Sentences #let el = [ Pour quoi ? Qu'est-ce que c'est !? "I don't know anything." ] #el #word-count-of(el) = Excluding elements by type #word-count(total => [ == Not me. One, two, three. #strike[Not me, either.] Four. #strike[Words: #total.words] ], exclude: (heading, strike)) = Excluding elements by label #word-count(total => [ === One two Three, four. === Not me! <no-wc> Five, six. #total ], exclude: (raw, <no-wc>)) #line(length: 100%) #word-count(total => [ One, two, three, four. #[That was #total.words, not counting this sentence!] <no-wc> ], exclude: <no-wc>) = Where-selectors #let el = [ == One === Not me! ==== Two three four five ] #el #word-count-of(el, exclude: heading.where(level: 3))) = Custom counters #let el = [ Hello there are vowels here. ] #el #word-count-of(el, counter: txt => (vowels: lower(txt).matches(regex("[aeiou]")).len())) = Raw text extraction #let el = [ Hello, _this_ is a bunch of `content`. \ New line. == What we have - This and that - etcetera #text(red)[what we *need*] #circle[love] ] #el #extract-text(el)
https://github.com/Pablo-Gonzalez-Calderon/apuntes-botanica
https://raw.githubusercontent.com/Pablo-Gonzalez-Calderon/apuntes-botanica/main/src/months/august.typ
typst
Other
#import "../template.typ": new-class, examplebox, obsbox, gloss #import "../figures/fig.typ": * #let clase-1-2 = [ #new-class(new-page: false, "Arquitectura de plantas leñosas", "07 de agosto de 2023") #gloss(size: 21em)[ / Plantas perennes: Presentan crecimiento ilimitado, debido a la mantención de tejido embrionario o meristemático en las yemas. / Módulo: Es todo lo que proviene de una yema (i.e. tallos con hojas y flores). / Módulo vegetativo: Tallos con hojas. / Módulo reproductivo: Flor o infloresencia. / Yema: Estructuras morfológicas que tienen por función proteger una zona responsable del crecimiento de la planta: el meristema. / Yema axilar: Se encuentras en la posición (nudo) de la hoja. Su número _se aproxima_ al número de hojas de la planta. / Yema apical: Se encuentran al final del tallo. / Nudos: Zonas donde están las hojas en un tallo. / Entrenudos: Distancia entre los nudos. / Brácteas: Son hojas modificadas que protegen a la yema. / Tallo: Posee un solo eje. / Rama: Es una composición de tallos / Afolios: Tallos sin hojas. / Hojas filiformes: Hojas muy delgadas, con forma de aguja (e.g. las hojas de pino). ] Las plantas tienen una diversidad de formas de crecer. En general, en un ambiente determinado encontramos distintas formas de paisaje debido a las adaptaciones evolutivas que las plantas del entorno han sufrido (*selección natural*) para _sobrevivir_ y _reproducirse._ Usualmente las presiones evolutivas que sufren las plantas se manifiestan en (a) las formas de las plantas y (b) las características morfológicas y fisiológicas de estas. #examplebox()[Ejemplos de adaptaciones, son resistencia a los fuertes vientos, la máxima conservación y utilización del agua, o la supervivencia a climas nevados.] En este sentido, es importante entender que la supervivencia de una planta se maximiza con la fotosíntesis (mayor luz solar y mayor intercambio gaseoso favorecerán la supervivencia). A pesar de que las especies de plantas son todas diferentes y han evolucionado de manera diferente, es posible identificar *patrones anatómicos generales* entre ellas. #examplebox()[A grandes rasgos, se sabe que las plantas han evolucionado desde (1) plantas vasculares sin semillas, a (2) gimnospermas y, finalmente, a (3) angiospermas. Dentro de este proceso, además, es relevante considerar que ocurrió la deriva continental, incidiendo en la evolución de las plantas.] Las plantas perennes son aquellas que poseen un crecimiento ilimitado debido a que conservan tejido meristemático en sus yemas. Estas, además, se pueden clasificar en *vegetativas* (ramas con hojas) y *reproductivas* (flores). En este contexto, además, entenderemos las ramas como un conjunto de tallo + hojas + yemas, siendo estas últimas capaces de originar una nueva rama. Al producto del desarrollo de una yema lo denominaremos *módulo arquitectónico*. Cuando una yema genera una flor o una espina, deja de haber meristema y, por tanto, desde ese módulo no podrá seguir el crecimiento vegetativo. Cuando se estudia la "arquitectura" de una planta, en realidad se estudia la _forma_ que adquieren las plantas. Las diferencias que se hallan en esta _forma_ se deben a la manera en la que crecen las plantas mismas: periódicamente se van formando *módulos*. 
De esta forma, *las plantas son entidades modulares.* Cada módulo tiene su origen en una *yema.* Las yemas se pueden clasificar según su función en *vegetativas* (crecimiento ilimitado - _perenne_), *reproductivas* (crecimiento limitado) o *mixtas* (contiene tanto partes con funciones vegetativas como partes con funciones reproductivas); y según su localización en *axilares* o *apicales.* = Yemas vegetativas ("de renuevo") Por dentro, se encuentran los *primordios foliares* que formarán las futuras hojas, usualmente encerrados por las brácteas. Dan origen a los *módulos vegetativos.* == Módulos vegetativos Son tallos con hojas, los cuales se pueden subdividir en *dolicoblastos* y *braquiblastos.* La principal fuente para diferenciar ambos es su _morfología y su función_; siempre serán distintas en la misma planta. === Dolicoblastos Son tallos con hojas largos, con entrenudos alejados y visibles a simple vista. Es la rama responsable de la arquitectura basal de la planta y de mantener su crecimiento ilimitado. Pueden tener hojas alternadas, opuestas o verticiladas. ==== Clasificación de las hojas según su distribución Si hay solo una hoja por nudo, se denominan *alternas*. Cuando hay dos hojas por nudo, se llaman *opuestas*. Y cuando hay tres o más hojas por nudo, se denominan *verticiladas.* #examplebox()[ Una planta trepadora se caracteriza por crecer ilimitadamente por su meristema apical o yema terminal. ] === Braquiblasto Son tallos con hojas cortos, con entrenudos estrechos que no son viisbles a simple vista. Dentro de los braquiblastos se hallan los *braquiblastos absolutos* (no pueden volver a crecer, pues agotan todo su meristema) y los *braquiblastos temporales* (pueden volver a crecer en la siguiente temporada). #columns(2)[ ==== Braquiblastos temporales Pueden originar otro braquiblasto como él (*mesoblasto*), una flor sin usar todas sus yemas, un dolicoblasto en su yema apical, o extender sus entrenudos y pasar a ser un dolicoblasto. #colbreak() ==== Braquiblastos absolutos Sus yemas pueden dar origen a espinas (*braquiblasto absoluto espinoso*) o pueden dar origen a flores o inflorescencias (*braquiblasto absoluto folioso* -- _consultar @modulos-reproductivos _). ] #examplebox()[Una espina es un braquiblasto absoluto. Sabemos que una espina puede ser un *tallo* cuando viene de una yema axilar; y que puede ser una *hoja modificada* si sobre ella hay una yema. Existen casos especiales, empero, donde la espina no proviene de la yema ni tampoco es una hoja modificada (e.g. las espinas de la rosa).][En el caso de las cactáceas, las espinas son hojas modificadas, permitiendo que las yemas puedan dedicarse a otras funciones.] // Clase 1 - GM - p.23: "Estructura de una flor" = Yemas reproductivas Por dentro tienen los *primoridos florales*, junto con todos los componentes internos de una flor. Dan origen a los *módulos reproductivos.* == Módulos reproductivos<modulos-reproductivos> Las flores son, esencialmente, ramas que han sido modificadas para poder maximizar la reproducción sexual (a través de polinizadores como las abejas). Se pueden producir dos situaciones: el crecimiento de una flor solitaria (simplemente *flor*), o el crecimiento de varias flores a partir de la misma yema (*inflorescencia*). A su vez, las inflorescencias se dividen en *inflorescencias cimosas* (a.k.a determinadas; las flores crecen _basípetamente_ --desde el ápice hacia la base) e *inflorescencias racemosas* (a.k.a. indeterminadas; las flores crecen _acrópetamente_ --desde la base hacia el ápice). 
Además, las inflorescencias cimosas pueden subdividirse en 3 situaciones generales: + *Monocasio:* El eje principal termina en una flor y desarrolla una sola ramificación florífera lateral (comportamiento que se repite en esta última, al igual que en las nuevas ramas que genere). + *Dicasio:* El eje principal termina en una flor y desarrolla dos ramas laterales floríferas (comportamiento que se repite en estas últimas, al igual que en las nuevas ramas que generen). + *Pleocasio:* El eje principal termina en una flor y desarrolla tres o más ramas laterales floríferas, las cuales, presentan este mismo comportamiento. = Yemas mixtas Originan tallos cuyas yemas axilares y/o apical producen flores durante *la misma* temporada de crecimiento. = Origen de un módulo Cuando hablamos del origen de un módulo, podemos aludir a: + La yema que lo origina: apical o axilar + El módulo que lo origina: dolicoblasto (edad), o braquiblasto (edad). = Protección de las yemas Para proteger una yema, existen al menos 4 mecanismos: + Mediante *escamas* o *brácteas.* + A través de las hojas mismas. + Con espinas. + Mediante *estípulas.* = Clasificacion de leñosas según la vida de las hojas La vida de las hojas usualmente presenta dos tipos de comportamiento, causando que las leñosas puedan clasificarse como: #box(height: .8cm, columns(2)[ - *Deciduos o caducos:* Las hojas viven menos de un año (se caen). - *Siempreverdes:* Las hojas viven más de un año y luego se caen. ]) == Morfología de ramas de árboles deciduos Existen dos situaciones particulares que marcan la morfología de las ramas de los árboles deciduos: 1. *Cuando las hojas se caen,* aún se conserva la yema axilar, pero queda una *cicatriz foliar* (donde estaba la hoja --más específicamente su _peciolo_) y una *cicatriz vascular* (donde estaban las vascularizaciones --xilema y floema-- que iban hacia la hoja). 2. *Cuando la yema apical se desarrolla,* quedan *cicatrices bracteales*, "anillos" perpendiculares alrededor del tallo. ] #let clase-3-4 = [ #new-class(new-page: true, "Arquitectura de plantas herbáceas", "21 de agosto de 2023") #gloss(size: 23em)[ / Tallo acaule: _a_ (sin) + _caule_ (tallo). Se usa para mencionar plantas con tallos muy cortos, como si prácticamente no existiesen. / Variedad vegetal: Representa a un grupo de plantas definido con mayor precisión, seleccionado dentro de la especie, que presenta una serie de características comunes. / Tallo suculento: Tipo de tallo que almacena una gran cantidad de agua o nutrientes en su interior como método de supervivencia. / Mucilago: Sustancia vegetal viscosa. Es una solución acuosa espesa de una goma o dextrina utilizada para suspender sustancias insolubles y para aumentar la viscosidad. / Raíces adventicias: Raíces que no se forman del embrión de la semilla. / Raíz contráctil: Raíz adventicia cuya función es desplazar el brote hasta una ubicación cercana a la superficie del suelo. / Tallo plagiótropo: Tallo de crecimiento horizontal. / Tallo ortótropo: Tallo de crecimiento vertical / Brotes epígeos: Brote que saldrá a la superficie, dando origen a un tallo aéreo. / Nervio medio foliar: Vascularización central en la lámina de la hoja. ] *Las yemas de renuevo siempre van en el tallo* Hoy vamos a estudiar todo lo que es "hierba", las plantas que muchas veces cocinamos en nuestras casas (hortalizas). 
Las plantas herbáceas se diferencian de las plantas leñosas en que #enum( numbering: "(a)", [Tienen más *células parenquimáticas* que *vasos xilemáticos* y *fibras esclerenquimáticas*], [Desarrollan menor cantidad de tejido con paredes lignificadas] ) A pesar de ello, tienen en común con las plantas leñosas en que también son estructuras modulares. Se pueden dividir en tres grupos, según su tiempo de crecimiento: limitado (anuales y bianuales --todas sus yemas florecen) e ilimitado (perennes --quedan yemas vegetativas en el tallo). = Plantas anuales Crecen durante una *única estación de crecimiento*, la cual puede ser un conjunto de meses. En este lapso, hacen tanto crecimiento vegetativo como reproductivo, donde todas sus yemas terminan transformándose en flor (i.e. *no dejan yemas de renuevo*). El único lugar donde queda un yema de renuevo de la planta es en la *semilla.* = Plantas bianuales Crecen durante *dos períodos de crecimiento*, siendo *uno vegetativo y el otro de florecimiento.* Al igual que con las plantas anuales, todas sus yemas terminan transformándose en flor y la únicas yemas de renuevo quedan en la semilla. También se caracterizan por tener *tallo acaule.* == Hortalizas La gran mayoría son plantas modificadas genéticamente (no confundir con transgénicos). Usualmente se presentan en un gran número de *variedades* y corresponden, en general, a *plantas acaules con ciclo más corto, pero agradables al paladar.* Hay tanto anuales como bianuales. #examplebox()[Un ejemplo de familia que presenta varias variedades es la familia _Brassica oleracea,_ a la cual pertenecen la coliflor, el brócoli, las coles de bruselas, colirrábano, etc.] Muchas veces, las diferencias más notables entre variedades puede ser el color (su pigmentación). En este sentido, es importante recordar que los *pigmentos antocianos* (que dan color) siempre van ubicados en las vacuolas de las células == Plantas transgénicas Por otro lado, a diferencia de una planta modificada genéticamente, una *planta transgénica* es una planta cuyo genoma ha sido modificado mediante ingeniería genética. Como consecuencia, la planta transgénica muestra una nueva característica. En Chile está prohibida la venta de alimentos transgénicos. Únicamente está permitida la exportación de semillas a mercados del hemisferio norte. A nivel mundial, el 99% de los productos transgénicos producidos son algodón, maíz, soya, canola y arroz. = Plantas perennes *Dejan yemas de renuevo* en órganos que almacenan mucho material nutritivo para rebrotar en la "época favorable". = Diferenciación morfológica por la variabilidad en la estructura foliar #grid( columns: (1fr, 1fr), column-gutter: 11pt, [ Morfológicamente, la *base* de la hoja es la encargada de unir el *peciolo* al tallo. A su vez, el peciolo se encarga de unir la *lámina* a la base. La variabilidad en forma y tamaño de estas estructuras da origen a distintos tipos de plantas. Así, por ejemplo, las cebollas poseen más desarrollada la base y la lámina que el peciolo, el cual no existe (*hoja sésil*); o el apio posee más desarrollado su peciolo. ], figure( caption: "Estructura básica de la hoja", image("../figures/fig_hoja.svg", height: 4.5cm) ) ) = Morfología de tallos herbáceos Son aquellos que no tienen adición de madera, debido a que viven menos de un año (condición para que no sea una planta leñosa). 
Los tallos pueden clasificarse de la siguiente manera: #align(center, [ #table( columns: (2fr, 1fr), inset: 1em, stroke: (dash: "dashed", paint: luma(220)), [ #text(weight: 700, size: 1.1em, "Aéreos") #align(left, columns(2)[ - Dolicoblastos - Braquiblastos - Volubles (plantas trepadoras) - Espinas - Zarcillos (caulomáticos y filomáticos) #colbreak() - Filóclados - Cladodios - Estolones - Comprimidos (plantas acaules) ]) ], [ #text(weight: 700, size: 1.1em, "Subterráneos") #align(left)[ - Tubérculos - Rizomas - Cormos - Bulbos #enum( numbering: "i.", [Tunicados], [Lobulados], [Escamosos] ) ] ] ) ]) Los tallos aéreos se caracterizan por tener hojas "normales" (para hacer fotosíntesis), y también contener a los órganos reproductivos (flores o inflorescencias); mientras que los subterráneos tienen hojas modificadas, y, usualmente, almacenan nutrientes para que la planta pueda sobrevivir en un periodo desfavorable. De esta manera, si una planta tiene tallos subterráneos y aéreos, ante malas condiciones ambientales, pueden morir los tallos aéreos, quedando solo los tallos subterráneos. Y estos permitirán que la planta vuelva a crecer cuando las condiciones vuelvan a ser favorables. Se debe hacer la distinción, no obstante, de que *los tallos aéreos en las herbáceas son los que no viven más de un año., pero no así los tallos subterráneos.* == Tallos volubles Este tipo de tallos se caracteriza por #enum( numbering: "(a)", [Tienen escasa lignificación], [No pueden mantenerse erguidos por sí solos (les falta *lignina* o fibra)], [Engrosan parejamente], [Portan hojas y yemas], [Tienen un xilema escasamente desarrollado] ) Además, estos tallos se caracterizan porque evolucionaron su manera de crecer en forma helicoidal, con la ayuda de un "tutor" natural o entregado por un agricultor. Por lo anterior, pueden crecer o enrollarse en sentido horario (*dextrógiro*) o antihorario (*levógiro*), dependiendo de la especie. Además, este tallo voluble puede, eventualmente, transformarse en un tallo leñoso _*si dura más de un año*_, convirtiéndose en una *liana.* #obsbox()[ _Observación._ A fin de cuentas, el crecimiento helicoidal es un mero mecanismos para trepar. También pueden emplear *espinas retrorsas* ("miran" hacia abajo), *raíces aéreas* o *zarcillos* (caulomáticos y filomáticos). ] == Zarcillos Mientras que el tallo voluble es un tallo "normal" que se enrolla, un zarcillo es una estructura independiente (un tallo especializado o una hoja especializada --zarcillo caulomático y filomático, respectivamente) para enrollarse y trepar. El zarcillo será herbáceo o leñoso dependiendo de la planta. Por ejemplo, será herbáceo en plantas como el zapallo y todas las _cucurbitaceas_; pero será leñoso en plantas como la vid (_vitaceae_). === Zarcillos caulomáticos Estos zarcillos provienen del tallo de la planta, y poseen las siguientes características: #enum( numbering: "(a)", [Provienen de yemas axilares], [Pueden ser simples o ramificados], [*No* portan ni hojas ni yemas], [Permiten asirse a la planta] ) Como no tienen ni hojas ni yemas tienen un *crecimiento limitado.* Por lo tanto, se pueden clasificar como un tipo de braquiblasto absoluto. === Zarcillo filomático Estos zarcillos provienen de una hoja (del *nervio medio* de la hoja --son una hoja modificada). Su diferencia con el caulomático es que no proviene de una yema axilar. #obsbox()[ _Observación._ Para que el zarcillo filomático sea leñoso, la planta tiene que ser siempreverde (vivir más de un año). 
] == Filóclados Esencialmente, son tallos con forma de hoja. Usualmente se halla en plantas en ambientes fríos y sombríos, por eso su forma busca maximizar la fotosíntesis. Poseen las siguientes características: #enum( numbering: "(a)", [Portan una hoja modificada (escama o bráctea) con una yema reproductiva], [Los tallos normales también presentan escamas o brácteas con yemas vegetativas, que luego generarán un tallo modificado (filóclado)], [Los tallos normales se generan desde un *rizoma*] ) #examplebox()[ _Ruscus sp._ es una planta que posee tres tipos de tallos: normales, filóclados y rizomas. ] == Cladodios Es un tallo fotosintetizador almacenador de agua (posee un parénquima acuífero --en su interior, el *mucilago* es la sustancia que absorbe y retiene el agua en la vacuola). Poseen las siguientes características: #enum( numbering: "(a)", [Tallos suculentos almacenadores de agua], [Portan hojas transformadas en espinas], [Las espinas (hojas) se disponen en braquiblastos temporales llamado *areolas*], [Los braquiblastos temporales pueden originar un nuevo dolicoblasto, o flores], ) == Tallos aéreos comprimidos (plantas acaules) Son tallos con entrenudos muy cortos. Estos tallos dan origen a las plantas acaules y poseen las siguientes características generales: #enum( numbering: "(a)", [Los poseen las plantas herbáceas bianuales con un solo tallo, o también las herbáceas perennes], [Es un tallo comprimido con hojas dispuestas en una *roseta basal*], [Poseen una yema apical reproductiva que origina una inflorescencia en la segunda estación de crecimiento en la _mayoría_ de las veces. (e.g. de excepción es la frutilla)], [Tienen hojas normales u hojas modificadas], [Pueden ser aéreos o subterráneos], [Pueden tener *raíces adventicias* o raíces no adventicias] ) Las *hemicriptófitas* (_"semi_ - _escondidas"_) son las plantas acaules con tallo (comprimido) aéreo. Las *criptófitas* (_"escondidas"_) son las plantas acaules con tallo (comprimido) subterráneo. == Estolones Sirven para el proceso de reproducción _asexual_ de ciertas plantas, también denominado *reproducción vegetativa*, donde se producen clones genéticos de una planta madre. Poseen las siguientes características: #enum( numbering: "(a)", [Tienen *tallo plagiótropo*], [Se generan de las yemas axilares de una planta acaule], [Portan hojas transformadas en escamas o brácteas (su función principal *no* es la fotosíntesis)], [Generan *brotes* de la planta madre desde sus yemas axilares o apicales], [Los brotes pueden generar raíces adventicias y, con esto, independizarse de la planta madre], [La planta madre porta hojas normales y produce flores] ) Si una planta que puede reproducirse por estolones proviene de un cigoto (producto de una flor fecundada --reproducción sexual), entonces se le denomina *genet*. 
Si esta planta, a través de los estolones, se reproduce asexualmente, entonces sus *clones* se llaman *ramet.* == Rizomas Sus principales características son: #enum( numbering: "(a)", [Son tallos plagiótropos subterráneos], [Son tallos engrosados que almacenan nutrientes], [Sus hojas corresponden a escamas o brácteas], [Poseen crecimiento ilimitado], [Generan raíces adventicias de los rizomas nuevos], [De sus yemas se generan dos tipos de tallos: (1) aéreos con hojas y flores, y (2) subterráneos (otros rizomas)] ) Pueden presentar dos formas de crecimiento: *simpodial* o *monopodial.* === Crecimiento simpodial Se da cuando de las yemas axilares crece un nuevo rizoma y, en el nuevo rizoma, su yema apical generará el brote aéreo que florecerá. === Crecimiento monopodial Se da cuando el rizoma siempre crece por las yemas apicales, mientras que las yemas axilares solo generan *brotes epígeos*. == Tubérculos Poseen las siguientes características: #enum( numbering: "(a)", [Son tallos donde se almacenan sustancias de reserva], [*Solo tienen yemas axilares*], [Las yemas axilares generan tallos aéreos, que, en sus bases, generarán raíces adventicias], [Desde la base de los tallos aéreos saldrán tallos plagiótropos subterráneos, los cuales *agotan su yema apical*. Al agotarse, el extremo más alejado del tallo dará origen a un nuevo tubérculo], [En el nuevo tubérculo, el extremo más cercano a la planta es el *extremo proximal,* mientras que el más lejano es el *extremo distal*], [En el extremo proximal se haya la *cicatriz de desprendimiento* del tallo plagiótropo] ) == Cormos Poseen las siguientes características: #enum( numbering: "(a)", [Son tallos subterráneos], [Están engrosados para almacenar nutrientes], [Presentan hojas que corresponden a escamas], [Poseen crecimiento limitado], [Generan raíces adventicias], [De sus yemas se generan dos tipos de tallos: (1) aéreos con hojas y flores de la yema apical, y (2) subterráneos (cormos) de las yemas axilares] ) == Bulbos tunicados Poseen las siguientes características: #enum( numbering: "(a)", [Almacenan nutrientes en la parte basal de sus hojas (hojas modificadas llamadas *catáfilos*)], [Los catáfilos pueden ser *reservantes* o suculentos si sirven para almacenar nutrientes, o *protectores* o fibrosos si se encuentran en la parte externa del bulbo], [Tienen crecimiento limitado], [En la base hay un tallo comprimido, del cual se desarrollan las raíces adventicias], [Su yema apical es reproductiva], [Desarrollan raíces adventicias] ) #examplebox()[ Un conocido ejemplo de bulbo tunicado es la cebolla, donde lo que comemos corresponde, efectivamente al bulbo, desechando cualquier tallo aéreo que pueda salir de él. ] == Bulbo lobulado Se caracterizan por: #enum( numbering: "(a)", [Almacenar sustancias de reserva en yemas axilares desarrolladas ("diente" o lóbulo)], [Cada yema axilar tiene sus propios catáfilos almacenadores y protectores. Y todos están protegidos por fuera por otros catáfilos protectores], [Al centro de los catáfilos almacenadores se hayan los catáfilos fotosintetizadores, que pueden dar origen a una nueva planta], [Tener crecimiento ilimitado], [En la base hay un tallo comprimido, del cual se desarrollan las raíces adventicias], [Desarrollar raíces adventicias] ) #examplebox()[ Un conocido ejemplo de bulbo lobulado es el ajo, donde la parte que comemos es, efectivamente, una yema axilar. 
] == Bulbo escamoso Se caracteriza por: #enum( numbering: "(a)", [Almacenar nutrientes en la parte basal de sus hojas (catáfilos de reserva)], [Estar dispuestos como tejas, a diferencia de los tunicados que están simplemente superpuestos], [No presentan catáfilos protectores] ) = Hipocotilo A veces, consumimos otras partes de la planta. Los hipocotilos son una mezcla entre la raíz y el tallo, es decir, a la *parte basal del tallo.* Así, en estas especies hay un engrosamiento del tallo y, en mayor o menor medida, también un engrosamiento de la raíz. Cuando el engrosamiento se da más hacia la raíz, decimos que estamos antes un *tubérculo radical o raíz tuberosa.* ] #let clase-5-6 = [ #new-class(new-page: true, "Citología y diferenciación celular", "28 de agosto de 2023") Recordemos que la célula vegetal debe ser considerada como un sistema de membranas lipoprotéicas. El modelo de Singer dice que las membranas están compuestas por bicapas de fosfolípidos con proteínas que atraviesan la membrana (proteínas integrales) y proteínas periféricas adheridas a la membrana, que no cruzan la membrana completamente. Además, dentro de la célula, las membranas son fundamentales, porque dentro van las enzimas que son imprescindibles para el desarrollo celular. Las vesículas son esenciales para mover moléculas sintetizadas en el *dictiosoma* a su área de consumo. #obsbox()[ _Observación._ El Aparato de Golgi se denomina Dictiosoma en las células vegetales. ] #obsbox()[ En una célula, cuando se desee unir dos moléculas para formar un enlace, se requerirá de energía. Esa energía puede ser recuperada luego, al romper ese enlace. A este proceso de síntesis y degradación se le llama *metabolismo,* el cual ocurre, principalmente, con *moléculas orgánicas* (aquellas que poseen átomos de carbono en su estructura). ] Dentro de las plantas, diremos que las *células parenquimáticas* son las células con contenido vivo, que todavía *no están especializadas.* ¿Y de donde se obtiene el material para "armar" estas células? De la fotosíntesis, la cual transforma energía lumínica en energía química (que queda *contenida en moléculas de glucosa* -- $C_6H_12O_6$) a través de los cloroplastos. La glucosa puede transformarse en *almidón* o *celulosa* al agruparse con otras glucosas. Estas agrupaciones reciben la denominación de *macromoléculas.* El almidón se caracterizará por ser soluble en agua, mientras que la celulosa no. = Almidón Se puede formar de *amilosa* (cadena lineal de glucosas unidas por enlaces #sym.alpha *hidrolizables* entre los carbonos 1 y 4), o *amilopectina* (cadenas ramificadas de glucosas, donde la cadena principal es una amilosa, a la cual se le unen otras amilosas con enlaces #sym.alpha entre los carbonos 1 y 6). #obsbox()[ La amilosa posee menor cantidad de glucosa que la amilopectina, y esta última es responsable de formar 75% de los almidones comunes. ][ Plantas como la papa, el camote o la betarraga tienen un 20% de almidón. ] = Celulosa Forma entre el 40% al 60% de la pared celular, pudiendo presentarse de forma pura. Una sola molécula de celulosa consiste de 100 a 15.000 unidades de glucosa, y más de 1000 moléculas forman una *microfibrilla de celulosa.* #obsbox()[ Las macromoléculas de celulosa se unen mediante enlaces de *puente de hidrógeno.* ] Las características más relevantes de la celulosa son: #enum( numbering: "1.", [Es un polisacárido], [Se denomina como *glucano* (i.e. 
polímero de glucosa)], [Es un polímero lineal de resistencia -- Todas las moléculas constituyentes son iguales], [Es una biomolécula orgánica que se produce _naturalmente_ en la planta], [Posee enlaces #sym.beta glucosídicos *no hidrolizables* entre el $C_1$ y el $C_4$], // El almidón es alpha [Es muy permeable] ) #obsbox()[ _Observación._ Que sea un enlace no hidrolizables implica que solo se puede romper con una enzima (*celulasa*) ] == Microfibrillas de celulosa Como se mencionó, polisacáridos lineales formados por monómeros de glucosa unidos por enlaces #sym.beta (1-4) forman la celulosa. Las largas moléculas de celulosa se asocian entre sí por enlaces de puente de hidrógeno y forman la estructura denominada *microfibrilla de celulosa,* la cual contiene alrededor de 50 moléculas de este polisacárido, siendo muy resistentes. = Pared celular La *pared primaria* es una estructura mecánicamente dinámica que rodea ala célula durante su proceso de rápida expansión o elongación celular --proceso que sigue a la división celular. Crece hacia afuera de la membrana plasmática. La *pared secundaria* es una estructura mecánicamente estática que determina la forma y tamaño de la célula en su máxima diferenciación. Crece entre la pared primaria y la membrana plasmática. == Capas + Lamina media + Pared primaria + Pared secundaria (solo si necesita --e.g. las células que deben transportar agua) == Lámina media Se encuentra *entre dos células adyacentes* y está constituida de *2 pectinas* (polímeros muy ramificados): #enum( numbering: "(1)", [Arabinogalactano (Arabinosa + Galactosa) Pectina neutra], [Ramnogalacturonano (Ramnosa + Ácido Galacturónico) Pectina ácida] ) Estas pectinas tienen un papel importante en la *flexibilidad de la pared celular para el crecimiento.* Pueden hidratarse (forman geles), aportando plasticidad a la pared celular. Es la responsable de *cementar y mantener unidas las células.* Además, se adicionan iones de calcio ($"Ca"^(++)$) para formar *Pectatos de Calcio*, los cuales son menos susceptibles de romperse, y corresponden a pectinas muy unidas. == Pared Primaria Corresponde a la primera capa claramente visible de la pared celular. Se localiza entre las membrana plasmática y la lámina media, siendo responsable de la forma y tamaño inicial de la célula vegetal y su posterior desarrollo. Esta pared celular aparece en _todas las células vegetales,_ originándose en la división celular y se sintetiza durante todo el crecimiento de las células metabólicamente activas (e.g. parenquimáticas, secretoras, etc.). Está formada por los siguientes elementos: #enum( numbering: "(1)", [Microfibrillas de celulosa (70%) *laxa y desordenada*], [*Xiloglucano* o hemicelulosa (Xilosa + Glucosa)], [Arabinogalactano y Ramnogalacturonano], [Glicoproteínas] ) == Plasmodesmos Pese a que la pared celular es permeable, existen conductos denominados *plasmodesmos* que conectan los citoplasmas de células adyacentes, formando el *simplasto.* Por los plasmodesmos pasan membranas de ambas células que, si corresponden a membranas de retículo endoplasmático, se denominan *desmotúbulos* al atravesar la pared primaria. Los plasmodesmos pueden ser primarios o secundarios, según si se formaron en la división celular, o en una etapa más avanzada de la vida de la célula, respectivamente. Además, pueden cerrarse si la planta así lo requiere (e.g. ante la presencia de un virus). La contraparte de la vía simplástica, es la vía *apoplástica*, la cual es más rápida y pasa por fuera de paredes celulares de las células. 
== Pared secundaria Aparece una vez que la célula ya se especializó y no va a crecer más. Presenta un *grado de polimerización de la celulosa mayor que la pared primaria,* teniendo _más tipos_ de polímeros que esta. Además, la disposición de las microfibrillas de celulosa es más *ordenada y compacta.* Usualmente aparece en células que tienen la misión de dar soporte o conducir sustancias. Cuando se va construyendo, se van depositando sucesivas capas de microfibrillas de celulosa (*aposición*). Así, una vez terminada, *la célula muere por apoptosis.* Además, es el componente principal de la madera, teniendo muy *pocas pectinas y glicoproteínas.* Está constituida principalmente por: #enum( numbering: "(1)", [Celulosa + Lignina (Xilema, Fibras esclerenquimáticas)], [Celulosa + Cutina (Epidermis y Anexos --e.g. las manzanas con epidermis cutinizada, tienen "cera" por fuera.)], [Celulosa + Suberina (Peridermis)], [Celulosa + Silicatos (Pastos)] ) Aquí, destaca la relevancia de la lignina, pues es bastante común encontrarla. === Lignina Presenta un elevado peso molecular que resulta de la unión de varios ácidos y alcoholes fenilpropílicos. Es un polímero natural muy complejo. Además, está presente en células conductoras en el xilema (células traqueideas y traqueas) y en células que aportan a la resistencia estructural de las plantas (fibras y células pétreas). = Formación de la pared celular A grandes rasgos, la pared celular se forma de la siguiente manera: + Fragmoplasto (fragmosomas): Son vesículas en la zona del fragmoplasto de la célula en división. + Placa celular: Es la fusión de vesículas de fragmosomas. + Lámina media: Contacta paredes laterales de células hijas. + Pared primaria + Pared secundaria (no siempre presente) #obsbox()[ La formación del fragmoplasto ocurre en la telofase. Es importante recordar que las etapas de la mitosis son profase, pro metafase, metafase, anafase y telofase ][ Todos los materiales vienen del Dictiosoma, pues sintetiza las pectinas ] = Células, organelos, estructuras y compuestos celulares observables a través del microscopio óptico == Microscopio óptico En los próximos laboratorios ocuparemos un microscopio binocular con 4 objetivos: 4X, 10X, 40X y 100X (*no* usaremos 100X); una Platina ("mesita de trabajo donde va la muestra"), una fuente de luz, un condensador o diafragma (permite controlar la entrada de luz), un tornillo macrométrico (sube y baja la platina) y un tornillo micrometrico (permite un mayor enfoque). _*¡Importante!*_ Al terminar de ocupar el microscopio, se baja la platina, se deja la muestra sobre el mesón y se apaga la luz. == Colorantes La *safranina* siempre tiñe de rojo la lignina (indica la presencia de pared secundaria). Por su parte, el *_fast green_* se une a la celulosa (pared primaria), adquiriendo una coloración azul clara/verde. #obsbox()[ _Observación._ Una célula es *isodiamétrica* cuando en el plano longitudinal y en el plano transversal tienen la misma forma. Por su parte, cuando se ven distintas en dos planos, se llaman *diamétricas.* ] = Células vivas y muertas Se diferencian en la tinción y el tipo de pared. Las células vivas serán todas aquellas que tengan pared primaria y organelos, mientras que las células muertas poseen pared secundaria (e.g. el xilema: por apoptosis, la célula saca todo los organelos en su interior para dar paso al agua, manteniendo su forma solo por la pared secundaria). = Vacuola Organelo celular rodeado por una *membrana denominada tonoplasto*. 
Generalmente de gran volumen replegando el citoplasma a la periferia. Almacena agua y otras sustancias, está relacionada con la *turgencia celular.* Puede almacenar *taninos* o *cristales* (sustancias de desecho), para que la planta, luego, se encargue de deshacerse de ellos. Se ven fácilmente bajo el microscopio. Es capaz de absorber mucha agua debido a que concentra gran cantidad de sales (*hipertónico*), mientras que el citoplasma es *hipotónico.* Y no "estalla" debido a que la pared celular es muy resistente. La vacuola se puede ver bajo el microscopio cuando tiene pigmentos hidrosolubles, como los antocianos. Si el medio exterior es más hipertónico que la vacuola, entonces el agua de la vacuola sale de la célula y ocurre la *plasmólisis.* Ahí, la célula "se chupa", a excepción de la pared celular, solo quedando sujetas a esta última a través de los plasmodesmos. = Plastidios Organelos de distinta forma y tamaño, con doble unidad de membrana. Pueden ser coloreados o incoloros, provienen de *proplastidios* (ancestro común) de células meristemáticas. De acuerdo a si tienen pigmentos o no, se pueden clasificar en dos: #box(enum( numbering: "(a)", [ Con pigmentos + Cloroplastos: tienen clorofila, pigmento verdoso + Cromoplastos: pigmentos de tonos rojos y naranjos - Globulosos - Fibrilares o tubulosos - Cristalinos - Membranosos ], [ Sin pigmentos + Leucoplastos - Amiloplastos: plastidios no pigmentados que almacenan almidón - Elaioplastos: almacenan aceites - Aleuroplastos: almacenan proteínas ] )) == Cloroplastos Organelos de doble membrana con forma de disco de color verde con presencia de clorofila. Se encuentran en órganos que presentan una epidermis como tejido externo, principalmente en hojas. En la epidermis, *solamente las células oclusivas presentan cloroplastos* (partes del estoma). El centro del estoma se llama *ostiolo.* == Cromoplastos Organelos con pigmentos carotenos (color amarillo, naranja), licopenos (color rojo), xantofilas (color amarillo). Se originan a partir de proplastidios o de cloroplastos que han perdido la clorofila. Se encuentran en diversos órganos a los que le otorgan su color característico como pétalos (atrae polinizadores), frutos o raíces (atrae dispersores). == Leucoplastos Organelos _no_ pigmentados que almacenan productos celulares: almidón (amiloplastos), lípidos (elaioplastos) y proteínas (aleuroplastos). Se encuentran principalmente en tejidos reservantes. *Amiloplastos* Almacenan almidón, corresponden, netamente, a los granos de almidón. El almidón se almacena en *capas concéntricas o excéntricas* en torno a un punto denominado *hilo*. Se utiliza lugol (tintura de yodo) para su reconocimiento. = Cristales Productos de desecho de las plantas que se acumulan en la vacuola en forma de cristales. Los más comunes son cristales de oxalato de calcio y los menos comunes son los de carbonato de calcio. Usualmente están en vacuolas de las hojas, pues ellas se terminarán cayendo, deshaciéndose de los desechos. #grid( column-gutter: 11pt, columns: (1fr, 1fr), [ *Oxalato de calcio* Pueden tener distinta forma - Drusa: agregado de cristales prismáticos. - Cristal prismático - Rafidio: cristales con forma de aguja ], [ *Carbonato de calcio* Están en forma de *cistolito,* cristales agregados sobre una invaginación celulósica de la pared primaria. #obsbox()[ Un *litocisto* es una célula que tiene un cistolito en su interior. ] ] ) ]
https://github.com/Maso03/Bachelor
https://raw.githubusercontent.com/Maso03/Bachelor/main/Bachelorarbeit/chapters/introduction.typ
typst
MIT License
= Einleitung In einer zunehmend digitalen Welt ist die Optimierung der Benutzererfahrung ein zentrales Anliegen für Unternehmen. Künstliche Intelligenz (KI) hat sich dabei als eine der innovativsten Technologien herauskristallisiert, die neue Wege eröffnet, um Benutzerinteraktionen zu verbessern und gleichzeitig Effizienzsteigerungen zu realisieren. Diese Bachelorarbeit widmet sich der Konzeption und Implementierung eines 3D-Avatars, der als interaktiver Berater fungiert und auf fortschrittlicher künstlicher Intelligenz basiert. Das Hauptziel dieser Arbeit ist die Entwicklung eines Chat-Avatars, der direkt aus einer Software heraus von Nutzern aufgerufen werden kann und Hilfestellungen zur Bedienung der Software sowie Antworten auf weiterführende Fragen bietet. Der Avatar soll in der Lage sein, verbale Erläuterungen zu Funktionen zu geben, Bedienungsanleitungen zu präsentieren und möglicherweise auch unterstützende Videos abzuspielen. Die technische Basis bildet dabei die Verwendung von ConvAI in Kombination mit der Unreal Engine. = Problemstellung Die Digitalisierung hat das Nutzungsverhalten von Software-Anwendern grundlegend verändert. Immer mehr Nutzer bevorzugen intuitive und sofort verfügbare Unterstützung direkt in der Software, was Unternehmen vor neue Herausforderungen stellt. Eine der zentralen Fragen lautet: Wie kann man Benutzerinteraktionen effizienter und ansprechender gestalten, um den Bedürfnissen und Erwartungen der Nutzer gerecht zu werden? Traditionelle Hilfesysteme bieten oft nicht die Interaktivität und personalisierte Unterstützung, die Anwender heutzutage erwarten. Ein 3D-Avatar, der als intelligenter Assistent fungiert, könnte eine vielversprechende Lösung darstellen. Solche Avatare sollen nicht nur detaillierte Informationen über die Software bereitstellen, sondern auch auf individuelle Anfragen reagieren und eine personalisierte Unterstützung bieten. Dies stellt jedoch mehrere Herausforderungen und Anforderungen: 1. *Technologische Komplexität*: Die Entwicklung eines realistischen und interaktiven 3D-Avatars erfordert den Einsatz fortschrittlicher Technologien wie generative Intelligenz (GenAI) und Sprachsteuerung. Diese Technologien müssen nahtlos integriert werden, um ein reibungsloses und überzeugendes Benutzererlebnis zu gewährleisten. 2. *Datenmanagement und -sicherheit*: Um personalisierte Beratung bieten zu können, muss der Avatar auf eine große Menge an Nutzerdaten zugreifen und diese analysieren. Dies erfordert ein robustes Datenmanagementsystem sowie Maßnahmen zum Schutz der Privatsphäre und Sicherheit der Nutzerdaten. 3. *Benutzerfreundlichkeit*: Der Avatar muss intuitiv und einfach zu bedienen sein, um von Anwendern akzeptiert zu werden. Eine komplexe oder fehleranfällige Bedienung könnte Nutzer abschrecken und das Vertrauen in die Technologie untergraben. 4. *Skalierbarkeit*: Die Lösung muss skalierbar sein, um in verschiedenen Softwareanwendungen und Branchen eingesetzt werden zu können. Dies erfordert flexible und anpassbare Systeme, die auf unterschiedliche Anwendungsfälle und Nutzeranforderungen reagieren können. 5. *Akzeptanz und Vertrauen*: Nutzer müssen Vertrauen in die Technologie und den Avatar als Assistenten entwickeln. Dies erfordert eine hohe Genauigkeit der bereitgestellten Informationen sowie eine positive und konsistente Nutzererfahrung. = Zielsetzung und Vorgehensweise Ziel ist die Entwicklung und Umsetzung eines minimal funktionsfähigen Prototyps (Durchstich) eines Chat-Avatars, der in der Software VaudisX integriert werden soll. 
Die Arbeit umfasst die Erstellung einer Softwarelösung zur Unterstützung von Schulung und Datenanfragen (DAQ) mit begleitender Dokumentation. Der Avatar soll, mithilfe der gefütterten Dokumentation, innerhalb der Software Hilfestellung geben, die in Textform, durch Videos oder andere unterstützende Elemente erfolgen kann. *Technische Basis*: - Entwicklung mit Unreal Engine und ConvAI - Nutzung eines bereits vorhandenen Avatars als Grundlage, der modifiziert wird - Einbindung eines Dealer Management Systems (DMS) Testsystems wie VaudisX *Hilfsmittel*: - Zugriff auf ConvAI und das DMS Testsystem - Nutzung eines Laptops für Entwicklungstests *Entwicklung*: - Erstellung einer Kopie eines bestehenden Projekts und Anpassung für den neuen Zweck - Integration einer 3D-Umgebung mit dem Avatar im Vordergrund und unterstützenden Elementen (z.B. Tafel zur Erklärung oder Videowiedergabe) - Anzeige von Softwareerklärungen auf dem Bildschirm *Anwendungsfälle*: - Unterstützung von Mitarbeitern bei VW-Händlern - Benutzer von VaudisX im Autohaus Diese Arbeit leistet einen wichtigen Beitrag zur aktuellen Forschung im Bereich der digitalen Unterstützungssysteme und zeigt auf, wie durch den Einsatz modernster Technologien innovative und effiziente Benutzererfahrungen geschaffen werden können. = Stand der Dinge Die Entwicklung von 3D-Avataren und intelligenten Assistenten hat in den letzten Jahren erhebliche Fortschritte gemacht. Unternehmen wie Google, Amazon und Microsoft investieren stark in die Forschung und Entwicklung von Conversational AI und GenAI, um personalisierte und interaktive Benutzererlebnisse zu schaffen. 5 Tage nach der Veröffentlichung von ChatGPT, einem Chatbot, der auf dem Large Language Model GPT-3 basiert, wurde er bereits von über 1 Million Nutzern verwendet und hat gezeigt, wie groß das Interesse an solchen Technologien ist. Nach nur 2 Monaten hatte ChatGPT bereits über 100 Millionen monatliche Nutzer und wurde in verschiedenen Branchen eingesetzt, darunter E-Commerce, Kundenservice und Bildung. Das macht ChatGPT zur schnellsten wachsenden Anwendung und zeigt das enorme Potenzial von KI für die Zukunft @KI-Wachstum. Zum Vergleich, Instagram brauchte für die gleiche Anzahl an Nutzern zwei Jahre und sechs Monate und TikTok brauchte für diese Anzahl an Nutzern neun Monate @KI-TikTok. Aktuell stehen Entwickler noch am Anfang der Entwicklung von KI-Systemen. Studien zeigen, dass potenziell erhebliche Auswirkungen auf Unternehmensprozesse genommen werden und damit letzlich auf die Kernelemente der Geschäftsmodelle. Dabei werden Tätigkeiten im Verwaltungsbetrieb oder im Büro stärker betroffen sein als Tätigkeiten im Bereich der Produktion @GenAI-Standard. Schätzungsweise sind 300 Millionen Arbeitsplätze von KI betroffen @GenAI-Standard. Das Aufkommen von KI-Systemen wird, wie auch in der Vergangenheit, das Internet und die Mobiltechnologie, die Wirtschaft dazu zwingen, sich anzupassen um wettbewerbsfähig und relevant zu bleiben @GenAI-Standard2. KI zwingt Unternehmen dazu die Geschäftmodelle anzupassen und neu zu überdenken, um sich in einem schnell wandelnden Umfeld zu behaupten @GenAI-Standard2. Da kommen 3D-Avatare ins Spiel. Sie sind ein wichtiger Bestandteil der digitalen Transformation und bieten Unternehmen die Möglichkeit, mit ihren Kunden auf eine neue Art und Weise zu interagieren. Avatare können als virtuelle Assistenten, Verkaufsberater oder Kundenservice-Agenten eingesetzt werden und personalisierte Unterstützung bieten. 
Durch den Einsatz von KI-Technologien wie Conversational AI und GenAI können Avatare natürliche und menschenähnliche Interaktionen ermöglichen und so die Benutzererfahrung verbessern. Auf dem Markt existieren verschieden Lösungen, meistens in Form eines Chatbots, welcher Fragen zu einem Problem beantwortet. Diese Chatbots basieren auf Maschinelles Lernen und können über ihre eigenen Trainingsdaten keine weitere Hilfe leisten. Die Entwicklung eines 3D-Avatars, der als interaktiver Berater für eine Software fungiert, ist jedoch neu und bietet Potenzial für innovative Anwendungen, da dieser auf GenAI basiert und somit in der Lage ist, personalisierte und kontextbezogene Unterstützung zu bieten.
https://github.com/ouuan/cv
https://raw.githubusercontent.com/ouuan/cv/master/cv.typ
typst
Apache License 2.0
#import "template.typ": * #show: cv.with( name: "游宇凡", email: "<EMAIL>", birthday: "2003.1.26", github-id: "ouuan", ) #education(( ( school: [#box(image("./logos/tsinghua.svg"), height: 1.1em, baseline: 0.19em) 清华大学 致理书院], major: "信息与计算科学", degree: "本科", date: "2021.9 — 2025.6(预期)", ), )) #research(( ( name: "ZIP 文件格式解析歧义安全问题", supervisor: "陈建军", date: "2023.10 — 至今", points: ( "通过黑盒模糊测试发现了不同 ZIP 文件解析器(解压软件)之间的大量解析歧义问题", "发现了绕过杀毒软件、邮件安全网关检测,造成 Office 文档显示差异绕过审核机制、查重检测,伪造 Office 文档签名,伪造 JAR 文件签名,冒用编辑器扩展 ID 等多个不同场景下的安全漏洞", "上报漏洞后已得到 Gmail、Coremail、Go、LibreOffice、Spring Boot 等厂商的确认修复、漏洞赏金和 CVE 编号", "研究成果预计将以一作身份发表论文", ), ), ( name: "基于代码属性图的PHP程序污点型漏洞挖掘", supervisor: "陈建军", date: "2023.7 — 2023.9", points: ( "使用基于代码属性图的PHP代码静态分析方法,对一些开源项目进行漏洞挖掘", "对现有漏洞挖掘工具进行了优化和修复,提升了工具的易用性、效率和准确性", "在8个开源项目中发现了SSRF、XSS、Path Traversal、SQLi、DoS、CSRF等类型的多个漏洞", "对漏洞进行了上报,申请到4个CVE,并获得了漏洞赏金", ), ), )) #let courses = ( "计算机网络原理", "A+", "现代密码学", "A+", "操作系统", "A+", "计算机系统概论", "A+", "程序设计训练", "A+", "软件工程", "A+", "数字逻辑设计", "A+", "数字逻辑实验", "A+", "网络空间安全导论", "A", "计算机网络安全技术", "A", "离散数学(2)", "A", "离散数学(1)", "A-", "数据结构", "A-", "形式语言与自动机", "A-", "数值分析", "A-", "计算机组成原理", "B+", ) #section-header( title: "学业成绩", icon: "./icons/graduation-cap.svg", ) - 计算机专业课 GPA 3.96 / 4.0,其中 8 门课程获得 A+。参与了大量课程项目,在实践中巩固了安全、网络、系统、数字逻辑、软件工程等各领域的专业知识,并培养了出色的学习运用新知识、解决实际问题、参与团队协作的能力。 - 修读了数学分析、高等线性代数、抽象代数、概率论、常微分方程、拓扑学等为数学专业开设的高难度数学课。 - 计算机专业课成绩如下: #grid( columns: (auto, 1fr, auto, 1fr, auto, 1fr, auto, 1fr), column-gutter: 1em, row-gutter: 0.8em, ..courses, ) #course-projects(( ( name: "勒索软件的分析与破解", course: "专业实践", role: "360公司优秀实习生", date: "2024.7", tech: "逆向分析 (IDA) / 密码学 / GPT (pytorch)", points: ( [对 Conti 和 DoNex 勒索软件样本的行为进行了逆向分析], [针对 DoNex 中加密时存在的重用密钥流漏洞,编写了解密工具,参考论文 _A natural language approach to automated cryptanalysis of two-time pads_ 的思路,将其中使用的 n-gram language model 替换为 GPT,基于文件的未加密部分训练小型 GPT 模型,然后基于模型输出运行 Viterbi 算法推测密钥,能够以较高的效率正确破解整个密钥流], ), ), ( name: "路由器安全漏洞验证", course: "网络空间安全导论", role: "单人(漏洞验证)", date: "2024.5 — 2024.6", tech: "Python (Scapy)", points: ( [搭建测试环境,编写发包脚本,对两款路由器分别进行实验,验证了它们受 _Man-in-the-middle attacks without rogue ap: When wpas meet icmp redirects_ 和 _Exploiting Sequence Number Leakage: TCP Hijacking in NAT-Enabled Wi-Fi Networks_ 两篇论文所提出的漏洞影响,能被用于实施流量劫持、拒绝服务等攻击], [上报了漏洞,申请到 4 个 CVE], ), ), ( name: "操作系统内存管理组件的形式化验证", course: "操作系统", role: "单人项目", date: "2024.4 — 2024.6", tech: "Verus / Rust, OS", points: ( "学习了解现有的 OS 形式化验证相关工作,尤其是学习了 Verus 的使用", "使用 Verus 工具和 Rust 语言为 ArceOS 编写了经验证的内存分配器组件,并将他人编写的经验证的页表接入 ArceOS,从而构建出了内存相关组件经形式化验证的操作系统", ), ), ( name: "IPv6 硬件路由器", course: "计算机网络原理 & 计算机组成原理", role: "三人合作(队长)", date: "2023.10 — 2024.1", tech: "SystemVerilog / C, Networking / RISC-V CPU", points: ( "在 FPGA 开发板上实现 IPv6 硬件路由器,支持四口 1Gbps 线速转发,并能存下全网路由表(约 20 万条表项)", "使用硬件描述语言实现邻居发现协议、转发逻辑、树状转发表流水线查询,以及 RISC-V 五级流水线 CPU", "软件实现 RIPng 路由协议以及路由表数据结构的维护,通过 DMA、MMIO 等软硬件接口与路由器进行通信", "我的贡献:队长,负责全部的软件部分以及路由器硬件部分的约一半工作量(CPU 硬件实现主要由队友负责)", ), ), ( name: "<NAME>体感游戏", course: "数字逻辑设计", role: "两人合作", date: "2023.4 — 2023.6", tech: "SystemVerilog", points: ( "使用硬件描述语言在 FPGA 开发板上实现 Chrome 小恐龙体感游戏", "使用外置传感器检测玩家动作控制小恐龙,游戏逻辑由硬件执行,画面通过 VGA 显示", "我的贡献:负责传感器模块组装调试,以及传感器和画面显示部分的代码实现", ), ), ( name: "GIF 图片搜索网站", course: "软件工程", role: "五人合作 (队长)", date: "2023.3 — 2023.5", tech: "Nuxt (Vue / TypeScript) / Python (Django / Flask) / Docker", points: ( "具有图片上传管理、搜索查看、AI 处理等功能,以及订阅、点赞、评论、私信等社交功能", "前端使用 Nuxt 框架 (Vue) 以及 Naive UI 组件库", "后端 API server 使用 Django 框架,图片处理使用 Flask 框架", "使用 Docker 部署了前后端服务以及 PostgreSQL、Elasticsearch", 
"我的贡献:和另一位同学一同负责前端开发,并作为队长主管总体设计,协调团队合作,协助队友修复 bug", ), ), ( name: "Wordle 游戏 & 在线评测系统", course: "程序设计训练", role: "单人项目", date: "2022.8 — 2022.9", tech: "Rust (egui / Actix) / Vue", points: ( "Wordle 游戏:包括命令行 CLI 界面和原生 GUI 界面(使用 egui 框架),以及基于信息熵算法的自动求解器", "在线评测系统:后端使用 Rust Actix 框架,前端使用 Vue,支持提交代码查看评测列表、结果、排行榜,使用 SQLite 持久存储数据,采用非阻塞评测任务队列", ), ), )) #awards(( ( name: "2024年全国大学生计算机系统能力大赛-操作系统设计赛(全国)-OS功能挑战赛道", prize: "二等奖(团体)", date: "2024.8", ), ( name: "2022-2023 学年度致理书院科技创新优秀奖学金", prize: "", date: "2023.12", ), ( name: "清华大学第七届网络安全技术挑战赛 (THUCTF2023)", prize: "特等奖", date: "2023.10", ), ( name: "清华大学第二十六届智能体大赛", prize: "八强", date: "2022.3", ), ( name: "第37届全国青少年信息学奥林匹克竞赛 (NOI2020)", prize: "银牌", date: "2020.8", ), )) // TODO: all numbers here need to be updated continuously #open-source( [在 GitHub 上维护了若干项目,并参与贡献了大量项目,历史总计 PR 674 个,issue 445 个,并在科研、学习内外上报了若干安全漏洞,总计获得 15 个 CVE。部分项目如下所示:], ( ( name: "Codle", repo: "ouuan/codle", star: "49", role: "个人项目", date: "2022.3 起", tech: "Vue / TypeScript", desc: "仿照 Wordle 的设计,基于抽象语法树的代码内容猜测游戏", ), ( name: "CP Editor", repo: "cpeditor/cpeditor", star: "1.8k", role: "主要维护者", date: "2019.12 起", tech: "C++ / Qt", desc: "为算法竞赛设计的代码编辑器,核心功能包括从网站获取测例、编译代码、运行检查测例、提交代码等", ), ( name: "OI Wiki", repo: "OI-wiki/OI-wiki", star: "20.8k", role: "核心贡献者", date: "2019.3 起", tech: "", desc: "算法竞赛知识点教程、百科", ), ), ) #skills(( ( name: "编程语言", items: ( "Rust", "C++", "C", "TypeScript", "Vue", "Python", "Shell", "SystemVerilog", "x86", "RISC-V", "MATLAB", "……", ), ), ( name: "工具", items: ( "Linux(日常桌面主力使用,并有维护个人服务器)", "Git", "Docker", "Neovim", "LaTeX", "Typst", "……", ), ), ( name: "语言", items: ( "全国大学英语六级考试 (CET6) 567 分", "阅读过大量英文原版计算机教材和论文", ), ), ))
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/touying/0.1.0/themes/metropolis.typ
typst
Apache License 2.0
// This theme is inspired by https://github.com/matze/mtheme // The origin code was written by https://github.com/Enivex // Consider using: // #set text(font: "Fira Sans", weight: "light", size: 20pt) // #show math.equation: set text(font: "Fira Math") // #set strong(delta: 100) // #set par(justify: true) #import "../utils/utils.typ" #import "../utils/states.typ" #let _saved-align = align #let slide( self: utils.empty-object, title: auto, align: horizon, margin: (top: 3em, bottom: 1em, left: 0em, right: 0em), padding: 2em, ..args ) = { self.page-args = self.page-args + ( margin: margin, fill: self.m-colors.extra-light-gray, ) if title != auto { self.m-title = title } let touying-slide = self.methods.touying-slide touying-slide( ..args.named(), self: self, setting: body => { show: _saved-align.with(align) show: pad.with(padding) set text(fill: self.m-colors.dark-teal) show: args.named().at("setting", default: body => body) body }, ..args.pos(), ) } #let title-slide( self: utils.empty-object, title: [], subtitle: none, author: none, date: none, extra: none, hide-header: true, hide-footer: true, ) = { if hide-header { self.page-args.header = none } if hide-footer { self.page-args.footer = none } let content = { set text(fill: self.m-colors.dark-teal) set align(horizon) block(width: 100%, inset: 2em, { text(size: 1.3em, strong(title)) if subtitle != none { linebreak() text(size: 0.9em, subtitle) } line(length: 100%, stroke: .05em + self.m-colors.light-brown) set text(size: .8em) if author != none { block(spacing: 1em, author) } if date != none { block(spacing: 1em, date) } set text(size: .8em) if extra != none { block(spacing: 1em, extra) } }) } let touying-slide = self.methods.touying-slide touying-slide(self: self, repeat: none, content) } #let new-section-slide(self: utils.empty-object, hide-header: true, hide-footer: true, name) = { if hide-header { self.page-args.header = none } if hide-footer { self.page-args.footer = none } let content = { states.new-section(name) set align(horizon) show: pad.with(20%) set text(size: 1.5em) name block(height: 2pt, width: 100%, spacing: 0pt, self.m-progress-bar) } let touying-slide = self.methods.touying-slide touying-slide(self: self, repeat: none, content) } #let focus-slide(self: utils.empty-object, hide-header: true, hide-footer: true, body) = { if hide-header { self.page-args.header = none } if hide-footer { self.page-args.footer = none } self.page-args = self.page-args + ( fill: self.m-colors.dark-teal, margin: 2em, ) set text(fill: self.m-colors.extra-light-gray, size: 1.5em) let touying-slide = self.methods.touying-slide touying-slide(self: self, repeat: none, align(horizon + center, body)) } #let register(aspect-ratio: "16-9", header: states.current-section, footer: [], self) = { // save the variables for later use self.m-cell = block.with( width: 100%, height: 100%, above: 0pt, below: 0pt, breakable: false, ) self.m-colors = ( dark-teal: rgb("#23373b"), light-brown: rgb("#eb811b"), lighter-brown: rgb("#d6c6b7"), extra-light-gray: rgb("#fafafa"), ) self.m-progress-bar = states.touying-progress(ratio => { grid( columns: (ratio * 100%, 1fr), (self.m-cell)(fill: self.m-colors.light-brown), (self.m-cell)(fill: self.m-colors.lighter-brown) ) }) self.m-footer = footer self.m-title = header // set page let header(self) = { set align(top) if self.m-title != none { show: self.m-cell.with(fill: self.m-colors.dark-teal, inset: 1em) set align(horizon) set text(fill: self.m-colors.extra-light-gray, size: 1.2em) utils.fit-to-width(grow: false, 100%, 
strong(utils.call-or-display(self, self.m-title))) } else { [] } } let footer(self) = { set text(size: 0.8em) show: pad.with(.5em) set align(bottom) text(fill: self.m-colors.dark-teal.lighten(40%), utils.call-or-display(self, self.m-footer)) h(1fr) text(fill: self.m-colors.dark-teal, states.slide-counter.display() + " / " + states.last-slide-number) } self.page-args = self.page-args + ( paper: "presentation-" + aspect-ratio, fill: self.m-colors.extra-light-gray, header: header, footer: footer, margin: 0em, ) // register methods self.methods.slide = slide self.methods.title-slide = title-slide self.methods.new-section-slide = new-section-slide self.methods.focus-slide = focus-slide self.methods.touying-outline = (self: utils.empty-object, enum-args: (:), ..args) => { states.touying-outline(enum-args: (tight: false,) + enum-args, ..args) } self.methods.alert = (self: utils.empty-object, it) => text(fill: self.m-colors.light-brown, it) self }
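For context, this theme is consumed through Touying's registration flow rather than imported into a document directly. The sketch below is indicative only: the `register` signature is the one defined above, but the package entry point, the global state object `s`, and `utils.methods` are assumptions carried over from how later Touying versions are used and may not match 0.1.0 exactly.

```typ
// Indicative sketch only (assumed: a touying entry point exporting s, utils and themes)
#import "@preview/touying:0.1.0": *

// register(...) above stores the metropolis colors, header/footer and page setup on `s`.
#let s = themes.metropolis.register(aspect-ratio: "16-9", footer: [Metropolis demo], s)
#let (init, slide, title-slide, new-section-slide, focus-slide) = utils.methods(s)
#show: init

#title-slide(title: [A title], author: [An author])

#slide(title: [First slide])[
  Slide body
]
```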
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/cetz/0.3.0/src/mark.typ
typst
Apache License 2.0
#import "drawable.typ" #import "vector.typ" #import "matrix.typ" #import "util.typ" #import "path-util.typ" #import "styles.typ" #import "mark-shapes.typ": get-mark #import "process.typ" #import util: typst-length /// Checks if a mark should be drawn according to the current style. /// - style (style): The current style. /// -> bool #let check-mark(style) = style != none and (style.start, style.end, style.symbol).any(v => v != none) /// Processes the mark styling. /// TODO: remember what is actually going on here. /// /// - ctx (context): The context object. /// - style (style): The current style. /// - root (str): Where the mark is being placed, normally either `"start"` or `"end"`. Allows different styling for marks in different directions. /// - path-length (float): The length of the path. This is used for relative offsets. #let process-style(ctx, style, root, path-length) = { let base-style = ( symbol: auto, fill: auto, stroke: auto, slant: auto, harpoon: auto, flip: auto, reverse: auto, inset: auto, width: auto, scale: auto, length: auto, sep: auto, pos: auto, offset: auto, flex: auto, xy-up: auto, z-up: auto, shorten-to: auto, position-samples: auto, anchor: auto, ) if type(style.at(root)) != array { style.at(root) = (style.at(root),) } if type(style.symbol) != array { style.symbol = (style.symbol,) } let out = () for i in range(calc.max(style.at(root).len(), style.symbol.len())) { let style = style style.symbol = style.symbol.at(i, default: auto) style.at(root) = style.at(root).at(i, default: auto) if type(style.symbol) == dictionary { style = styles.resolve(style, merge: style.symbol) } if type(style.at(root)) == str { style.symbol = style.at(root) } else if type(style.at(root)) == dictionary { style = styles.resolve(style, root: root, base: base-style) } style.stroke = util.resolve-stroke(style.stroke) style.stroke.thickness = util.resolve-number(ctx, style.stroke.thickness) if "angle" in style and type(style.angle) == angle { style.width = calc.tan(style.angle / 2) * style.length * 2 } // Stroke thickness relative attributes for (k, v) in style { if k in ("length", "width", "inset", "sep") { style.insert(k, if type(v) == ratio { style.stroke.thickness * v / 100% } else { util.resolve-number(ctx, v) } * style.scale) } } // Path length relative attributes for k in ("offset", "pos",) { let v = style.at(k) if v != none and v != auto { style.insert(k, if type(v) == ratio { v * path-length / 100% } else { util.resolve-number(ctx, v) }) } } out.push(style) } return out } #let transform-mark(style, mark, pos, dir, flip: false, reverse: false, slant: none, harpoon: false) = { let up = style.xy-up if dir.at(2) != 0 { up = style.z-up } assert(style.anchor in ("tip", "base", "center")) let tip = mark.tip let base = mark.base let origin = mark.at(style.anchor) mark.offset = vector.dist(origin, tip) let t = ( // Translate & rotate to the target coordinate & direction matrix.transform-translate(..pos), matrix.transform-rotate-dir(dir, up), matrix.transform-rotate-z(-90deg), // Apply mark transformations if reverse { matrix.transform-translate(-mark.length, 0, 0) }, if slant not in (none, 0%) { if type(slant) == ratio { slant /= 100% } matrix.transform-shear-x(slant) }, if flip or reverse { matrix.transform-scale({ if flip { (y: -1) } if reverse { (x: -1) } }) }, /* Rotate mark to have base->tip on the x-axis */ matrix.transform-rotate-z(vector.angle2(base, tip)), /* Translate mark to have its anchor (tip, base, center) at (0,0) */ matrix.transform-translate(..vector.scale(origin, -1)), ) 
mark.drawables = drawable.apply-transform( matrix.mul-mat(..t.filter(m => m != none)), mark.drawables ) return mark } #let _eval-mark-shape-and-anchors(ctx, mark, style) = { if "eval-mark-guard" in ctx { panic("Recursive mark drawing is not allowed") } ctx.eval-mark-guard = true ctx.groups = () ctx.nodes = (:) ctx.transform = matrix.ident() import "/src/draw.typ" let body = draw.group({ draw.set-style( stroke: style.at("stroke", default: none), fill: style.at("fill", default: none), mark: none, line: (mark: none), bezier: (mark: none), arc: (mark: none), ) mark }, name: "mark") let (ctx: ctx, bounds: bounds, drawables: drawables) = process.many(ctx, body) let anchor-fn = ctx.nodes.at("mark").anchors // Check if the mark has named anchor let has-anchor(name) = { return name in (anchor-fn)(()) } // Fetch special mark anchors let get-anchor(name, default: none) = { if default != none { if not has-anchor(name) { return default } } return (anchor-fn)(name) } let tip = get-anchor("tip", default: (0, 0, 0)) let base = get-anchor("base", default: tip) let center = get-anchor("center", default: vector.lerp(tip, base, .5)) return ( tip: tip, base: base, center: center, length: vector.dist(tip, base), drawables: drawables, ) } /// Places a mark on the given path. Returns a {{dictionary}} with the following keys: /// - drawables (drawable): The mark drawables. /// - distance (float): The length to shorten the path by. /// - pos (float): The position of the mark, can be used to snap the end of the path to after shortening. /// /// --- /// /// - ctx (context): The canvas context object. /// - styles (style): A processed mark styling. /// - segments (drawable): The path to place the mark on. /// - is-end (bool): TODO /// -> dictionary #let place-mark-on-path(ctx, styles, segments, is-end: false) = { if type(styles) != array { styles = (styles,) } let distance = 0 let shorten-distance = 0 let shorten-pos = none let drawables = () for (i, style) in styles.enumerate() { let is-last = i + 1 == styles.len() if style.symbol == none { continue } // Override position, if set if style.pos != none { distance = style.pos } // Apply mark offset distance += style.offset let (mark-fn, defaults) = get-mark(ctx, style.symbol) let merge-flag(style, key, default: false) = { let old = style.at(key) let def = defaults.at(key, default: default) style.insert(key, (old or def) and not (old and def)) return style } style = merge-flag(style, "reverse") style = merge-flag(style, "flip") style = merge-flag(style, "harpoon") let mark = _eval-mark-shape-and-anchors(ctx, mark-fn(style), style) let pos = if style.flex { path-util.point-on-path( segments, if distance != 0 { distance * if is-end { -1 } else { 1 } } else { if is-end { 100% } else { 0% } }, extrapolate: true) } else { let (_, dir) = path-util.direction( segments, if is-end { 100% } else { 0% }, clamp: true) let pt = if is-end { path-util.segment-end(segments.last()) } else { path-util.segment-start(segments.first()) } vector.sub(pt, vector.scale(vector.norm(dir), distance * if is-end { 1 } else { -1 })) } assert.ne(pos, none, message: "Could not determine mark position") let dir = if style.flex { let a = pos let b = path-util.point-on-path( segments, (mark.length + distance) * if is-end { -1 } else { 1 }, samples: style.position-samples, extrapolate: true) if b != none and a != b { vector.sub(b, a) } else { let (_, dir) = path-util.direction( segments, distance, clamp: true) vector.scale(dir, if is-end { -1 } else { 1 }) } } else { let (_, dir) = path-util.direction( 
segments, if is-end { 100% } else { 0% }, clamp: true) if dir != none { vector.scale(dir, if is-end { -1 } else { 1 }) } } assert.ne(pos, none, message: "Could not determine mark direction") mark = transform-mark( style, mark, pos, dir, reverse: style.reverse, slant: style.slant, flip: style.flip, harpoon: style.harpoon, ) // Shorten path to this mark let inset = mark.at("inset", default: 0) if style.shorten-to != none and (style.shorten-to == auto or i <= style.shorten-to) { let offset = mark.offset inset += offset shorten-distance = distance + mark.length - inset shorten-pos = vector.add(pos, vector.scale(vector.norm(dir), mark.length - inset)) } drawables += mark.drawables distance += mark.length // Add separator distance += style.sep } return ( drawables: drawables, distance: shorten-distance, pos: shorten-pos ) } /// Places marks along a path. Returns them as an {{array}} of {{drawable}}. /// /// - ctx (context): The context object. /// - style (style): The current mark styling. /// - transform (matrix): The current transformation matrix. /// - path (drawable): The path to place the marks on. /// - add-path (bool): When `true` the shortened path will returned as the first {{drawable}} in the {{array}} /// -> array #let place-marks-along-path(ctx, style, transform, path, add-path: true) = { let distance = (0, 0) let snap-to = (none, none) let drawables = () let (path, is-transformed) = if not style.transform-shape and transform != none { (drawable.apply-transform(transform, path).first(), true) } else { (path, false) } let segments = path.segments if style.start != none or style.symbol != none { let (drawables: start-drawables, distance: start-distance, pos: pt) = place-mark-on-path( ctx, process-style(ctx, style, "start", path-util.length(segments)), segments ) drawables += start-drawables distance.first() = start-distance snap-to.first() = pt } if style.end != none or style.symbol != none { let (drawables: end-drawables, distance: end-distance, pos: pt) = place-mark-on-path( ctx, process-style(ctx, style, "end", path-util.length(segments)), segments, is-end: true ) drawables += end-drawables distance.last() = end-distance snap-to.last() = pt } if distance != (0, 0) { segments = path-util.shorten-path( segments, ..distance, mode: if style.flex { "CURVED" } else { "LINEAR" }, samples: style.position-samples, snap-to: snap-to) } if add-path { path.segments = segments drawables.insert(0, path) } // If not transformed pre mark placement, // transform everything after mark placement. if not is-transformed { drawables = drawable.apply-transform(transform, drawables) } return drawables }
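The mark machinery above is normally exercised through CeTZ's high-level draw API rather than called directly. Below is a minimal sketch of how a mark is requested on a path; it assumes the `@preview/cetz:0.3.0` entry point and that `">"` is one of the mark symbols resolved by `get-mark`, so treat the concrete values as illustrative rather than canonical.

```typ
// Minimal sketch (assumed: cetz 0.3.0 entry point, ">" is a registered mark symbol)
#import "@preview/cetz:0.3.0"

#cetz.canvas({
  import cetz.draw: *
  // The mark dictionary is processed by process-style/place-marks-along-path above,
  // which resolves the symbol, scales it, and shortens the line to the mark tip.
  line((0, 0), (3, 0), mark: (end: ">", fill: black))
})
```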
https://github.com/typst/packages
https://raw.githubusercontent.com/typst/packages/main/packages/preview/grayness/0.2.0/manual.typ
typst
Apache License 2.0
#import "@preview/tidy:0.3.0" = Grayness This package provides basic image editing functions. All of them work with Raster Data (e.g. "normal" Images like PNG or JPEG). The `grayscale-image` function also works with Vector Data (SVG). The Following functions are available: #let docs = tidy.parse-module(read("lib.typ")) #tidy.show-module(docs, style: tidy.styles.default)
https://github.com/sebmestrallet/typst-simple-siam
https://raw.githubusercontent.com/sebmestrallet/typst-simple-siam/main/thumbnails/README.md
markdown
MIT No Attribution
# How to generate the thumbnails 1. Compile `../src/main.typ` with Typst 1. Split the PDF with [PDFsam](https://github.com/torakiki/pdfsam) 1. For each page, open it with [Xournal++](https://github.com/xournalpp/xournalpp) and export as PNG @ 96dpi
https://github.com/frectonz/the-pg-book
https://raw.githubusercontent.com/frectonz/the-pg-book/main/book/002.%20progbot.html.typ
typst
#set page( paper: "a5", margin: (x: 1.8cm, y: 1.5cm), ) #set text( font: "Liberation Serif", size: 10pt, hyphenate: false ) #set par(justify: true) #v(10pt) = Programming Bottom-Up #v(10pt) _1993_ _(This essay is from the introduction to On Lisp.)_ It's a long-standing principle of programming style that the functional elements of a program should not be too large. If some component of a program grows beyond the stage where it's readily comprehensible, it becomes a mass of complexity which conceals errors as easily as a big city conceals fugitives. Such software will be hard to read, hard to test, and hard to debug. In accordance with this principle, a large program must be divided into pieces, and the larger the program, the more it must be divided. *How do you divide a program?* The traditional approach is called _top-down design_: you say "the purpose of the program is to do these seven things, so I divide it into seven major subroutines. The first subroutine has to do these four things, so it in turn will have four of its own subroutines," and so on. This process continues until the whole program has the right level of granularity -- each part large enough to do something substantial, but small enough to be understood as a single unit. Experienced Lisp programmers divide up their programs differently. As well as top-down design, they follow a principle which could be called _bottom-up design_ -- changing the language to suit the problem. In Lisp, you don't just write your program down toward the language, you also build the language up toward your program. As you're writing a program you may think "I wish Lisp had such-and-such an operator." So you go and write it. Afterward you realize that using the new operator would simplify the design of another part of the program, and so on. Language and program evolve together. Like the border between two warring states, the boundary between language and program is drawn and redrawn, until eventually it comes to rest along the mountains and rivers, the natural frontiers of your problem. In the end your program will look as if the language had been designed for it. And when language and program fit one another well, you end up with code which is clear, small, and efficient. It's worth emphasizing that bottom-up design doesn't mean just writing the same program in a different order. When you work bottom-up, you usually end up with a different program. Instead of a single, monolithic program, you will get a larger language with more abstract operators, and a smaller program written in it. Instead of a lintel, you'll get an arch. In typical code, once you abstract out the parts which are merely bookkeeping, what's left is much shorter; the higher you build up the language, the less distance you will have to travel from the top down to it. This brings several advantages: + By making the language do more of the work, bottom-up design yields programs which are *smaller and more agile*. A shorter program doesn't have to be divided into so many components, and fewer components means programs which are easier to read or modify. Fewer components also means fewer connections between components, and thus less chance for errors there.As industrial designers strive to reduce the number of moving parts in a machine, experienced Lisp programmers use bottom-up design to reduce the size and complexity of their programs. + Bottom-up design promotes *code re-use*. 
When you write two or more programs, many of the utilities you wrote for the first program will also be useful in the succeeding ones. Once you've acquired a large substrate of utilities, writing a new program can take only a fraction of the effort it would require if you had to start with raw Lisp. + Bottom-up design makes programs *easier to read*. An instance of this type of abstraction asks the reader to understand a general-purpose operator; an instance of functional abstraction asks the reader to understand a special-purpose subroutine. #footnote["But no one can read the program without understanding all your new utilities." To see why such statements are usually mistaken, see Section 4.8.] + Because it causes you always to be on the lookout for patterns in your code, working bottom-up helps to *clarify your ideas* about the design of your program. If two distant components of a program are similar in form, you'll be led to notice the similarity and perhaps to redesign the program in a simpler way. Bottom-up design is possible to a certain degree in languages other than Lisp. Whenever you see library functions, bottom-up design is happening. However, Lisp gives you much broader powers in this department, and augmenting the language plays a proportionately larger role in Lisp style -- so much so that Lisp is not just a different language, but a whole different way of programming. It's true that this style of development is better suited to programs which can be written by small groups. However, at the same time, it extends the limits of what can be done by a small group. In _The Mythical Man-Month_, <NAME> proposed that the productivity of a group of programmers does not grow linearly with its size. As the size of the group increases, the productivity of individual programmers goes down. The experience of Lisp programming suggests a more cheerful way to phrase this law: _as the size of the group decreases, the productivity of individual programmers goes up_. A small group wins, relatively speaking, simply because it's smaller. When a small group also takes advantage of the techniques that Lisp makes possible, it can win outright.
https://github.com/rikhuijzer/phd-thesis
https://raw.githubusercontent.com/rikhuijzer/phd-thesis/main/chapters/4.typ
typst
The Unlicense
#import "../style.typ": citefig #import "../functions.typ": chapter, textcite, parencite, note #chapter( [Predicting Special Forces Dropout via Explainable Machine Learning], label: [ <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>, <NAME>. (2024). Predicting Special Forces Dropout via Explainable Machine Learning. _European Journal of Sport Science_. #link("https://doi.org/10.1002/ejsc.12162") ], abstract: [ Selecting the right individuals for a sports team, organization, or military unit has a large influence on the achievements of the organization. However, the approaches commonly used for selection are either not reporting predictive performance or not explainable (i.e., black box models). In the present study, we introduce a novel approach to selection research, using various machine learning models. We examined 274 special forces recruits, of whom 196 dropped out, who performed a set of physical and psychological tests. On this data, we compared four machine learning models on their predictive performance, explainability, and stability. We found that a stable rule-based (SIRUS) model was most suitable for classifying dropouts from the special forces selection program. With an averaged area under the curve score of 0.70, this model had good predictive performance, while remaining explainable and stable. Furthermore, we found that both physical and psychological variables were related to dropout. More specifically, a higher score on the 2800 meters time, connectedness, and skin folds were most strongly associated with dropping out. We discuss how researchers and practitioners can benefit from these insights in sport and performance contexts. ] ) == Introduction The achievements of sports clubs, organizations, and military units are largely determined by the performance of the individuals in the organization. As a consequence, there is an ever increasing pressure to select the right individuals, that is, individuals who will perform successfully in the future (e.g., #parencite(<hartigh2018selection>)). Historically, military selection has been an important breeding ground for research into selection in psychology and sports. For example, widely used instruments such as intelligence tests @terman1918use, personality inventories @ellis1948validity, and leadership measures @fleishman1953leadership were first established and validated in military contexts. In the present study, we aimed to advance the field of selection further by applying machine learning models for the selection of elite soldiers. In doing so, we set out to investigate the predictive performance, explainability, and stability of statistical models based on relevant physical and psychological predictors. Here, predictive performance means the estimated ability of the model to predict future behaviors, explainability means how easy it is to understand the model and why certain predictions were made, and stability means the ability of the model to produce similar conclusions for small changes to the data @yu2013stability. === Selection in High-Stakes Military Contexts Within the military, the special forces are considered elite. Special forces operators need to be able to perform their tasks under difficult circumstances, such as continuous threat, extreme temperatures, isolation, and high task complexity, while being involved in politically sensitive situations @picano2002development. Similar to elite sports, this requires extraordinary physical and mental capabilities @vaara2022physical. 
Special forces selection courses worldwide simulate these circumstances in, what some countries call, _hell weeks_. During these selection weeks, recruits typically complete exercises and tasks for a large part of the day while being sleep deprived. Several studies have been conducted in the past decades to predict success versus dropout in such selection programs of the special forces. For example, a study among 800 candidates found that both physical and psychological measures, such as grit and pull-ups, significantly correlated with graduation @farina2019physical. The relevance of physical and psychological factors were also found in other high-stakes military contexts. For instance, studies on 12,924 military pilots, 115 reconnaissance marines, and 57 counter terrorism intervention unit recruits found that various physical and psychological measures were associated with graduation @king2013standard @saxon2020continuous @tedeholm2021personality. Furthermore, a large-scale study on 1,138 United States (U.S.) special forces candidates found that psychological hardiness significantly correlated with graduation @bartone2008psychological. Taken together, a multidisciplinary approach including both physical and psychological measures, is likely to perform best on the complex task of predicting dropout @williams2000talent. An important note about previous research is that many studies report only model explanations, that is, the studies fit a statistical model to the data and report the fitted parameters. Interestingly, this approach is also common practice in the field of sport science. However, the outcomes produced by such models may have little ability to predict future behaviors, because of overfitting @jauhiainen2022predicting @yarkoni2017choosing @hofman2021integrating. Also, many studies only report the results from one statistical model, such as a simple regression or the _t_-test, which largely ignores the statistical (and computational) progress made since then. Applying more recent analytic techniques, such as model evaluation via cross-validation, could therefore improve research into the selection procedures (e.g., #parencite(<abt2022raising>)). === Statistical Models from Machine Learning Recent analytic advances can be found in the domain of machine learning, which can generally be described as computer systems that learn and adapt without following specific instructions. One example is computer vision, which contains models that can learn from visual data to automatically detect and classify sport-specific movements. In general, the field invented and re-discovered a plethora of statistical models, many of which are promising because the models are distribution-free and are able to find complex relationships in data. The distribution-free property is relevant for selection because psychometric variables are usually normally distributed while performance variables in elite performers often are not (e.g., #parencite(<hartigh2018development>); #parencite(<oboyle2012best>)). Furthermore, finding complex relationships could provide new insights into the underlying processes when sufficient data is available. As an example, #textcite(<jauhiainen2022predicting>) used a complex data set, containing 3-dimensional motion and physical data, to predict injuries in 791 female elite handball and soccer players. 
More generally, the commonly applied random forest algorithms have been very performant in different settings; especially when the number of variables is large or larger than the number of observations @biau2016random. However, machine learning is no panacea. A disadvantage of many machine learning applications in sports and the selection of military personnel is that the models are too complex to understand. Often, the complex models are then converted to a simplified form to make them interpretable, for example by using SHAP (SHapley Additive exPlanations; for details see #parencite(<molnar2022interpretable>)). Although the purpose of SHAP is to increase transparency and explainability of machine learning models, it loses information during the conversion from the complex model to the simplified representation. In other words, the simplified representation is not the same as the model that will be used for decision making. This is problematic for researchers and practitioners because the simplification could hide issues related to safety, fairness (e.g., biases), and reliability @doshi2017towards @barredo2020explainable. This is especially important in the context of selection, where wrong decisions can have a lasting impact on the individual. Apart from predictive performance and explainability, the stability of models is also an important aspect. A stable model is defined as a model which leads to similar conclusions for small changes to data @yu2013stability. An example of an unstable model could be a model which selects personality and sprint times to predict dropout in this year's cohort, but selects other variables for next year's cohort. In the context of selection, this variation in the prediction model is problematic. Unstable models cause various operational problems such as being deemed less trustworthy and requiring constant changes to the selection procedure @yu2013stability. === Current Study The purpose of the current study was to determine how well we could predict dropout of special forces recruits while retaining model explainability and stability. We used a regularized linear model as a baseline. This model is close to the linear models that are typically used for decision making in sport and psychology research. Next, we used three machine learning models, namely a decision tree, a state-of-the-art random forest, and a state-of-the-art explainable rule-based model. We specifically investigated how the four models compared on their predictive performance, explainability, and stability. We compared the models on their predictive performance via average area under the curve (AUC), on their explainability by comparing model interpretation techniques (e.g., linear model coefficients versus SHAP), and stability by comparing the differences between the algorithms used. == Materials and Methods === Participants We recruited 311 participants aged between 20 and 39 (_M_#sub[age] = 26.5, _SD_#sub[age] = 3.8), who were exclusively Dutch males and all part of the selection of the Special Forces of the Royal Netherlands Army. Active consent was obtained from all participants and the procedure was approved by the ethical review board of the faculty (code: PSY-1920-S-0512). Data preprocessing, which included the removal of participants for which some data was missing, resulted in a dataset of 274 participants. Of these participants, 196 dropped out and 78 graduated. More information could not be provided due to security reasons. 
=== Design & Procedure Participation occurred via a platform specifically built for the research project (#link("https://yourspecialforces.nl")). The data collection was organized by researchers of the university at the training camp, and was facilitated by the staff of the Special Forces unit. Physical assessments occurred on the first day of the first week. Also in the first week of the training, participants completed the psychological assessments using tablets in a large room which was set up like a traditional classroom. Once participants entered the room for the psychological assessment, they were informed about the consent procedure, study goal, and that participation would not affect their graduation chances. For three to four days, the participants spent roughly one hour per day on filling out the questionnaires, which were all in Dutch. === Measures The study contained both physical and psychological measures. The physical fitness of the recruits was measured using a test battery designed to assess relevant physiological and physical characteristics that are considered to be important in military training courses (e.g., #parencite(<haff2015essentials>)). All tests were taken in a predetermined order. First, body composition was determined by measuring length, weight, and the 4-Site Skinfold @durnin1974body. Then a standardized warming up was conducted after which the recruits started in the test-circuit. Lower body power was measured with a broad jump, the best of three attempts was noted in centimeters. Next, speed and agility were tested using the Pro Agility test conducted twice with 30 seconds rest in between and both sprint times were summed. The agility test was followed by maximal grip strength of both hands with one attempt per hand using a Grip dynamometer. After this test, maximal strength of the lower body push and pull, and upper body push-kinetic chain was measured with a 3 repetition max (RM) protocol using the hex-bar deadlift and bench press exercise. Strength endurance of the upper body pull-chain was measured with pull-ups: recruits had one minute to complete as many pull-ups as possible. The penultimate test was designed to determine the anaerobic capacity of the recruits using a 60 meter sprint. It measured the time it took to sprint from one place to a place 5 meters away and back (10 meters), then 10 meters away and back (20 meters), and finally to a place 15 meters away and back (30 meters). Also here, the test was conducted twice with 30 seconds in between. After the 60 meter sprint, the recruits had exactly 10 minutes to recover and prepare for the aerobic endurance test, a timed 2800 meter run. The recruits were instructed to complete 8 rounds on a 350 meter concrete track as fast as possible. Regarding the psychological measures, the first day included the informed consent and a resilience questionnaire. The resilience questionnaire assessed the ability to recover or bounce back from stress via the Brief Resilience Scale @smith2008brief. For example, one of the six items was "I tend to bounce back quickly after hard times". Next, goal commitment was measured via six items such as "I am strongly committed to pursuing my goals" (see #parencite(<yperen2009some>)). The next questionnaire measured self-efficacy @bandura2006guide with 14 items such as "How confident are you in your ability to remain calm in difficult situations?". The second day consisted of two cognitive ability tests @condon2014international. 
The first test contained 11 matrix reasoning items and the second test contained 24 three-dimensional rotation items. The participants were allowed to take 15 and 30 minutes, respectively, to finish the two tests. On the third day, three questionnaires were answered. The first questionnaire was a combination of five short questionnaires, namely Mindsets @dweck2000self, Basic Motives @yperen2014blended, Motivation Type @pelletier2013validation, and Approach-Avoidance Temperament @elliot2010approach. The second measured mental toughness via the MTQ48 @clough2002mental. This questionnaire contains four key components, namely Control, Commitment, Challenge, and Confidence. The third questionnaire measured Coping @lazarus1984stress. This questionnaire measured emotion-focused versus problem-focused coping in response to stressful events. For example, "I try to forget the whole thing by focusing on other things" is an example of an emotion-focused strategy. After this, the participants filled in the Dutch version of the NEO-PI-3 personality questionnaire, which measures the Big Five dimensions: Neuroticism, Extraversion, Openness, Agreeableness, and Conscientiousness @mccrae2005neo.

== Analyses

In order to find the best performing model, we compared four different models via MLJ.jl @blaom2020mlj. We calculated the models' scores on the Area Under the receiver operating characteristics Curve (AUC). The AUC is a metric that indicates how well a model predicts a binary outcome, dropout versus graduation in our case. The AUC takes into account that the threshold of the model can be chosen freely. An AUC score of 1 means that the model can perfectly predict all outcomes and a score of 0 means that the model predicts everything wrong. An AUC score of 0.5 means random guessing, and AUC scores of 0.7 to 0.85 and higher are generally considered to be good to excellent in social sciences (e.g., #parencite(<menaspa2010aerobic>)). We compared all models on their predictive performance via 12-fold cross-validation with AUC as the metric. The first model was the baseline: a regularized linear model. Here, regularization was necessary because this study gathered relatively many variables compared to the number of observations. Without regularization, the model is likely to overfit in such situations. As regularization for the linear model, we chose Elastic Net, which is a combination of Lasso and Ridge regression (e.g., #parencite(<zou2005regularization>)), and fitted the model via MLJLinearModels.jl @blaom2020mlj. The strength of both regularizers was chosen automatically via hyperparameter tuning and 12-fold cross-validation. The second model was a decision tree, fitted via DecisionTree.jl @sadeghi2022decisiontree, and the third was a state-of-the-art boosted random forest called XGBoost @chen2016xgboost. The fourth model was a state-of-the-art Stable and Interpretable Rule Sets (SIRUS) algorithm @benard2021interpretable @huijzer2023sirus. The SIRUS model is essentially also a random forest algorithm, but with a small modification such that it is more stable and, therefore, explainable. Note that contrary to more continuous models such as linear models, the rules fitted by SIRUS contain hard cutpoints (e.g., _if some variable < 20, then A else B_). Of these models, the XGBoost is the least explainable while the other three models are all explainable. That is, the XGBoost cannot easily be interpreted due to the complexity of the model.
The decision tree model, despite being explainable, has the drawback of low stability, since the split point at the root of the tree tends to vary wildly (for details about this phenomenon, see #parencite(<molnar2022interpretable>)). The stability of the logistic regression is moderate since the model is highly sensitive to the choice of regularization parameters when using ridge, lasso, or both @hastie2009elements. The stability of the XGBoost is high due to the large number of trees in the model, which averages out fluctuations. Finally, the stability of SIRUS is generally high too since the algorithm was designed such that the structure of the random trees is more stable @benard2021interpretable. For more details about the analyses, see the code repository at osf.io#footnote[#link("https://osf.io/c8hdy/")].

== Results

The summary statistics of the variables and the correlations of all variables with graduation are shown in Table A1 and Figures A1 and A2, respectively, of #textcite(<huijzer2023predicting>). The average AUC score and standard errors are shown in #citefig(<fig:auc>). To interpret these ROC curves, note that the diagonal line represents random guessing. Next, to create the lines, a model was fitted on one of the cross-validation folds for each fold and used to predict data that the model had not seen during training. Then, note that a classification model can use different thresholds: the lower the threshold, the more likely an individual is classified as a graduate. Finally, for each fold, the line is drawn by increasing the model threshold from 0 to 1 and comparing the model predictions to the true values. The AUC score is the averaged area under these curves. The XGBoost model had the highest predictive performance, which was followed by the SIRUS model with a tree depth of 1 and at most 30 rules. Note that SIRUS with a tree depth of 2 would allow for more complex rules with two elements in the clause (e.g., _if X and Y, then A else B_) instead of only one clause (e.g., _if X, then A else B_). However, fitting a SIRUS model with a tree depth of 2 performed consistently worse, which indicated that the model overfitted the data. The logistic regression and the decision tree had slightly lower predictive performance.

#figure(
  [
    #image("../images/auc.svg", width: 100%)
    #note([
      The thick lines represent estimates of the average ROC curves over all folds. The smaller lines in gray display the variation on this estimate by showing the first 8 folds in the 12-fold cross-validation. We show only 8 folds because more folds made the plot very cluttered. The average Area Under the Curve (AUC) and 1.96 $*$ standard error scores are shown in the bottom right.
    ])
  ],
  caption: "Receiver Operating Characteristic (ROC) Curves"
) <fig:auc>

Altogether, while the XGBoost had a good predictive performance, the SIRUS model combined good predictive performance with strong stability and explainability (see the Analyses section). We therefore decided to analyse the data further via this model. To do so, we have visualized the stability for different bootstrapped samples in Figure #citefig(<fig:sirus>). Here, by bootstrapped samples, we mean that we took multiple random samples of the data, via MLJ.jl @blaom2020mlj, and fitted the model on each of these samples. The bootstrapping allowed us to visualize the uncertainty in the model which, in turn, aids model explanations. To inspect the model, we go through one example feature in Figure #citefig(<fig:sirus>).
The figure shows that the 2800 meters time had the most importance when summing the feature importances over the various bootstrapped samples. Next, we know that the rules in the SIRUS algorithm with a depth of 1 by default always point to "lower than", for example _if 2800 meters time < 650_, then _then-score_ else _else-score_ @huijzer2023sirus. If the _then-score_ is greater than the _else-score_, then the model predicts that the individual who satisfies the rule is more likely to graduate. If the _then-score_ is smaller than the _else-score_, then the model predicts that the individual who satisfies the rule is more likely to drop out. The plotted rule directions show the direction of this _then-score_ and _else-score_ via $log("else-scores" / "then-scores")$. Thus, from the plotted rule directions, we can see that the model found that a higher 2800 meters time was associated with dropping out. The exact locations of the split points (e.g., _if 2800 meters time < 650_) are shown in the right part of the plot and were different in the different bootstrapped samples. Most of the split points were at 650 seconds, and some were at 700 seconds. We plotted these split points on top of histograms of the data to show the distribution of the data.

#figure(
  [
    #image("../images/SIRUS-uncertainty.svg", width: 82%)
    #note([
      This figure indicates the model uncertainty over different bootstrapped samples. The leftmost column shows the feature importance, the middle column shows the directions of the rules, and the rightmost column shows the split points of the rules and a histogram of the data. Specifically, the direction shows $log("else-scores" / "then-scores")$. The sizes of the dots indicate the weight that the rule has, so a bigger dot means that a rule plays a larger role in the final outcome. These dots are sized in such a way that a doubling in weight means a doubling in surface size. Finally, the variables are ordered by the sum of the weights of the rules and only the first 15 are shown.
    ])
  ],
  caption: "Rules used by the Rule-Based Classifier in Different Folds"
) <fig:sirus>

When looking at all the predictions, the running time on the 2800 meters was the most important, with a clear cut-off point for all folds at about 700 seconds. This means that, for all the folds, a higher running time was found to be associated with dropping out. Furthermore, higher scores on, in particular, connectedness and skin folds were associated with dropping out.

== Discussion

The purpose of the current study was to determine how well we could predict dropout of special forces recruits while retaining model explainability and stability. To do so, we compared a linear, decision tree, XGBoost, and SIRUS classifier. Of the four models, the XGBoost had the best predictive performance. This is in line with earlier research that found that XGBoost is a powerful algorithm in a wide array of tasks, ranging from predicting Tweet engagements @anelli2020recsys to predicting injuries in competitive runners @lovdal2021injury. However, XGBoost is less explainable than SIRUS. The difference between the two is that the SIRUS algorithm simplifies the model and then uses this model for both explanations and predictions. In contrast, model explainability methods typically use a simplified representation for explanations and the complex model for predictions.
This difference between explanations and predictions could hide issues related to safety, fairness (e.g., biases), and reliability, which is especially problematic in the context of selection, where wrong decisions can have a lasting impact on the individual. Next, the logistic regression, which is most familiar to sport and performance scientists, was explainable, but not very stable and performed slightly worse than the SIRUS model. The general instability of the logistic model is an issue that has been described by @hastie2009elements. Furthermore, the decision tree is explainable but not stable (see #parencite(<molnar2022interpretable>)). Together, the algorithm that displayed the best combination on all aspects was the SIRUS algorithm, which achieved good predictive performance and stability while remaining explainable. The SIRUS algorithm appeared to be able to correctly deselect about 10% to 20% of dropouts, that is, without sending recruits home who would have graduated, depending on the fold (see the top right of the SIRUS ROC in Figure #citefig(<fig:auc>)). There is still a considerable amount of variance in the ROC curves, but at least 10% would already be a meaningful number in practice. Moreover, the accuracy of the prediction will most likely improve when fitting the model on the full dataset instead of cross-validation folds and when gathering more data over time. Since the SIRUS model performs relatively well, and is explainable and stable, we can use our domain knowledge to estimate the generalizability of the model. With this in mind, the main takeaways from the current model are that candidates who take more than roughly 700 seconds on the 2800 meters, score higher on connectedness, and have higher skin folds are more likely to drop out (see Figure #citefig(<fig:sirus>)). Most of these variables are in accordance with earlier studies. For instance, a lower time for the 3-mile run also predicted graduation in 800 U.S. special forces recruits @farina2019physical. Furthermore, a lower fat percentage, as measured by the skin folds, was associated with physical fitness in 140 Finnish recruits @mattila2007physical. Together, this adds theoretical confidence that the predictive model will generalize to new cohorts.

=== Limitations and Future Research

Although the psychological measurements were well-organized and based on validated questionnaires, a limitation could be that participants faked their responses (e.g., #parencite(<galic2012applicants>)).
To mitigate this in our study, we emphasized that data would be processed anonymously and that staff of the Special Forces unit could not access the data nor use it to make selection decisions, which has been shown to reduce the faking tendency @kuncel2007toward. Nevertheless, to make the transfer to real selection, the risk of faking should be accounted for. For future research, it would be interesting to investigate how selection decisions can be made on the data while new data keeps being added.

=== Conclusions and Practical Implications

In our attempt to predict dropout of special forces recruits by fitting machine learning models, SIRUS had a higher predictive performance than the linear classifier and decision tree, while being more explainable than the state-of-the-art XGBoost classifier. In other words, SIRUS achieves a balance between predictive performance, explainability, and stability. This, together with its ease of use, makes it particularly suitable for many research problems in science, including selection in sports, organizational, and military contexts. In the long run, this better understanding of the model may even yield higher accuracy than black-box models, because it allows researchers to improve the model with their domain expertise and improve their domain expertise with the model. In turn, practitioners may use this to make data-driven selection decisions. To conclude, we would encourage scientists to use SIRUS, or similar stable rule-based models. This is especially useful when working in fields such as sports and military selection, where the number of variables often approaches the number of observations and where predictive performance, explainability, and stability are critical.
https://github.com/adelhult/typst-hs-test-packages
https://raw.githubusercontent.com/adelhult/typst-hs-test-packages/main/test/counter-examples/colon_in_label_as_arg.typ
typst
MIT License
#let x = selector(<foo:bar>)
https://github.com/daniel-eder/typst-template-jku
https://raw.githubusercontent.com/daniel-eder/typst-template-jku/main/src/template/pages/abstract.typ
typst
// SPDX-FileCopyrightText: 2023 <NAME>
//
// SPDX-License-Identifier: Apache-2.0

#import "../styles/pre_content_heading.typ": pre_content_heading

#let abstract(
  abstract,
) = {
  set page(numbering: "i", number-align: center)

  pre_content_heading([Abstract])

  abstract
}
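A sketch of how this helper might be called from a thesis entry file follows; the import path and the abstract text are placeholders, and only the `abstract(...)` signature is taken from the file above.

```typ
// Hypothetical call site (relative path and text are placeholders)
#import "template/pages/abstract.typ": abstract

#abstract[
  A one- or two-paragraph summary of the thesis goes here.
]
```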
https://github.com/Goldan32/brilliant-cv
https://raw.githubusercontent.com/Goldan32/brilliant-cv/main/metadata.typ
typst
Apache License 2.0
// NOTICE: Copy this file to your root folder.

/* Personal Information */
#let firstName = "Dániel"
#let lastName = "Arany"
#let personalInfo = (
  github: "Goldan32",
  phone: "+36 30 290 1219",
  email: "<EMAIL>",
  linkedin: "danielarany",
  //custom-1: (icon: "", text: "example", link: "https://example.com"),
  //gitlab: "mintyfrankie",
  //homepage: "jd.me.org",
  //orcid: "0000-0000-0000-0000",
  //researchgate: "John-Doe",
  //extraInfo: "",
)

/* Language-specific */
// Add your own languages while the keys must match the varLanguage variable
#let headerQuoteInternational = (
  "": [Freshly graduated embedded software engineer with 3 years of part-time experience is looking for a full-time opportunity.],
  "en": [Freshly graduated embedded software engineer with 3 years of part-time experience is looking for a full-time opportunity.],
  "hu": [Frissen végzett beágyazott szoftvermérnök, 3 év félmunkaidős tapasztalattal keres teljes állású lehetőséget.]
)
#let cvFooterInternational = (
  "": "Curriculum Vitae",
  "en": "Curriculum Vitae",
  "hu": "Önéletrajz"
)
#let letterFooterInternational = (
  "": "Cover Letter",
  "en": "Cover Letter",
  "hu": "Motivációs Levél"
)
#let nonLatinOverwriteInfo = (
  "customFont": "Heiti SC",
  "firstName": "王道尔",
  "lastName": "",
  // submit an issue if you think other variables should be in this array
)

/* Layout Setting */
#let awesomeColor = "skyblue" // Optional: skyblue, red, nephritis, concrete, darknight
#let profilePhoto = "../src/avatar_cropped.png" // Leave blank if profile photo is not needed
#let varLanguage = "hu" // INFO: value must match folder suffix; i.e. "zh" -> "./modules_zh"
#let varEntrySocietyFirst = false // Decide if you want to put your company in bold or your position in bold
#let varDisplayLogo = true // Decide if you want to display organisation logo or not
https://github.com/Myriad-Dreamin/tinymist
https://raw.githubusercontent.com/Myriad-Dreamin/tinymist/main/crates/tinymist-query/src/fixtures/goto_definition/import_new_name.typ
typst
Apache License 2.0
// path: base.typ
-----
#import "base.typ" as x
#(/* position after */ x);