Unnamed: 0 | repo_id | file_path | content
---|---|---|---|
0 | repos | repos/zelda/LICENSE-APACHE.md | Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability. |
0 | repos | repos/zelda/README.md | <img height="32" src="https://upload.wikimedia.org/wikipedia/commons/8/86/Triforce.svg"></img>
zelda [](https://shields.io/) [](https://shields.io/)
====
A short and sweet package for native [Zig](https://ziglang.org) HTTP(s) requests.
Zelda uses [hzzp](https://github.com/truemedian/hzzp) and [zig-libressl](https://github.com/haze/zig-libressl) to provide a simple interface for HTTP 1.1 interactions. There is a lot that goes into retrieving data from a remote server, but sometimes you don't want to spend hours mulling over the details, especially in projects where the transport layer is only a small part of the larger program.
### Capabilities
- [x] HTTP/1.1
- [x] TLS 1.1, TLS 1.2, TLS 1.3
- [x] Simple One-Shot interface for raw bytes & JSON encoded data
### Linking
```zig
const zelda = @import("path/to/zelda/build.zig");
pub fn build(b: *std.build.Builder) !void {
const exe = ...
try zelda.link(b, exe, target, mode, use_system_libressl);
}
```
### Example
```zig
/// Extracted from `examples/whats_my_ip/src/main.zig`
const std = @import("std");
const zelda = @import("zelda");
const IPResponse = struct {
ip: []const u8,
};
pub fn main() anyerror!void {
var arena = std.heap.ArenaAllocator.init(std.heap.c_allocator);
defer arena.deinit();
try printIPFromRaw(arena.allocator());
try printIPFromJson(arena.allocator());
}
pub fn printIPFromJson(allocator: std.mem.Allocator) !void {
const response = try zelda.getAndParseResponse(IPResponse, .{ .allocator = allocator }, allocator, "https://api64.ipify.org/?format=json");
defer std.json.parseFree(IPResponse, response, .{ .allocator = allocator });
var stdout = std.io.getStdOut().writer();
try stdout.print("My ip is {s}\n", .{response.ip});
}
pub fn printIPFromRaw(allocator: std.mem.Allocator) !void {
var response = try zelda.get(allocator, "http://api64.ipify.org/");
defer response.deinit();
var stdout = std.io.getStdOut().writer();
if (response.body) |body|
try stdout.print("My ip is {s}\n", .{body})
else
try stdout.writeAll("Failed to receive body from ipify\n");
}
```
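Posting JSON uses the same one-shot interface. A minimal sketch, based on `src/oneshot.zig` and `src/tests.zig` (the httpbin URL and the `Payload` struct are illustrative only):
```zig
const std = @import("std");
const zelda = @import("zelda");

// Illustrative payload type; any value that `std.json.stringify` accepts works.
const Payload = struct {
    name: []const u8,
};

pub fn postExample(allocator: std.mem.Allocator) !void {
    // Serializes the payload to JSON, sets the Content-Type header, and POSTs it in one call.
    var response = try zelda.postJson(allocator, "https://httpbin.org/post", Payload{ .name = "link" }, .{});
    defer response.deinit();
    if (response.body) |body|
        std.debug.print("Got {} bytes back\n", .{body.len})
    else
        std.debug.print("No body in response\n", .{});
}
```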
Of course, if this library is missing anything, feel free to open a Pull Request or issue 😊
<sup>
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
</sup>
<br/>
<sub>
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this package by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.
</sub>
|
0 | repos | repos/zelda/build.zig | const std = @import("std");
pub const zig_libressl = @import("zig-libressl/build.zig");
const Pkg = std.build.Pkg;
fn relativeToThis(comptime path: []const u8) []const u8 {
comptime {
return std.fs.path.dirname(@src().file).? ++ std.fs.path.sep_str ++ path;
}
}
pub const pkgs = struct {
pub const hzzp = Pkg{
.name = "hzzp",
.source = std.build.FileSource{ .path = relativeToThis("hzzp/src/main.zig") },
};
pub const zuri = Pkg{
.name = "zuri",
.source = std.build.FileSource{ .path = relativeToThis("zuri/src/zuri.zig") },
};
pub const libressl = Pkg{
.name = "zig-libressl",
.source = std.build.FileSource{ .path = relativeToThis("zig-libressl/src/main.zig") },
};
pub const zelda = Pkg{
.name = "zelda",
.source = .{ .path = relativeToThis("src/main.zig") },
.dependencies = &[_]Pkg{
hzzp, zuri, libressl,
},
};
};
pub fn link(
b: *std.build.Builder,
exe: *std.build.LibExeObjStep,
target: std.zig.CrossTarget,
mode: std.builtin.Mode,
use_system_libressl: bool,
) !void {
exe.addPackage(pkgs.zelda);
try zig_libressl.useLibreSslForStep(b, target, mode, relativeToThis("zig-libressl/libressl"), exe, use_system_libressl);
}
pub fn build(b: *std.build.Builder) !void {
const use_system_libressl = b.option(bool, "use-system-libressl", "Link and build from the system installed copy of LibreSSL instead of building it from source") orelse false;
const mode = b.standardReleaseOptions();
const target = b.standardTargetOptions(.{});
const test_step = b.step("test", "Run library tests.");
const maybe_test_filter = b.option([]const u8, "test-filter", "Test filter");
const sanitize_thread = b.option(bool, "sanitize-thread", "Enable ThreadSanitizer") orelse false;
const create_test_step = b.addTest("src/tests.zig");
create_test_step.linkLibC();
create_test_step.sanitize_thread = sanitize_thread;
create_test_step.setTarget(target);
create_test_step.setBuildMode(mode);
try link(b, create_test_step, target, .ReleaseFast, use_system_libressl);
if (maybe_test_filter) |test_filter| {
create_test_step.setFilter(test_filter);
}
test_step.dependOn(&create_test_step.step);
}
|
0 | repos/zelda | repos/zelda/src/main.zig | pub const zuri = @import("zuri");
pub const hzzp = @import("hzzp");
pub const libressl = @import("zig-libressl");
pub const zelda_default_user_agent = "zelda/0.0.1";
pub const logger = @import("std").log.scoped(.zelda);
pub const request = @import("request.zig");
pub const client = @import("client.zig");
pub const HttpClient = client.Client;
pub usingnamespace @import("oneshot.zig");
pub fn cleanup() void {
client.global_connection_cache.deinit();
}
|
0 | repos/zelda | repos/zelda/src/tests.zig | const std = @import("std");
const zelda = @import("zelda");
// NOTE: this test will fail if ziglang.org is down!
test "fetch status code of ziglang.org" {
defer zelda.cleanup();
var response = try zelda.get(std.testing.allocator, "https://ziglang.org");
defer response.deinit();
try std.testing.expectEqual(@as(u10, 200), @enumToInt(response.status_code));
}
const HTTPBinResponse = struct {
data: []const u8,
};
test "post some data and get it back" {
defer zelda.cleanup();
const data = "bruh moment";
var response = try zelda.postAndParseResponse(HTTPBinResponse, .{
.allocator = std.testing.allocator,
.ignore_unknown_fields = true,
}, std.testing.allocator, "https://httpbin.org/post", .{ .kind = .Raw, .bytes = data });
defer std.json.parseFree(HTTPBinResponse, response, .{
.allocator = std.testing.allocator,
.ignore_unknown_fields = true,
});
try std.testing.expectEqualStrings(data, response.data);
}
const TestDataStruct = struct {
number_of_bruhs: usize,
bruh_status: []const u8,
maximum_bruh_enabled: bool,
};
test "post some json data and get it back" {
defer zelda.cleanup();
var source = TestDataStruct{
.number_of_bruhs = 69,
.bruh_status = "engaged",
.maximum_bruh_enabled = true,
};
var http_bin_response = try zelda.postJsonAndParseResponse(HTTPBinResponse, "https://httpbin.org/post", source, .{
.allocator = std.testing.allocator,
.parse_options = .{ .ignore_unknown_fields = true },
});
defer std.json.parseFree(HTTPBinResponse, http_bin_response, .{
.allocator = std.testing.allocator,
.ignore_unknown_fields = true,
});
var token_stream = std.json.TokenStream.init(http_bin_response.data);
var obj = try std.json.parse(TestDataStruct, &token_stream, .{
.allocator = std.testing.allocator,
.ignore_unknown_fields = true,
});
defer std.json.parseFree(TestDataStruct, obj, .{
.allocator = std.testing.allocator,
.ignore_unknown_fields = true,
});
try std.testing.expectEqual(source.number_of_bruhs, obj.number_of_bruhs);
try std.testing.expectEqual(source.maximum_bruh_enabled, obj.maximum_bruh_enabled);
try std.testing.expectEqualStrings(source.bruh_status, obj.bruh_status);
}
|
0 | repos/zelda | repos/zelda/src/oneshot.zig | const std = @import("std");
const req = @import("request.zig");
const Client = @import("client.zig").Client;
const BasicPerformFunctionPrototype = fn (*std.mem.Allocator, []const u8) anyerror!req.Response;
pub fn get(allocator: std.mem.Allocator, url: []const u8) !req.Response {
var client = try Client.init(allocator, .{});
defer client.deinit();
var request = req.Request{
.method = .GET,
.url = url,
.use_global_connection_pool = true,
};
return try client.perform(request);
}
pub fn post(allocator: std.mem.Allocator, url: []const u8, body: ?req.Body) !req.Response {
var client = try Client.init(allocator, .{});
defer client.deinit();
var request = req.Request{
.method = .POST,
.url = url,
.body = body,
.use_global_connection_pool = true,
};
return try client.perform(request);
}
/// Caller is responsible for freeing the returned type
pub fn postAndParseResponse(
comptime Type: type,
parse_options: std.json.ParseOptions,
allocator: std.mem.Allocator,
url: []const u8,
body: ?req.Body,
) !Type {
var response = try post(allocator, url, body);
defer response.deinit(); // we can throw the response away because parse will copy into the structure
const response_bytes = response.body orelse return error.MissingResponseBody;
var token_stream = std.json.TokenStream.init(response_bytes);
return std.json.parse(Type, &token_stream, parse_options);
}
pub fn postJson(allocator: std.mem.Allocator, url: []const u8, json_value: anytype, stringify_options: std.json.StringifyOptions) !req.Response {
var buffer = std.ArrayList(u8).init(allocator);
defer buffer.deinit();
var writer = buffer.writer();
try std.json.stringify(json_value, stringify_options, writer);
return post(allocator, url, req.Body{ .kind = .JSON, .bytes = buffer.items });
}
/// Caller is responsible for calling std.json.parseFree (with the same parse options) on the returned value
const PostAndParseOptions = struct {
allocator: std.mem.Allocator,
parse_options: std.json.ParseOptions = .{},
stringify_options: std.json.StringifyOptions = .{},
};
fn parseOptionsWithAllocator(allocator: std.mem.Allocator, options: std.json.ParseOptions) std.json.ParseOptions {
var newOpts = options;
newOpts.allocator = allocator;
return newOpts;
}
pub fn postJsonAndParseResponse(comptime OutputType: type, url: []const u8, json_value: anytype, options: PostAndParseOptions) !OutputType {
var response = try postJson(options.allocator, url, json_value, options.stringify_options);
defer response.deinit();
const response_bytes = response.body orelse return error.MissingResponseBody;
var token_stream = std.json.TokenStream.init(response_bytes);
return std.json.parse(OutputType, &token_stream, parseOptionsWithAllocator(options.allocator, options.parse_options));
}
/// Caller is responsible for freeing the returned type
pub fn getAndParseResponse(
comptime Type: type,
parse_options: std.json.ParseOptions,
allocator: std.mem.Allocator,
url: []const u8,
) !Type {
var response = try get(allocator, url);
defer response.deinit(); // we can throw the response away because parse will copy into the structure
const response_body = response.body orelse return error.MissingResponseBody;
var json = std.json.TokenStream.init(response_body);
return std.json.parse(Type, &json, parse_options);
}
|
0 | repos/zelda | repos/zelda/src/request.zig | const std = @import("std");
const root = @import("main.zig");
const hzzp = root.hzzp;
const libressl = root.libressl;
pub const Method = enum { GET, POST, PUT, DELETE, HEAD, OPTIONS, CONNECT, PATCH, TRACE };
// TODO(haze): MultipartForm,
pub const BodyKind = enum { JSON, Raw, URLEncodedForm };
const StringList = std.ArrayList([]const u8);
pub const HeaderValue = struct {
parts: StringList,
/// Owner is responsible for the returned memory
///
/// Transforms
/// Cache-Control: no-cache
/// Cache-Control: no-store
/// Into
/// Cache-Control: no-cache, no-store
pub fn value(self: HeaderValue, allocator: std.mem.Allocator) std.mem.Allocator.Error![]u8 {
// first, find out how much we need to allocate
// 2 = ", "
var bytesNeeded = 2 * (self.parts.items.len - 1);
for (self.parts.items) |part|
bytesNeeded += part.len;
var buffer = try allocator.alloc(u8, bytesNeeded);
var fixed_buffer_stream = std.io.fixedBufferStream(buffer);
var writer = fixed_buffer_stream.writer();
for (self.parts.items) |part, idx| {
writer.writeAll(part) catch unreachable;
if (idx != self.parts.items.len - 1)
writer.writeAll(", ") catch unreachable;
}
return buffer;
}
pub fn init(allocator: std.mem.Allocator) HeaderValue {
return .{
.parts = StringList.init(allocator),
};
}
pub fn deinit(self: *HeaderValue) void {
self.parts.deinit();
self.* = undefined;
}
};
pub const HeaderMap = std.StringArrayHashMap(HeaderValue);
pub const Body = struct {
kind: BodyKind,
bytes: []const u8,
};
pub const Request = struct {
const Self = @This();
pub const Error = error{MissingScheme};
method: Method,
url: []const u8,
headers: ?HeaderMap = null,
body: ?Body = null,
use_global_connection_pool: bool,
tls_configuration: ?libressl.TlsConfiguration = null,
};
pub const Response = struct {
const Self = @This();
headers: HeaderMap,
body: ?[]u8 = null,
allocator: std.mem.Allocator,
status_code: hzzp.StatusCode,
pub fn init(allocator: std.mem.Allocator, status_code: hzzp.StatusCode) Self {
return .{
.headers = HeaderMap.init(allocator),
.allocator = allocator,
.status_code = status_code,
};
}
pub fn deinit(self: *Self) void {
var headerIter = self.headers.iterator();
while (headerIter.next()) |kv| {
self.allocator.free(kv.key_ptr.*);
for (kv.value_ptr.parts.items) |item| {
self.allocator.free(item);
}
kv.value_ptr.parts.deinit();
}
self.headers.deinit();
if (self.body) |body|
self.allocator.free(body);
self.* = undefined;
}
};
|
0 | repos/zelda | repos/zelda/src/client.zig | const std = @import("std");
const builtin = @import("builtin");
const root = @import("main.zig");
const req = @import("request.zig");
const Request = req.Request;
const Response = req.Response;
const libressl = root.libressl;
const zuri = root.zuri;
const hzzp = root.hzzp;
fn initWindows() void {
if (@import("builtin").os.tag == .windows) {
_ = std.os.windows.WSAStartup(2, 2) catch {
@panic("Failed to initialize on windows");
};
}
}
var windowsInit = std.once(initWindows);
const CurlConnectionPoolMaxAgeSecs = 118;
const CurlConnectionPoolMaxClients = 5;
// things to check when matching connections
// host & port match
// protocols match
// TODO(haze/for the future): stuff that i saw curl doing (ConnectionExists in lib/url.c)
// ssl upgraded connections?
// authentication?
// Doubly linked list, protected by a parent rwlock to ensure thread safety
const StoredConnection = struct {
const Self = @This();
const Criteria = struct {
allocator: std.mem.Allocator,
host: union(enum) {
provided: []const u8,
allocated: []u8,
},
port: u16,
is_tls: bool,
fn getHost(self: Criteria) []const u8 {
return switch (self.host) {
.allocated => |data| data,
.provided => |data| data,
};
}
pub fn eql(self: Criteria, other: Criteria) bool {
const are_both_tls = self.is_tls == other.is_tls;
const do_ports_match = self.port == other.port;
const do_hosts_match =
std.mem.eql(u8, self.getHost(), other.getHost());
return are_both_tls and do_ports_match and do_hosts_match;
}
pub fn deinit(self: *Criteria) void {
switch (self.host) {
.provided => {},
.allocated => |buf| self.allocator.free(buf),
}
self.* = undefined;
}
};
allocator: std.mem.Allocator,
clientState: union(enum) {
Ssl: libressl.SslStream,
Normal: std.net.Stream,
},
criteria: Criteria,
pub fn deinit(self: *Self) void {
self.criteria.deinit();
self.allocator.destroy(self);
}
};
const ConnectionCache = struct {
const Self = @This();
const Queue = std.TailQueue(*StoredConnection);
const Node = Queue.Node;
items: Queue = Queue{},
fn findSuitableConnection(self: *Self, criteria: StoredConnection.Criteria) ?*Node {
var ptr: ?*Node = self.items.last;
while (ptr) |node| {
root.logger.debug("Checking Connection {*} {}", .{ node, node.data });
if (node.data.criteria.eql(criteria)) return node;
ptr = node.prev;
}
return null;
}
fn removeFromCache(self: *Self, stored_connection_node: *Node) void {
self.items.remove(stored_connection_node);
}
pub fn deinit(self: *Self) void {
while (self.items.pop()) |node| {
var allocator = node.data.allocator;
node.data.deinit();
allocator.destroy(node);
}
}
fn addNewConnection(self: *Self, stored_connection_node: *Node) void {
self.items.append(stored_connection_node);
}
};
pub var global_connection_cache = ConnectionCache{};
pub const Client = struct {
const Self = @This();
const HzzpSslResponseParser = hzzp.parser.response.ResponseParser(libressl.SslStream.Reader);
const HzzpResponseParser = hzzp.parser.response.ResponseParser(std.net.Stream.Reader);
pub const HzzpSslClient = hzzp.base.client.BaseClient(libressl.SslStream.Reader, libressl.SslStream.Writer);
pub const HzzpClient = hzzp.base.client.BaseClient(std.net.Stream.Reader, std.net.Stream.Writer);
pub const State = union(enum) {
Created,
ConnectedSsl: struct {
tunnel: libressl.SslStream,
client: HzzpSslClient,
},
Connected: struct {
tcp_connection: std.net.Stream,
client: HzzpClient,
},
Shutdown,
const NextError = HzzpSslResponseParser.NextError || HzzpResponseParser.NextError;
const PayloadReader = union(enum) {
SslReader: HzzpSslClient.PayloadReader,
Reader: HzzpClient.PayloadReader,
};
pub fn payloadReader(self: *State) PayloadReader {
return switch (self.*) {
.ConnectedSsl => |*state| .{ .SslReader = state.client.reader() },
.Connected => |*state| .{ .Reader = state.client.reader() },
else => unreachable,
};
}
pub fn next(self: *State) NextError!?hzzp.parser.response.Event {
return switch (self.*) {
.ConnectedSsl => |*state| state.client.next(),
.Connected => |*state| state.client.next(),
else => unreachable,
};
}
pub fn writePayload(self: *State, maybe_data: ?[]const u8) !void {
if (maybe_data) |data|
root.logger.debug("Attempting to write {} byte payload", .{data.len})
else
root.logger.debug("Attempting to write null payload", .{});
return switch (self.*) {
.ConnectedSsl => |*state| state.client.writePayload(maybe_data),
.Connected => |*state| state.client.writePayload(maybe_data),
else => unreachable,
};
}
pub fn finishHeaders(self: *State) !void {
root.logger.debug("Attempting to finish headers", .{});
return switch (self.*) {
.ConnectedSsl => |*state| state.client.finishHeaders(),
.Connected => |*state| state.client.finishHeaders(),
else => unreachable,
};
}
pub fn writeHeaderValue(self: *State, name: []const u8, value: []const u8) !void {
root.logger.debug("Attempting to set header: \"{s}\" = \"{s}\"", .{ name, value });
return switch (self.*) {
.ConnectedSsl => |*state| state.client.writeHeaderValue(name, value),
.Connected => |*state| state.client.writeHeaderValue(name, value),
else => unreachable,
};
}
pub fn writeStatusLine(self: *State, method: []const u8, path: []const u8) !void {
root.logger.debug("Attempting to write status line (method={s}, path={s})", .{ method, path });
return switch (self.*) {
.ConnectedSsl => |*state| state.client.writeStatusLine(method, path),
.Connected => |*state| state.client.writeStatusLine(method, path),
else => unreachable,
};
}
};
allocator: std.mem.Allocator,
state: State,
client_read_buffer: []u8,
user_agent: ?[]u8,
pub fn deinit(self: *Self) void {
if (self.user_agent) |user_agent|
self.allocator.free(user_agent);
self.allocator.free(self.client_read_buffer);
self.allocator.destroy(self);
}
/// if a user agent is provided, it will be copied into the client and free'd once deinit is called
pub fn init(
allocator: std.mem.Allocator,
options: struct {
user_agent: ?[]const u8 = null,
},
) !*Self {
var client: *Self = try allocator.create(Self);
errdefer allocator.destroy(client);
client.allocator = allocator;
client.state = .Created;
client.client_read_buffer = try allocator.alloc(u8, 1 << 13);
errdefer allocator.free(client.client_read_buffer);
if (options.user_agent) |user_agent| {
client.user_agent = try allocator.alloc(u8, user_agent.len);
std.mem.copy(u8, client.user_agent.?, user_agent);
} else {
client.user_agent = null;
}
windowsInit.call();
return client;
}
pub fn perform(self: *Self, request: Request) !Response {
var uri = try zuri.Uri.parse(request.url, false);
if (!std.ascii.eqlIgnoreCase(uri.scheme, "http") and !std.ascii.eqlIgnoreCase(uri.scheme, "https"))
return error.InvalidHttpScheme;
const port: u16 = if (uri.port == null) if (std.mem.startsWith(u8, uri.scheme, "https")) @as(u16, 443) else @as(u16, 80) else uri.port.?;
var tunnel_host_buf: [1 << 8]u8 = undefined;
var tunnel_host: []const u8 = undefined;
var is_ssl = port == 443;
var reused_connection: ?*ConnectionCache.Node = null;
switch (uri.host) {
.name => |host| {
if (host.len == 0) return error.MissingHost;
std.mem.copy(u8, &tunnel_host_buf, host);
tunnel_host = tunnel_host_buf[0..host.len];
},
.ip => |addr| {
// if we have an ip, print it as the host for the iguanaTLS client
tunnel_host = try std.fmt.bufPrint(&tunnel_host_buf, "{}", .{addr});
},
}
// we need to set this null byte for TLS connections (previously tunnel_host could be a
// slice pointing into the URL, which would include the path)
tunnel_host_buf[tunnel_host.len] = '\x00';
if (request.use_global_connection_pool) {
root.logger.debug("Searching connection cache...", .{});
if (global_connection_cache.findSuitableConnection(StoredConnection.Criteria{
.allocator = self.allocator,
.host = .{ .provided = tunnel_host },
.port = port,
.is_tls = is_ssl,
})) |stored_connection_node| {
reused_connection = stored_connection_node;
self.state = switch (stored_connection_node.data.clientState) {
.Ssl => |*ssl_tunnel| .{
.ConnectedSsl = .{
.tunnel = ssl_tunnel.*,
.client = hzzp.base.client.create(self.client_read_buffer, ssl_tunnel.reader(), ssl_tunnel.writer()),
},
},
.Normal => |tcp_connection| .{ .Connected = .{
.tcp_connection = tcp_connection,
.client = hzzp.base.client.create(self.client_read_buffer, tcp_connection.reader(), tcp_connection.writer()),
} },
};
global_connection_cache.removeFromCache(stored_connection_node);
root.logger.debug("Found a connection to reuse! {}", .{stored_connection_node.data.criteria});
} else {
root.logger.debug("No reusable connection found", .{});
}
}
var created_new_connection = false;
root.logger.debug("req={}", .{request});
if (reused_connection == null) {
var tcp_connection = switch (uri.host) {
.name => |host| blk: {
root.logger.debug("Opening tcp connection to {s}:{}...", .{ host, port });
var address_list = try getAddressList(self.allocator, host, port);
defer self.allocator.free(address_list);
if (address_list.len == 0) return error.UnknownHostName;
break :blk try std.net.tcpConnectToAddress(address_list[0]);
},
.ip => |addr| blk: {
root.logger.debug("Opening tcp connection to {s}:{}...", .{ tunnel_host, port });
break :blk try std.net.tcpConnectToAddress(addr);
},
};
if (is_ssl) {
var tls_configuration = request.tls_configuration orelse try (libressl.TlsConfigurationParams{}).build();
root.logger.debug("Opening TLS tunnel... (host='{s}') {}", .{ tunnel_host, tls_configuration.params });
var tunnel = try libressl.SslStream.wrapClientStream(tls_configuration, tcp_connection, tunnel_host);
root.logger.debug("Tunnel open, creating client now", .{});
var client = hzzp.base.client.create(self.client_read_buffer, tunnel.reader(), tunnel.writer());
created_new_connection = true;
self.state = .{
.ConnectedSsl = .{
.tunnel = tunnel,
.client = client,
},
};
} else {
var client = hzzp.base.client.create(self.client_read_buffer, tcp_connection.reader(), tcp_connection.writer());
created_new_connection = true;
self.state = .{
.Connected = .{
.client = client,
.tcp_connection = tcp_connection,
},
};
}
root.logger.debug("Client created...", .{});
}
var added_connection_to_global_cache = false;
root.logger.debug("path={s} query={s} fragment={s}", .{ uri.path, uri.query, uri.fragment });
var path = if (std.mem.trim(u8, uri.path, " ").len == 0) "/" else uri.path;
if (std.mem.trim(u8, uri.query, " ").len == 0) {
try self.state.writeStatusLine(@tagName(request.method), path);
} else {
var status = try std.fmt.allocPrint(self.allocator, "{s}?{s}", .{ path, uri.query });
try self.state.writeStatusLine(@tagName(request.method), status);
self.allocator.free(status);
}
try self.state.writeHeaderValue("Host", tunnel_host);
try self.state.writeHeaderValue("Connection", "Keep-Alive");
if (self.user_agent) |user_agent|
try self.state.writeHeaderValue("User-Agent", user_agent)
else
try self.state.writeHeaderValue("User-Agent", root.zelda_default_user_agent);
// write headers now that we are connected
if (request.headers) |headerMap| {
var headerMapIter = headerMap.iterator();
while (headerMapIter.next()) |kv| {
var value = try kv.value_ptr.value(self.allocator);
defer self.allocator.free(value);
try self.state.writeHeaderValue(kv.key_ptr.*, value);
}
}
// write body
if (request.body) |body| {
switch (body.kind) {
.JSON => try self.state.writeHeaderValue("Content-Type", "application/json"),
.URLEncodedForm => try self.state.writeHeaderValue("Content-Type", "application/x-www-form-urlencoded"),
else => {},
}
var contentLengthBuffer: [64]u8 = undefined;
const contentLength = try std.fmt.bufPrint(&contentLengthBuffer, "{}", .{body.bytes.len});
try self.state.writeHeaderValue("Content-Length", contentLength);
try self.state.finishHeaders();
try self.state.writePayload(body.bytes);
} else {
try self.state.finishHeaders();
try self.state.writePayload(null);
}
root.logger.debug("Finished sending request...", .{});
var event = try self.state.next();
if (event == null) {
return error.MissingStatus;
} else while (event.? == .skip) : (event = try self.state.next()) {}
if (event == null or event.? != .status) {
return error.MissingStatus;
}
const rawCode = std.math.cast(u10, event.?.status.code) orelse return error.StatusCodeTooLarge;
const responseCode = @intToEnum(hzzp.StatusCode, rawCode);
// read response headers
var response = Response.init(self.allocator, responseCode);
event = try self.state.next();
while (event != null and event.? != .head_done) {
switch (event.?) {
.header => |header| {
const value = try self.allocator.alloc(u8, header.value.len);
std.mem.copy(u8, value, header.value);
if (response.headers.getEntry(header.name)) |entry| {
try entry.value_ptr.parts.append(value);
} else {
var list = req.HeaderValue.init(self.allocator);
try list.parts.append(value);
const name = try self.allocator.alloc(u8, header.name.len);
std.mem.copy(u8, name, header.name);
try response.headers.put(name, list);
}
},
else => return error.ExpectedHeaders,
}
event = try self.state.next();
}
// read response body (if any)
var bodyReader = self.state.payloadReader();
switch (bodyReader) {
.SslReader => |reader| response.body = try reader.readAllAlloc(self.allocator, std.math.maxInt(u64)),
.Reader => |reader| response.body = try reader.readAllAlloc(self.allocator, std.math.maxInt(u64)),
}
// This results in LLVM ir errors
// response.body = switch (bodyReader) {
// .SslReader => |reader| try reader.readAllAlloc(self.allocator, std.math.maxInt(u64)),
// .Reader => |reader| try reader.readAllAlloc(self.allocator, std.math.maxInt(u64)),
// };
if (created_new_connection and request.use_global_connection_pool) {
var stored_connection = try self.allocator.create(StoredConnection);
stored_connection.allocator = self.allocator;
stored_connection.clientState = switch (self.state) {
.ConnectedSsl => |sslState| .{ .Ssl = sslState.tunnel },
.Connected => |normalState| .{ .Normal = normalState.tcp_connection },
else => unreachable,
};
stored_connection.criteria = StoredConnection.Criteria{
.allocator = self.allocator,
.host = .{ .allocated = try self.allocator.dupe(u8, tunnel_host) },
.port = port,
.is_tls = is_ssl,
};
var node = try self.allocator.create(@TypeOf(global_connection_cache).Node);
node.next = null;
node.prev = null;
node.data = stored_connection;
global_connection_cache.addNewConnection(node);
added_connection_to_global_cache = true;
} else if (reused_connection) |stored_connection| {
// we're done with the one we used, we can put it back
global_connection_cache.addNewConnection(stored_connection);
}
return response;
}
};
/// Caller owns the returned slice and must free it with the provided allocator.
pub fn getAddressList(allocator: std.mem.Allocator, name: []const u8, port: u16) ![]std.net.Address {
const os = std.os;
var addrs: []std.net.Address = undefined;
if (builtin.target.os.tag == .windows or builtin.link_libc) {
const name_c = try std.cstr.addNullByte(allocator, name);
defer allocator.free(name_c);
const port_c = try std.fmt.allocPrintZ(allocator, "{}", .{port});
defer allocator.free(port_c);
const sys = if (builtin.target.os.tag == .windows) os.windows.ws2_32 else os.system;
const hints = os.addrinfo{
.flags = sys.AI.NUMERICSERV,
.family = os.AF.UNSPEC,
.socktype = os.SOCK.STREAM,
.protocol = os.IPPROTO.TCP,
.canonname = null,
.addr = null,
.addrlen = 0,
.next = null,
};
var res: *os.addrinfo = undefined;
const rc = sys.getaddrinfo(name_c.ptr, port_c.ptr, &hints, &res);
if (builtin.target.os.tag == .windows) switch (@intToEnum(os.windows.ws2_32.WinsockError, @intCast(u16, rc))) {
@intToEnum(os.windows.ws2_32.WinsockError, 0) => {},
.WSATRY_AGAIN => return error.TemporaryNameServerFailure,
.WSANO_RECOVERY => return error.NameServerFailure,
.WSAEAFNOSUPPORT => return error.AddressFamilyNotSupported,
.WSA_NOT_ENOUGH_MEMORY => return error.OutOfMemory,
.WSAHOST_NOT_FOUND => return error.UnknownHostName,
.WSATYPE_NOT_FOUND => return error.ServiceUnavailable,
.WSAEINVAL => unreachable,
.WSAESOCKTNOSUPPORT => unreachable,
else => |err| return os.windows.unexpectedWSAError(err),
} else switch (rc) {
@intToEnum(sys.EAI, 0) => {},
.ADDRFAMILY => return error.HostLacksNetworkAddresses,
.AGAIN => return error.TemporaryNameServerFailure,
.BADFLAGS => unreachable, // Invalid hints
.FAIL => return error.NameServerFailure,
.FAMILY => return error.AddressFamilyNotSupported,
.MEMORY => return error.OutOfMemory,
.NODATA => return error.HostLacksNetworkAddresses,
.NONAME => return error.UnknownHostName,
.SERVICE => return error.ServiceUnavailable,
.SOCKTYPE => unreachable, // Invalid socket type requested in hints
.SYSTEM => switch (os.errno(-1)) {
else => |e| return os.unexpectedErrno(e),
},
else => unreachable,
}
defer sys.freeaddrinfo(res);
const addr_count = blk: {
var count: usize = 0;
var it: ?*os.addrinfo = res;
while (it) |info| : (it = info.next) {
if (info.addr != null) {
count += 1;
}
}
break :blk count;
};
addrs = try allocator.alloc(std.net.Address, addr_count);
var it: ?*os.addrinfo = res;
var i: usize = 0;
while (it) |info| : (it = info.next) {
const addr = info.addr orelse continue;
addrs[i] = std.net.Address.initPosix(@alignCast(4, addr));
// if (info.canonname) |n| {
// if (result.canon_name == null) {
// result.canon_name = try arena.dupe(u8, mem.sliceTo(n, 0));
// }
// }
i += 1;
}
return addrs;
}
if (builtin.target.os.tag == .linux) {
const flags = std.c.AI.NUMERICSERV;
const family = os.AF.UNSPEC;
var lookup_addrs = std.ArrayList(std.net.LookupAddr).init(allocator);
defer lookup_addrs.deinit();
var canon = std.ArrayList(u8).init(allocator);
defer canon.deinit();
try std.net.linuxLookupName(&lookup_addrs, &canon, name, family, flags, port);
addrs = try allocator.alloc(std.net.Address, lookup_addrs.items.len);
// if (canon.items.len != 0) {
// result.canon_name = canon.toOwnedSlice();
// }
for (lookup_addrs.items) |lookup_addr, i| {
addrs[i] = lookup_addr.addr;
std.debug.assert(addrs[i].getPort() == port);
}
return addrs;
}
@compileError("std.net.getAddressList unimplemented for this OS");
}
|
0 | repos/zelda/examples | repos/zelda/examples/whats_my_ip/build.zig | const std = @import("std");
const zig_libressl = @import("zig-libressl-build.zig");
const Pkg = std.build.Pkg;
pub const pkgs = struct {
pub const hzzp = Pkg{
.name = "hzzp",
.source = std.build.FileSource.relative("../../hzzp/src/main.zig"),
};
pub const zuri = Pkg{
.name = "zuri",
.source = std.build.FileSource.relative("../../zuri/src/zuri.zig"),
};
pub const libressl = Pkg{
.name = "zig-libressl",
.source = std.build.FileSource.relative("../../zig-libressl/src/main.zig"),
};
pub const zelda = Pkg{
.name = "zelda",
.source = .{ .path = "../../src/main.zig" },
.dependencies = &[_]Pkg{
hzzp, zuri, libressl,
},
};
};
pub fn build(b: *std.build.Builder) !void {
const use_system_libressl = b.option(bool, "use-system-libressl", "Link and build from the system installed copy of LibreSSL instead of building it from source") orelse false;
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
// for restricting supported target set are available.
const target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
const mode = b.standardReleaseOptions();
const exe = b.addExecutable("whats_my_ip", "src/main.zig");
exe.linkLibC();
exe.addPackage(pkgs.zelda);
exe.setTarget(target);
exe.setBuildMode(mode);
exe.install();
try zig_libressl.useLibreSslForStep(b, target, mode, "libressl", exe, use_system_libressl);
const run_cmd = exe.run();
run_cmd.step.dependOn(b.getInstallStep());
if (b.args) |args| {
run_cmd.addArgs(args);
}
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
}
|
0 | repos/zelda/examples/whats_my_ip | repos/zelda/examples/whats_my_ip/src/main.zig | const std = @import("std");
const zelda = @import("zelda");
const IPResponse = struct {
ip: []const u8,
};
pub fn main() anyerror!void {
var arena = std.heap.ArenaAllocator.init(std.heap.c_allocator);
defer arena.deinit();
try printIPFromRaw(arena.allocator());
try printIPFromJson(arena.allocator());
}
pub fn printIPFromJson(allocator: std.mem.Allocator) !void {
const response = try zelda.getAndParseResponse(IPResponse, .{ .allocator = allocator }, allocator, "https://api64.ipify.org/?format=json");
defer std.json.parseFree(IPResponse, response, .{ .allocator = allocator });
var stdout = std.io.getStdOut().writer();
try stdout.print("My ip is {s}\n", .{response.ip});
}
pub fn printIPFromRaw(allocator: std.mem.Allocator) !void {
var response = try zelda.get(allocator, "http://api64.ipify.org/");
defer response.deinit();
var stdout = std.io.getStdOut().writer();
if (response.body) |body|
try stdout.print("My ip is {s}\n", .{body})
else
try stdout.writeAll("Failed to receive body from ipify\n");
}
|
0 | repos/zelda/examples | repos/zelda/examples/async/build.zig | const std = @import("std");
const zig_libressl = @import("zig-libressl-build.zig");
const Pkg = std.build.Pkg;
pub const pkgs = struct {
pub const hzzp = Pkg{
.name = "hzzp",
.source = std.build.FileSource.relative("../../hzzp/src/main.zig"),
};
pub const zuri = Pkg{
.name = "zuri",
.source = std.build.FileSource.relative("../../zuri/src/zuri.zig"),
};
pub const libressl = Pkg{
.name = "zig-libressl",
.source = std.build.FileSource.relative("../../zig-libressl/src/main.zig"),
};
pub const zelda = Pkg{
.name = "zelda",
.source = .{ .path = "../../src/main.zig" },
.dependencies = &[_]Pkg{
hzzp, zuri, libressl,
},
};
};
pub fn build(b: *std.build.Builder) !void {
const use_system_libressl = b.option(bool, "use-system-libressl", "Link and build from the system installed copy of LibreSSL instead of building it from source") orelse false;
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
// for restricting supported target set are available.
const target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
const mode = b.standardReleaseOptions();
const exe = b.addExecutable("async_zelda", "src/main.zig");
exe.linkLibC();
exe.addPackage(pkgs.zelda);
exe.setTarget(target);
exe.setBuildMode(mode);
exe.install();
try zig_libressl.useLibreSslForStep(b, target, mode, "libressl", exe, use_system_libressl);
const run_cmd = exe.run();
run_cmd.step.dependOn(b.getInstallStep());
if (b.args) |args| {
run_cmd.addArgs(args);
}
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
}
|
0 | repos/zelda/examples/async | repos/zelda/examples/async/src/main.zig | const std = @import("std");
const zelda = @import("zelda");
const out = std.log.scoped(.async_zelda);
const ConcurrentFetchCount = 10_000;
const SmallestIntegerForFetchCount = std.math.IntFittingRange(0, ConcurrentFetchCount);
pub const io_mode = .evented;
pub fn main() anyerror!void {
var allocator = std.heap.c_allocator;
// var client_frames: [ConcurrentFetchCount]@Frame(fetchAndPrint) = undefined;
var client_frames = try allocator.alloc(@Frame(fetchAndPrint), ConcurrentFetchCount);
defer allocator.free(client_frames);
var client_index: usize = 0;
while (client_index < ConcurrentFetchCount) : (client_index += 1) {
client_frames[client_index] = async fetchAndPrint(allocator, @intCast(SmallestIntegerForFetchCount, client_index));
}
var timer = try std.time.Timer.start();
for (client_frames) |*frame, index| {
await frame catch |why| {
out.info("Client {} failed with {}", .{ index, why });
};
}
std.debug.print("Collected {} results in {}", .{ ConcurrentFetchCount, std.fmt.fmtDuration(timer.read()) });
}
pub fn fetchAndPrint(allocator: std.mem.Allocator, client_id: SmallestIntegerForFetchCount) !void {
var client = try zelda.HttpClient.init(allocator, .{});
defer client.deinit();
var request = zelda.request.Request{
.method = .GET,
.url = "http://example.com",
.use_global_connection_pool = false,
};
const response = try client.perform(request);
// const response = try zelda.get(allocator, "http://example.com");
if (response.body) |body| {
out.info("[{}] got {} bytes", .{ client_id, body.len });
} else {
out.info("[{}] got no body", .{client_id});
}
}
|
0 | repos/zelda/examples | repos/zelda/examples/connection_pooling/build.zig | const std = @import("std");
const zig_libressl = @import("zig-libressl-build.zig");
const Pkg = std.build.Pkg;
pub const pkgs = struct {
pub const hzzp = Pkg{
.name = "hzzp",
.source = std.build.FileSource.relative("../../hzzp/src/main.zig"),
};
pub const zuri = Pkg{
.name = "zuri",
.source = std.build.FileSource.relative("../../zuri/src/zuri.zig"),
};
pub const libressl = Pkg{
.name = "zig-libressl",
.source = std.build.FileSource.relative("../../zig-libressl/src/main.zig"),
};
pub const zelda = Pkg{
.name = "zelda",
.source = .{ .path = "../../src/main.zig" },
.dependencies = &[_]Pkg{
hzzp, zuri, libressl,
},
};
};
pub fn build(b: *std.build.Builder) !void {
const use_system_libressl = b.option(bool, "use-system-libressl", "Link and build from the system installed copy of LibreSSL instead of building it from source") orelse false;
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
// for restricting supported target set are available.
const target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
const mode = b.standardReleaseOptions();
const exe = b.addExecutable("connection_pooling", "src/main.zig");
exe.linkLibC();
exe.addPackage(pkgs.zelda);
exe.setTarget(target);
exe.setBuildMode(mode);
exe.install();
try zig_libressl.useLibreSslForStep(b, target, mode, "libressl", exe, use_system_libressl);
const run_cmd = exe.run();
run_cmd.step.dependOn(b.getInstallStep());
if (b.args) |args| {
run_cmd.addArgs(args);
}
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
}
|
0 | repos/zelda/examples/connection_pooling | repos/zelda/examples/connection_pooling/src/main.zig | const std = @import("std");
const zelda = @import("zelda");
const out = std.log.scoped(.connection_pooling);
const TestCount = 32;
pub fn main() anyerror!void {
var arena = std.heap.ArenaAllocator.init(std.heap.c_allocator);
defer arena.deinit();
const url = "http://example.com";
var timer = try std.time.Timer.start();
const no_pool_average = try testConnection(arena.allocator(), url, false);
std.debug.print("no pool took: {}\n", .{std.fmt.fmtDuration(timer.lap())});
const pool_average = try testConnection(arena.allocator(), url, true);
std.debug.print("pool took: {}\n", .{std.fmt.fmtDuration(timer.lap())});
if (pool_average < no_pool_average)
std.debug.print("{} runs ea: pooling saved an avg of {}\n", .{ TestCount, std.fmt.fmtDuration(no_pool_average - pool_average) })
else
std.debug.print("{} runs ea: pooling was slower by an avg of {}\n", .{ TestCount, std.fmt.fmtDuration(pool_average - no_pool_average) });
}
fn testConnection(allocator: std.mem.Allocator, url: []const u8, use_conn_pool: bool) anyerror!u64 {
var times = std.ArrayList(u64).init(allocator);
defer times.deinit();
var count: usize = 0;
var timer = try std.time.Timer.start();
while (count < TestCount) : (count += 1) {
timer.reset();
var client = try zelda.HttpClient.init(allocator, .{});
defer client.deinit();
var request = zelda.request.Request{ .method = .GET, .url = url, .use_global_connection_pool = use_conn_pool };
_ = try client.perform(request);
const requestDuration = timer.lap();
try times.append(requestDuration);
out.info("[{}] request took {}", .{ count + 1, std.fmt.fmtDuration(requestDuration) });
}
var sum: u64 = 0;
for (times.items) |time|
sum += time;
const avg = sum / times.items.len;
out.info("pool={}, Avg {}", .{ use_conn_pool, std.fmt.fmtDuration(avg) });
return avg;
}
|
0 | repos | repos/zig-sdk/RELEASE.md | # Release
Every major/minor release should come with its own `release/vX.Y` branch. To
create this branch, run `dotnet nbgv prepare-release` from the `master` branch.
Pass `--versionIncrement major` if the version being developed after this
release is going to involve a major version bump.
Patch-level releases should be done out of the relevant major/minor branch. For
example, both `1.0.1` and `1.0.5` should come out of `release/v1.0`. So, there
is no need to run `dotnet nbgv prepare-release` in this case.
Before tagging a release, run `./cake` in the release branch locally on all
platforms that you have access to, and try the [sample projects](src/samples).
Verify that nothing has regressed. Also, ensure that the release branch builds
and tests successfully on [CI](https://github.com/vezel-dev/zig-sdk/actions).
Next, run `dotnet nbgv tag` from the release branch to create a release tag,
followed by `git tag <tag> <tag> -f -m <tag> -s` to sign it, and then push it
with `git push origin <tag>`. Again, wait for CI to build and test the tag. If
something goes wrong on CI, you can run `git tag -d <tag>` and
`git push origin :<tag>` to delete the tag until you resolve the issue(s), and
then repeat this step.
Finally, to actually publish the release, go to the
[releases page](https://github.com/vezel-dev/zig-sdk/releases) to create a
release from the tag you pushed, ideally with some well-written release notes.
Once the release is published, a workflow will build and publish NuGet packages
from the tag.
|
0 | repos | repos/zig-sdk/tasks.vs.json | {
"version": "0.2.1",
"tasks": [
{
"taskLabel": "Build",
"contextType": "build",
"appliesTo": "/",
"type": "launch",
"commands": [
{
"command": "dotnet",
"args": [
"tool",
"restore"
]
},
{
"command": "dotnet",
"args": [
"cake",
"-t",
"default-editor"
]
}
]
}
]
}
|
0 | repos | repos/zig-sdk/global.json | {
"$schema": "https://json.schemastore.org/global.json",
"sdk": {
"version": "8.0.400",
"rollForward": "disable",
"allowPrelease": false
},
"msbuild-sdks": {
"Microsoft.Build.Traversal": "4.1.0"
}
}
|
0 | repos | repos/zig-sdk/dotnet-tools.json | {
"version": 1,
"isRoot": true,
"tools": {
"cake.tool": {
"version": "4.0.0",
"commands": [
"dotnet-cake"
]
},
"dotnet-config": {
"version": "1.2.0",
"commands": [
"dotnet-config"
]
},
"dotnet-file": {
"version": "1.6.2",
"commands": [
"dotnet-file"
]
},
"gpr": {
"version": "0.1.294",
"commands": [
"gpr"
]
},
"nbgv": {
"version": "3.6.139",
"commands": [
"nbgv"
]
},
"sourcelink": {
"version": "3.1.1",
"commands": [
"sourcelink"
]
}
}
}
|
0 | repos | repos/zig-sdk/.stylecop.json | {
"$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json",
"settings": {
"indentation": {
"indentationSize": 4,
"tabSize": 4,
"useTabs": false
},
"readabilityRules": {
"allowBuiltInTypeAliases": false
},
"orderingRules": {
"elementOrder": [
"kind",
"constant",
"static",
"readonly"
],
"usingDirectivesPlacement": "outsideNamespace",
"systemUsingDirectivesFirst": true,
"blankLinesBetweenUsingGroups": "omit"
},
"namingRules": {
"tupleElementNameCasing": "PascalCase",
"includeInferredTupleElementNames": false
},
"maintainabilityRules": {
"topLevelTypes": [
"class",
"enum",
"delegate",
"interface",
"struct"
]
},
"layoutRules": {
"allowDoWhileOnClosingBrace": false,
"allowConsecutiveUsings": true,
"newlineAtEndOfFile": "require"
},
"documentationRules": {
"companyName": "Vezel",
"copyrightText": "SPDX-License-Identifier: 0BSD",
"headerDecoration": "",
"xmlHeader": false,
"fileNamingConvention": "metadata",
"documentationCulture": "en-US",
"documentExposedElements": true,
"documentInternalElements": false,
"documentPrivateElements": false,
"documentPrivateFields": false,
"documentInterfaces": false,
"excludeFromPunctuationCheck": [
"seealso"
]
}
}
}
|
0 | repos | repos/zig-sdk/.markdownlint.json | {
"$schema": "https://raw.githubusercontent.com/DavidAnson/markdownlint/main/schema/markdownlint-config-schema.json",
"default": true,
"MD001": true,
"MD003": {
"style": "atx"
},
"MD004": {
"style": "asterisk"
},
"MD005": true,
"MD007": {
"start_indented": false,
"start_indent": 4,
"indent": 4
},
"MD009": {
"br_spaces": 0,
"strict": true
},
"MD010": {
"spaces_per_tab": 4
},
"MD011": true,
"MD012": true,
"MD013": {
"code_blocks": false,
"headings": false,
"tables": false
},
"MD014": true,
"MD018": true,
"MD019": true,
"MD020": true,
"MD021": true,
"MD022": true,
"MD023": true,
"MD024": true,
"MD025": true,
"MD026": {
"punctuation": "!,.:;?"
},
"MD027": true,
"MD028": true,
"MD029": {
"style": "ordered"
},
"MD030": true,
"MD031": true,
"MD032": true,
"MD033": {
"allowed_elements": [
"div",
"img",
"p",
"strong"
]
},
"MD034": true,
"MD035": {
"style": "--------------------------------------------------------------------------------"
},
"MD036": {
"punctuation": "!,.:;?"
},
"MD037": true,
"MD038": true,
"MD039": true,
"MD040": {
"language_only": true
},
"MD041": true,
"MD042": true,
"MD043": false,
"MD044": true,
"MD045": true,
"MD046": {
"style": "fenced"
},
"MD047": true,
"MD048": {
"style": "backtick"
},
"MD049": {
"style": "asterisk"
},
"MD050": {
"style": "asterisk"
},
"MD051": true,
"MD052": true,
"MD053": {
"ignored_definitions": []
}
}
|
0 | repos | repos/zig-sdk/PACKAGE.md | # Zig SDK
The **Zig SDK** is an
[MSBuild SDK](https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-use-project-sdk)
that augments the .NET SDK with the ability to build Zig, C, and C++ projects.
With support for multiple programming languages, cross-compilation, NuGet
packaging, and more, the **Zig SDK** makes it trivial to author native
components as part of your .NET solution - without all the hassle that is
usually part and parcel of building and packaging native code. These features
are powered by the [Zig](https://ziglang.org) toolchain.
This project offers the following packages:
* [Vezel.Zig.Sdk](https://www.nuget.org/packages/Vezel.Zig.Sdk): Provides the
MSBuild SDK and associated tasks.
For more information, please visit the
[project home page](https://docs.vezel.dev/zig-sdk).
|
0 | repos | repos/zig-sdk/README.md | # Zig SDK
<div align="center">
<img src="zig.svg"
width="128"
alt="Zig SDK" />
</div>
<p align="center">
<strong>
An MSBuild SDK for building Zig, C, and C++ projects using the Zig
compiler.
</strong>
</p>
<div align="center">
[](LICENSE-0BSD)
[](https://github.com/vezel-dev/zig-sdk/commits/master)
[](https://github.com/vezel-dev/zig-sdk/actions/workflows/build.yml)
[](https://github.com/vezel-dev/zig-sdk/discussions)
[](https://discord.gg/wtzCfaX2Nj)
[](https://vezel.zulipchat.com)
</div>
--------------------------------------------------------------------------------
The **Zig SDK** is an
[MSBuild SDK](https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-use-project-sdk)
that augments the .NET SDK with the ability to build Zig, C, and C++ projects.
With support for multiple programming languages, cross-compilation, NuGet
packaging, and more, the **Zig SDK** makes it trivial to author native
components as part of your .NET solution - without all the hassle that is
usually part and parcel of building and packaging native code. These features
are powered by the [Zig](https://ziglang.org) toolchain.
## Usage
This project offers the following packages:
| Package | Description | Downloads |
| -: | - | :- |
| [![Vezel.Zig.Sdk][sdk-img]][sdk-pkg] | Provides the MSBuild SDK and associated tasks. | ![Downloads][sdk-dls] |
[sdk-pkg]: https://www.nuget.org/packages/Vezel.Zig.Sdk
[sdk-img]: https://img.shields.io/nuget/v/Vezel.Zig.Sdk?label=Vezel.Zig.Sdk
[sdk-dls]: https://img.shields.io/nuget/dt/Vezel.Zig.Sdk?label=
To install an SDK package in a project, add it to your `global.json` under the
`msbuild-sdks` property.
For more information, please visit the
[project home page](https://docs.vezel.dev/zig-sdk).
## Building
You will need the .NET SDK installed. Simply run `./cake`
(a [Bash](https://www.gnu.org/software/bash) script) to build artifacts. You can
also use `./cake pack` if you do not want to build the documentation (which
requires Node.js).
## License
This project is licensed under the terms found in
[`LICENSE-0BSD`](LICENSE-0BSD).
The Zig logo is licensed under the terms found in
[`LICENSE-CC-BY-SA-4.0`](LICENSE-CC-BY-SA-4.0).
|
0 | repos | repos/zig-sdk/Directory.Packages.props | <Project>
<PropertyGroup>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
</PropertyGroup>
<ItemGroup>
<GlobalPackageReference Include="DotNet.ReproducibleBuilds"
Version="1.2.4" />
<GlobalPackageReference Include="Microsoft.Build.CopyOnWrite"
Version="1.0.315" />
<GlobalPackageReference Include="Nerdbank.GitVersioning"
Version="3.6.143" />
<GlobalPackageReference Include="PolySharp"
Version="1.14.1" />
<GlobalPackageReference Include="StyleCop.Analyzers"
Version="1.2.0-beta.556" />
</ItemGroup>
<ItemGroup>
<PackageVersion Include="Microsoft.Build.Tasks.Core"
Version="17.11.4" />
</ItemGroup>
</Project>
|
0 | repos | repos/zig-sdk/version.json | {
"$schema": "https://raw.githubusercontent.com/dotnet/Nerdbank.GitVersioning/main/src/NerdBank.GitVersioning/version.schema.json",
"version": "6.0-dev",
"nuGetPackageVersion": {
"semVer": 2.0
},
"publicReleaseRefSpec": [
"^refs/heads/master$",
"^refs/tags/v.*$"
],
"cloudBuild": {
"setVersionVariables": false
},
"release": {
"branchName": "release/v{version}",
"firstUnstableTag": "dev"
}
} |
0 | repos | repos/zig-sdk/Directory.Build.props | <Project>
<PropertyGroup>
<Product>Zig SDK</Product>
<Description>The $(Product) is an MSBuild SDK for building Zig, C, and C++ projects using the Zig compiler.</Description>
<Company>Vezel</Company>
<Authors>$(Company) Contributors</Authors>
<Copyright>Copyright © $(Authors)</Copyright>
</PropertyGroup>
<PropertyGroup>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<AnalysisLevel>latest-all</AnalysisLevel>
<Configuration>Release</Configuration>
<DisableRuntimeMarshalling>true</DisableRuntimeMarshalling>
<EmitCompilerGeneratedFiles>true</EmitCompilerGeneratedFiles>
<EnablePackageValidation>true</EnablePackageValidation>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<ImplicitUsings>true</ImplicitUsings>
<IsPackable>false</IsPackable>
<IsPublishable>false</IsPublishable>
<LangVersion>latest</LangVersion>
<Nullable>enable</Nullable>
<PackageIcon>zig.png</PackageIcon>
<PackageLicenseExpression>0BSD</PackageLicenseExpression>
<PackageProjectUrl>https://docs.vezel.dev/zig-sdk</PackageProjectUrl>
<PackageReadmeFile>PACKAGE.md</PackageReadmeFile>
<PolySharpIncludeRuntimeSupportedAttributes>true</PolySharpIncludeRuntimeSupportedAttributes>
<RepositoryUrl>https://github.com/vezel-dev/zig-sdk.git</RepositoryUrl>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<PropertyGroup>
<UseArtifactsOutput>true</UseArtifactsOutput>
<ArtifactsPath>$(MSBuildThisFileDirectory)out</ArtifactsPath>
<ArtifactsProjectName>$([MSBuild]::MakeRelative('$(MSBuildThisFileDirectory)', '$(MSBuildProjectDirectory)'))</ArtifactsProjectName>
<ArtifactsPublishOutputName>pub</ArtifactsPublishOutputName>
<ArtifactsPackageOutputName>pkg</ArtifactsPackageOutputName>
</PropertyGroup>
</Project>
|
0 | repos/zig-sdk | repos/zig-sdk/.fleet/settings.json | {
"files.exclude": [
"out",
".idea",
".vs",
"bin",
"node_modules",
"obj"
]
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/samples/Directory.Packages.props | <Project>
<!-- Isolate from the rest of the tree; do not import top-level file. -->
</Project>
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/samples/Directory.Build.props | <Project>
<!-- Isolate from the rest of the tree; do not import top-level file. -->
</Project>
|
0 | repos/zig-sdk/src/samples/cxxlib | repos/zig-sdk/src/samples/cxxlib/api/cxxlib.h | // SPDX-License-Identifier: 0BSD
#include <cxxlib/forty-two.h>
|
0 | repos/zig-sdk/src/samples/cxxlib/api | repos/zig-sdk/src/samples/cxxlib/api/cxxlib/forty-two.h | // SPDX-License-Identifier: 0BSD
__attribute__((visibility("default")))
int cxxlib();
|
0 | repos/zig-sdk/src/samples | repos/zig-sdk/src/samples/zigexe/main.zig | // SPDX-License-Identifier: 0BSD
const std = @import("std");
const expect = std.testing.expect;
const header = @cImport(@cInclude("zigexe.h"));
test "foo bar" {
try expect(2 + 2 == 4);
}
test "foo baz" {
try expect('a' != 'b');
}
test "baz qux" {
try expect(1 == 1);
}
pub fn main() void {
std.log.info(header.HELLO_WORLD, .{ });
}
|
0 | repos/zig-sdk/src/samples/zigexe | repos/zig-sdk/src/samples/zigexe/include/zigexe.h | // SPDX-License-Identifier: 0BSD
#define HELLO_WORLD "Hello World"
|
0 | repos/zig-sdk/src/samples | repos/zig-sdk/src/samples/cexe/prelude.h | // SPDX-License-Identifier: 0BSD
#define HELLO_WORLD "Hello World\n"
|
0 | repos/zig-sdk/src/samples | repos/zig-sdk/src/samples/cexe/main.c | // SPDX-License-Identifier: 0BSD
#include <stdio.h>
int main(void)
{
return printf(HELLO_WORLD) < 0;
}
|
0 | repos/zig-sdk/src/samples | repos/zig-sdk/src/samples/ziglib/ziglib.zig | // SPDX-License-Identifier: 0BSD
const std = @import("std");
const assert = std.debug.assert;
pub export fn ziglib() i32 {
return 42;
}
test "test ziglib fn" {
assert(ziglib() == 42);
}
|
0 | repos/zig-sdk/src/samples | repos/zig-sdk/src/samples/clib/clib.c | // SPDX-License-Identifier: 0BSD
#include "clib.h"
typedef struct
{
int y;
} test1;
typedef struct
{
test1;
} test2;
int x;
#if defined(ZIG_OS_WINDOWS)
void undefined()
{
// Testing this scenario on Windows would require an import library.
}
#else
void undefined();
#endif
__attribute__((visibility("default")))
int clib(void)
{
// Exercise AllowUndefinedSymbols.
undefined();
// Triggers -Wparentheses.
if (x = 42)
return x;
return 0;
}
|
0 | repos/zig-sdk/src/samples/clib | repos/zig-sdk/src/samples/clib/include/clib.h | // SPDX-License-Identifier: 0BSD
#define FORTY_TWO 42
|
0 | repos/zig-sdk/src/samples/clib | repos/zig-sdk/src/samples/clib/subdir1/subdir1.c | // SPDX-License-Identifier: 0BSD
#include "subdir2/subdir2.h"
|
0 | repos/zig-sdk/src/samples/clib | repos/zig-sdk/src/samples/clib/subdir2/subdir2.h | // SPDX-License-Identifier: 0BSD
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigSymbolExports.cs | // SPDX-License-Identifier: 0BSD
namespace Vezel.Zig.Tasks;
public enum ZigSymbolExports
{
Used,
All,
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/sdk.cs | // SPDX-License-Identifier: 0BSD
[module: SkipLocalsInit]
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigFormatterMode.cs | // SPDX-License-Identifier: 0BSD
namespace Vezel.Zig.Tasks;
public enum ZigFormatterMode
{
Check,
Execute,
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigOutputType.cs | // SPDX-License-Identifier: 0BSD
namespace Vezel.Zig.Tasks;
public enum ZigOutputType
{
Exe,
WinExe,
Library,
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigConfiguration.cs | // SPDX-License-Identifier: 0BSD
namespace Vezel.Zig.Tasks;
public enum ZigConfiguration
{
Debug,
Release,
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigCompilerMode.cs | // SPDX-License-Identifier: 0BSD
namespace Vezel.Zig.Tasks;
public enum ZigCompilerMode
{
C,
Cxx,
Zig,
Test,
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigCompile.cs | // SPDX-License-Identifier: 0BSD
using Microsoft.Build.Framework;
using Microsoft.Build.Tasks;
namespace Vezel.Zig.Tasks;
[SuppressMessage("", "CA1819")]
public sealed class ZigCompile : ZigToolTask
{
private const StringSplitOptions SplitOptions = StringSplitOptions.RemoveEmptyEntries;
private const StringComparison Comparison = StringComparison.InvariantCulture;
[Required]
public bool AllowUndefinedSymbols { get; set; }
[Required]
public bool AccessControl { get; set; }
[Required]
public bool AsyncExceptions { get; set; }
[Required]
public bool BlockExtensions { get; set; }
[Required]
public bool BufferAnalysis { get; set; }
public ITaskItem? CommandFragmentsDirectory { get; set; }
[Required]
public string CompilerMode
{
get => _compilerMode.ToString();
set => _compilerMode = (ZigCompilerMode)Enum.Parse(typeof(ZigCompilerMode), value);
}
[Required]
public string Configuration
{
get => _configuration.ToString();
set => _configuration = (ZigConfiguration)Enum.Parse(typeof(ZigConfiguration), value);
}
[Required]
public bool ConsumptionAnalysis { get; set; }
[Required]
public bool CxxExceptions { get; set; }
[Required]
public bool CxxReflection { get; set; }
[Required]
public bool DebugSymbols { get; set; }
public string? DefineConstants { get; set; }
[Required]
public bool Deterministic { get; set; }
public string? DisableWarnings { get; set; }
[Required]
public bool DocumentationAnalysis { get; set; }
[Required]
public bool DynamicImageBase { get; set; }
[Required]
public bool EagerBinding { get; set; }
[Required]
public bool FastMath { get; set; }
public int ImageBase
{
get => _imageBase ?? 0;
set => _imageBase = value;
}
[Required]
public ITaskItem[] IncludeDirectories { get; set; } = null!;
[Required]
public string LanguageStandard { get; set; } = null!;
[Required]
public ITaskItem[] LibraryIncludeDirectories { get; set; } = null!;
[Required]
public ITaskItem[] LibraryReferences { get; set; } = null!;
[Required]
public ITaskItem[] LinkerDirectories { get; set; } = null!;
[Required]
public ITaskItem[] LinkerReferences { get; set; } = null!;
[Required]
public bool LinkTimeOptimization { get; set; }
[Required]
public bool MicrosoftExtensions { get; set; }
[Required]
public bool NullabilityAnalysis { get; set; }
[Required]
public ITaskItem OutputBinary { get; set; } = null!;
[Required]
public string OutputType
{
get => _outputType.ToString();
set => _outputType = (ZigOutputType)Enum.Parse(typeof(ZigOutputType), value);
}
[Required]
public ITaskItem[] PreludeHeaders { get; set; } = null!;
public ITaskItem? PublicIncludeDirectory { get; set; }
[Required]
public string ReleaseMode
{
get => _releaseMode.ToString();
set => _releaseMode = (ZigReleaseMode)Enum.Parse(typeof(ZigReleaseMode), value);
}
[Required]
public bool RelocationHardening { get; set; }
[Required]
public ITaskItem[] Sanitizers { get; set; } = null!;
[Required]
public ITaskItem[] Sources { get; set; } = null!;
public int StackSize
{
get => _stackSize ?? 0;
set => _stackSize = value;
}
[Required]
public string SymbolExports
{
get => _symbolExports.ToString();
set => _symbolExports = (ZigSymbolExports)Enum.Parse(typeof(ZigSymbolExports), value);
}
[Required]
public string SymbolVisibility
{
get => _symbolVisibility.ToString();
set => _symbolVisibility = (ZigSymbolVisibility)Enum.Parse(typeof(ZigSymbolVisibility), value);
}
[Required]
public bool TagAnalysis { get; set; }
[Required]
public string TargetFileName { get; set; } = null!;
[Required]
public string TargetRuntimeIdentifier { get; set; } = null!;
[Required]
public string TargetTriple { get; set; } = null!;
public string? TestFilter { get; set; }
[Required]
public bool ThreadingAnalysis { get; set; }
[Required]
public bool TreatWarningsAsErrors { get; set; }
[Required]
public bool TrustAnalysis { get; set; }
[Required]
public bool UnicodeEnvironment { get; set; }
[Required]
public int WarningLevel { get; set; }
private static readonly CultureInfo _culture = CultureInfo.InvariantCulture;
private ZigCompilerMode _compilerMode;
private ZigConfiguration _configuration;
private int? _imageBase;
private ZigOutputType _outputType;
private ZigReleaseMode _releaseMode;
private int? _stackSize;
private ZigSymbolExports _symbolExports;
private ZigSymbolVisibility _symbolVisibility;
[SuppressMessage("", "CA2201")]
[SuppressMessage("", "CA1308")]
protected override string GenerateCommandLineCommands()
{
var builder = new CommandLineBuilderExtension();
var isTest = _compilerMode == ZigCompilerMode.Test;
var isZig = _compilerMode == ZigCompilerMode.Zig || isTest;
var isCxx = _compilerMode == ZigCompilerMode.Cxx;
builder.AppendSwitch((_compilerMode, _outputType) switch
{
(ZigCompilerMode.C, _) => "cc",
(ZigCompilerMode.Cxx, _) => "c++",
(ZigCompilerMode.Zig, ZigOutputType.Library) => "build-lib",
(ZigCompilerMode.Zig, _) => "build-exe",
(ZigCompilerMode.Test, _) => "test",
_ => throw new Exception(),
});
// This enables MSBuild to recognize diagnostics properly. Make sure we pass this immediately after the cc/c++
// command.
if (!isZig)
builder.AppendSwitch("-fdiagnostics-format=msvc");
builder.AppendSwitch($"-target {TargetTriple.ToLowerInvariant()}");
if (_outputType == ZigOutputType.Library)
{
builder.AppendSwitch("-fPIC");
if (!isZig)
{
builder.AppendSwitch("-shared");
builder.AppendSwitchIfNotNull("-Wl,-soname,", TargetFileName);
}
var (un, no) = AllowUndefinedSymbols ? ("un", string.Empty) : (string.Empty, "no-");
// These flags are needed to cover Linux and Windows/macOS, respectively.
builder.AppendSwitch(isZig ? $"-z {un}defs" : $"-Wl,-z,{un}defs");
builder.AppendSwitch(isZig ? $"-f{no}allow-shlib-undefined" : $"-Wl,--{no}allow-shlib-undefined");
}
else
{
builder.AppendSwitch("-fPIE");
if (_symbolExports == ZigSymbolExports.All)
builder.AppendSwitch("-rdynamic");
}
if (isZig)
{
// The compiler uses static linking by default when building Zig code. We want dynamic linking in all cases.
builder.AppendSwitch("-dynamic");
// When building Zig code, by default, the compiler links statically to a platform-appropriate libc. We
// absolutely do not want that behavior when building code that might be loaded in a .NET process.
builder.AppendSwitch("-lc");
if (_configuration == ZigConfiguration.Release)
builder.AppendSwitch($"-O Release{_releaseMode}");
builder.AppendSwitch(DebugSymbols ? "-fno-strip" : "-fstrip");
}
else
{
// These exact flags are treated specially by zig cc/c++. They activate Debug, ReleaseFast, ReleaseSafe, and
// ReleaseSmall respectively. This in turn activates a bunch of other mode-specific flags that we do not
// have to specify here as a result.
builder.AppendSwitch((_configuration, _releaseMode) switch
{
(ZigConfiguration.Debug, _) => "-O0",
(ZigConfiguration.Release, ZigReleaseMode.Fast) => "-O2",
(ZigConfiguration.Release, ZigReleaseMode.Safe) => "-O2 -fsanitize=undefined",
(ZigConfiguration.Release, ZigReleaseMode.Small) => "-Os",
_ => throw new Exception(),
});
if (DebugSymbols)
builder.AppendSwitch("-g");
if (_symbolVisibility == ZigSymbolVisibility.Hidden)
builder.AppendSwitch("-fvisibility=hidden");
builder.AppendSwitch($"-std={LanguageStandard.ToLowerInvariant()}");
if (BlockExtensions)
builder.AppendSwitch("-fblocks");
if (MicrosoftExtensions)
{
builder.AppendSwitch("-fms-extensions");
builder.AppendSwitch("-Wno-microsoft-abstract");
builder.AppendSwitch("-Wno-microsoft-anon-tag");
builder.AppendSwitch("-Wno-microsoft-union-member-reference");
}
if (isCxx)
{
if (!AccessControl)
builder.AppendSwitch("-fno-access-control");
if (!CxxReflection)
builder.AppendSwitch("-fno-rtti");
if (!CxxExceptions)
builder.AppendSwitch("-fno-exceptions");
else if (AsyncExceptions)
builder.AppendSwitch("-fasync-exceptions");
}
else if (CxxExceptions)
builder.AppendSwitch("-fexceptions");
if (UnicodeEnvironment)
{
builder.AppendSwitch("-municode");
builder.AppendSwitchIfNotNull("-D ", "_UNICODE");
}
builder.AppendSwitch("-fno-strict-aliasing");
if (FastMath)
builder.AppendSwitch("-ffast-math");
if (TreatWarningsAsErrors)
builder.AppendSwitch("-Werror");
var disabledWarnings = new HashSet<string>(
(DisableWarnings ?? string.Empty)
.Split([';'], SplitOptions)
.Select(w => w.Trim())
.Where(w =>
{
if (string.IsNullOrEmpty(w))
return false;
if (w.StartsWith("no-", Comparison))
{
Log.LogWarning("The 'no-' prefix on warning '{0}' is invalid", w[3..]);
return false;
}
if (w.StartsWith("error=", Comparison))
{
Log.LogWarning("Changing specific warning '{0}' to error is not supported", w[6..]);
return false;
}
if (w == "error")
{
Log.LogWarning(
"Changing all warnings to errors should be done with '{0}'",
nameof(TreatWarningsAsErrors));
return false;
}
string? property = null;
if (w.StartsWith("consumed", Comparison))
property = nameof(ConsumptionAnalysis);
else if (w.StartsWith("documentation", Comparison))
property = nameof(DocumentationAnalysis);
else if (w.StartsWith("microsoft", Comparison))
property = nameof(MicrosoftExtensions);
else if (w.StartsWith("nullability", Comparison) ||
w.StartsWith("nullable", Comparison))
property = nameof(NullabilityAnalysis);
else if (w.StartsWith("tcb-enforcement", Comparison))
property = nameof(TrustAnalysis);
else if (w.StartsWith("type-safety", Comparison))
property = nameof(TagAnalysis);
else if (w.StartsWith("thread-safety", Comparison))
property = nameof(ThreadingAnalysis);
if (property != null)
{
Log.LogWarning("The '{0}' warning is controlled by '{1}'", w, property);
return false;
}
return true;
}));
void TryAppendWarningSwitch(string name)
{
// Try to avoid adding a warning flag if the user explicitly disabled it. This will not cover every
// possible case due to aggregate flags, but it will at least prevent some amount of command line length
// explosion.
if (!disabledWarnings.Contains(name))
builder.AppendSwitch($"-W{name}");
}
// Unfortunately, a lot of good warnings that really should be on by default are not. So, we have to keep a
// manual list of extra warnings to enable and make sure to keep it in sync with whatever LLVM/Clang version
// Zig is shipping with.
switch (WarningLevel)
{
case <= 0:
builder.AppendSwitch("-Wno-everything");
break;
case 1:
TryAppendWarningSwitch("alloca");
TryAppendWarningSwitch("invalid-utf8");
TryAppendWarningSwitch("non-gcc");
TryAppendWarningSwitch("reserved-identifier");
TryAppendWarningSwitch("signed-enum-bitfield");
if (isCxx)
{
TryAppendWarningSwitch("class-varargs");
TryAppendWarningSwitch("non-virtual-dtor");
TryAppendWarningSwitch("undefined-reinterpret-cast");
}
foreach (var warning in disabledWarnings)
builder.AppendSwitch($"-Wno-{warning}");
break;
case 2:
TryAppendWarningSwitch("all");
TryAppendWarningSwitch("array-bounds-pointer-arithmetic");
TryAppendWarningSwitch("c++-compat");
TryAppendWarningSwitch("cast-align");
TryAppendWarningSwitch("cast-qual");
TryAppendWarningSwitch("comma");
TryAppendWarningSwitch("float-equal");
TryAppendWarningSwitch("pointer-arith");
TryAppendWarningSwitch("shift-sign-overflow");
goto case 1;
case 3:
TryAppendWarningSwitch("anon-enum-enum-conversion");
TryAppendWarningSwitch("assign-enum");
TryAppendWarningSwitch("completion-handler");
TryAppendWarningSwitch("conditional-uninitialized");
TryAppendWarningSwitch("deprecated");
TryAppendWarningSwitch("extra");
TryAppendWarningSwitch("format-pedantic");
TryAppendWarningSwitch("format-type-confusion");
TryAppendWarningSwitch("implicit-fallthrough");
TryAppendWarningSwitch("keyword-macro");
TryAppendWarningSwitch("loop-analysis");
TryAppendWarningSwitch("over-aligned");
TryAppendWarningSwitch("shadow-all");
TryAppendWarningSwitch("switch-enum");
if (isCxx)
{
TryAppendWarningSwitch("inconsistent-missing-destructor-override");
TryAppendWarningSwitch("suggest-destructor-override");
TryAppendWarningSwitch("suggest-override");
}
goto case 2;
case 4:
default:
TryAppendWarningSwitch("bad-function-cast");
TryAppendWarningSwitch("compound-token-split");
TryAppendWarningSwitch("covered-switch-default");
TryAppendWarningSwitch("duplicate-decl-specifier");
TryAppendWarningSwitch("duplicate-enum");
TryAppendWarningSwitch("embedded-directive");
TryAppendWarningSwitch("expansion-to-defined");
TryAppendWarningSwitch("extra-semi");
TryAppendWarningSwitch("format=2");
TryAppendWarningSwitch("four-char-constants");
TryAppendWarningSwitch("incompatible-function-pointer-types-strict");
TryAppendWarningSwitch("missing-noreturn");
TryAppendWarningSwitch("redundant-parens");
TryAppendWarningSwitch("undef");
TryAppendWarningSwitch("unreachable-code-aggressive");
if (isCxx)
{
TryAppendWarningSwitch("atomic-implicit-seq-cst");
TryAppendWarningSwitch("ctad-maybe-unsupported");
TryAppendWarningSwitch("dtor-name");
TryAppendWarningSwitch("header-hygiene");
TryAppendWarningSwitch("old-style-cast");
TryAppendWarningSwitch("undefined-func-template");
TryAppendWarningSwitch("unsupported-dll-base-class-template");
TryAppendWarningSwitch("unused-exception-parameter");
TryAppendWarningSwitch("unused-member-function");
TryAppendWarningSwitch("unused-template");
TryAppendWarningSwitch("zero-as-null-pointer-constant");
}
else
{
TryAppendWarningSwitch("missing-prototypes");
TryAppendWarningSwitch("missing-variable-declarations");
TryAppendWarningSwitch("strict-prototypes");
}
goto case 3;
}
// The following -W flags need to be here because they have to be enabled regardless of WarningLevel. If
// they came before -Wno-everything (when WarningLevel is set to 0), they would have no effect.
builder.AppendSwitch("-Werror=newline-eof");
if (BufferAnalysis)
builder.AppendSwitch("-Wunsafe-buffer-usage");
if (ConsumptionAnalysis)
builder.AppendSwitch("-Wconsumed");
if (DocumentationAnalysis)
{
builder.AppendSwitch("-Wdocumentation");
builder.AppendSwitch("-Wdocumentation-pedantic");
}
if (!NullabilityAnalysis)
{
builder.AppendSwitch("-Wno-nullability");
builder.AppendSwitch("-Wno-nullability-completeness");
builder.AppendSwitch("-Wno-nullability-inferred-on-nested-type");
}
else
builder.AppendSwitch("-Wnullable-to-nonnull-conversion");
if (!TagAnalysis)
builder.AppendSwitch("-Wno-type-safety");
if (ThreadingAnalysis)
builder.AppendSwitch("-Wthread-safety");
if (!TrustAnalysis)
builder.AppendSwitch("-Wno-tcb-enforcement");
// TODO: https://github.com/vezel-dev/zig-sdk/issues/38
if (Deterministic)
{
builder.AppendSwitch("-Werror=date-time");
builder.AppendSwitch("-no-canonical-prefixes");
builder.AppendSwitchIfNotNull("-fdebug-compilation-dir ", GetWorkingDirectory() ?? ".");
}
}
foreach (var item in Sanitizers)
{
switch (item.ItemSpec.ToLowerInvariant())
{
case "undefined":
Log.LogWarning("The '{0}' sanitizer is controlled by '{1}'", item, nameof(ReleaseMode));
break;
case "thread" when isZig:
builder.AppendSwitch("-fsanitize-thread");
break;
default:
if (!isZig)
builder.AppendSwitch($"-fsanitize={item.ItemSpec}");
else
Log.LogWarning(
"The '{0}' sanitizer is not supported with '{1}={2}'",
item,
nameof(CompilerMode),
ZigCompilerMode.Zig);
break;
}
}
if (!LinkTimeOptimization)
builder.AppendSwitch("-fno-lto");
foreach (var define in (DefineConstants ?? string.Empty).Split([';'], SplitOptions))
{
var trimmed = define.Trim();
if (string.IsNullOrEmpty(trimmed))
continue;
builder.AppendSwitchIfNotNull("-D ", trimmed);
}
builder.AppendSwitchIfNotNull("-I ", GetWorkingDirectory() ?? ".");
builder.AppendSwitchIfNotNull("-I ", PublicIncludeDirectory);
foreach (var directory in LibraryIncludeDirectories)
builder.AppendSwitchIfNotNull("-isystem ", directory);
foreach (var directory in IncludeDirectories)
builder.AppendSwitchIfNotNull("-I ", directory);
if (!isZig)
foreach (var header in PreludeHeaders)
builder.AppendSwitchIfNotNull("-include ", header);
if (!EagerBinding)
builder.AppendSwitch(isZig ? "-z lazy" : "-Wl,-z,lazy");
if (!RelocationHardening)
builder.AppendSwitch(isZig ? "-z norelro" : "-Wl,-z,norelro");
if (_imageBase is { } ib)
builder.AppendSwitchIfNotNull(
isZig ? "--image-base 0x" : "-Wl,--image-base,0x", ib.ToString("x", _culture));
if (!DynamicImageBase)
builder.AppendSwitch(isZig ? "--no-dynamicbase" : "-Wl,--no-dynamicbase");
if (_stackSize is { } ss)
builder.AppendSwitchIfNotNull(isZig ? "--stack 0x" : "-Wl,-z,stack-size=0x", ss.ToString("x", _culture));
builder.AppendSwitch(isZig ? "-z origin" : "-Wl,-z,origin");
builder.AppendSwitchIfNotNull(isZig ? "-rpath " : "-Wl,-rpath,", "$ORIGIN");
builder.AppendSwitchIfNotNull(
isZig ? "--subsystem " : "-Wl,--subsystem,", _outputType == ZigOutputType.WinExe ? "windows" : "console");
// TODO: https://github.com/vezel-dev/zig-sdk/issues/8
builder.AppendFileNamesIfNotNull(Sources, delimiter: " ");
builder.AppendFileNamesIfNotNull(LibraryReferences, delimiter: " ");
foreach (var directory in LinkerDirectories)
builder.AppendSwitchIfNotNull("-L ", directory);
foreach (var library in LinkerReferences)
builder.AppendSwitchIfNotNull("-l ", library);
builder.AppendSwitchIfNotNull(isZig ? "-femit-bin=" : "-o ", OutputBinary);
if (!isZig)
builder.AppendSwitchIfNotNull("-gen-cdb-fragment-path ", CommandFragmentsDirectory);
if (isTest)
{
builder.AppendSwitch("--test-no-exec");
builder.AppendSwitchIfNotNull("--test-filter ", TestFilter);
}
return builder.ToString();
}
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigReleaseMode.cs | // SPDX-License-Identifier: 0BSD
namespace Vezel.Zig.Tasks;
public enum ZigReleaseMode
{
Fast,
Safe,
Small,
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigSymbolVisibility.cs | // SPDX-License-Identifier: 0BSD
namespace Vezel.Zig.Tasks;
public enum ZigSymbolVisibility
{
Default,
Hidden,
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigToolTask.cs | // SPDX-License-Identifier: 0BSD
using Microsoft.Build.Utilities;
namespace Vezel.Zig.Tasks;
public abstract class ZigToolTask : ToolTask
{
protected override sealed string ToolName =>
RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "zig.exe" : "zig";
protected override sealed string GenerateFullPathToTool()
{
return ToolExe;
}
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/ZigFormat.cs | // SPDX-License-Identifier: 0BSD
using Microsoft.Build.Framework;
using Microsoft.Build.Tasks;
namespace Vezel.Zig.Tasks;
[SuppressMessage("", "CA1819")]
public sealed class ZigFormat : ZigToolTask
{
[Required]
public string FormatterMode
{
get => _formatterMode.ToString();
set => _formatterMode = (ZigFormatterMode)Enum.Parse(typeof(ZigFormatterMode), value);
}
[Required]
public ITaskItem[] Sources { get; set; } = null!;
private ZigFormatterMode _formatterMode;
protected override string GenerateCommandLineCommands()
{
var builder = new CommandLineBuilderExtension();
builder.AppendSwitch("fmt");
if (_formatterMode == ZigFormatterMode.Check)
builder.AppendSwitch("--check");
builder.AppendFileNamesIfNotNull(Sources, " ");
return builder.ToString();
}
protected override bool HandleTaskExecutionErrors()
{
if (_formatterMode == ZigFormatterMode.Execute)
return base.HandleTaskExecutionErrors();
// In check mode, zig fmt will just print a list of files to standard output and exit with a non-zero code. This
// causes ToolTask.HandleTaskExecutionErrors to log an error notifying the user of the exit code. This can be a
// bit confusing to a user who is not already familiar with zig fmt's behavior. So, we try to present a more
// actionable error message to the user.
//
// Note that zig fmt will actually log errors if the files contain syntax errors, so we should not log our
// message in that case.
if (!HasLoggedErrors)
Log.LogError("The above files have incorrect code formatting (run the 'Format' target to fix them)");
return false;
}
}
|
0 | repos/zig-sdk/src | repos/zig-sdk/src/sdk/sdk.csproj | <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<AssemblyName>Vezel.Zig.Tasks</AssemblyName>
<DevelopmentDependency>true</DevelopmentDependency>
<IsPackable>true</IsPackable>
<IsTool>true</IsTool>
<NoWarn>
$(NoWarn);
NU5129
</NoWarn>
<PackageDescription>$(Description)
This package provides the MSBuild SDK and associated tasks.</PackageDescription>
<PackageId>Vezel.Zig.Sdk</PackageId>
<PackageType>MSBuildSdk</PackageType>
<RootNamespace>Vezel.Zig.Tasks</RootNamespace>
<SuppressDependenciesWhenPacking>true</SuppressDependenciesWhenPacking>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<None Include="build/**; content/**; Sdk/**"
Pack="true"
PackagePath="%(Identity)" />
</ItemGroup>
<ItemGroup>
<Watch Include="build/**; Sdk/**" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Build.Tasks.Core" />
</ItemGroup>
<!--
This target writes a global.json into src/samples which ensures that all the
sample projects will use the SDK package version we just built.
-->
<Target Name="_WriteGlobalJson"
DependsOnTargets="GetBuildVersion"
AfterTargets="Pack">
<PropertyGroup>
<_GlobalJson>
<![CDATA[
{
"$schema": "https://json.schemastore.org/global.json",
"msbuild-sdks": {
"Microsoft.Build.Traversal": "4.1.0",
"Vezel.Zig.Sdk": "$(Version)"
}
}
]]>
</_GlobalJson>
</PropertyGroup>
<WriteLinesToFile File="../samples/global.json"
Lines="$(_GlobalJson)"
Overwrite="true"
WriteOnlyWhenDifferent="true" />
</Target>
</Project>
|
0 | repos/zig-sdk/src/sdk | repos/zig-sdk/src/sdk/build/Vezel.Zig.Sdk.Items.props | <!-- SPDX-License-Identifier: 0BSD -->
<Project>
<ItemGroup>
<CHeader Include="**/*.h"
Exclude="$(DefaultItemExcludes); $(DefaultExcludesInProjectFolder)"
Watch="true" />
<CSource Include="**/*.c"
Exclude="$(DefaultItemExcludes); $(DefaultExcludesInProjectFolder)"
Watch="true" />
<CxxHeader Include="**/*.hxx"
Exclude="$(DefaultItemExcludes); $(DefaultExcludesInProjectFolder)"
Watch="true" />
<CxxSource Include="**/*.cxx"
Exclude="$(DefaultItemExcludes); $(DefaultExcludesInProjectFolder)"
Watch="true" />
<ZigSource Include="**/*.zig"
Exclude="$(DefaultItemExcludes); $(DefaultExcludesInProjectFolder)"
Watch="true" />
</ItemGroup>
</Project>
|
0 | repos/zig-sdk/src/sdk | repos/zig-sdk/src/sdk/build/Vezel.Zig.Sdk.Core.props | <!-- SPDX-License-Identifier: 0BSD -->
<Project>
<!--
We set this so that third-party build logic can determine whether it is
running in the context of a Vezel.Zig.Sdk-based project.
-->
<PropertyGroup>
<UsingVezelZigSdk>true</UsingVezelZigSdk>
</PropertyGroup>
</Project>
|
0 | repos/zig-sdk/src/sdk | repos/zig-sdk/src/sdk/build/Vezel.Zig.Sdk.Build.props | <!-- SPDX-License-Identifier: 0BSD -->
<Project>
<PropertyGroup>
<CoreCompileDependsOn>
_CheckCompileCommands
</CoreCompileDependsOn>
</PropertyGroup>
<!--
Unlike C and C++, Zig just needs a single root source file to perform the
compilation. If we add more, the compiler will error out.
By convention, we will say that libraries should name their root source file
the same as their package and executables should name it main. Users can
override this in their project file if they wish.
-->
<ItemGroup Condition="'$(DefaultSources)' == 'true'">
<Compile Include="**/*.c"
Excludes="$(DefaultItemExcludes); $(DefaultExcludesInProjectFolder)"
Condition="'$(Language)' == 'C'" />
<Compile Include="**/*.cxx"
Excludes="$(DefaultItemExcludes); $(DefaultExcludesInProjectFolder)"
Condition="'$(Language)' == 'Cxx'" />
<Compile Include="$(AssemblyName).zig"
Condition="'$(Language)' == 'Zig' and '$(OutputType)' == 'Library'" />
<Compile Include="main.zig"
Condition="'$(Language)' == 'Zig' and '$(OutputType)' != 'Library'" />
</ItemGroup>
</Project>
|
0 | repos/zig-sdk/src/sdk | repos/zig-sdk/src/sdk/Sdk/Sdk.props | <!-- SPDX-License-Identifier: 0BSD -->
<Project>
<!--
This is the file that gets imported at the beginning of a user's project
file when they use the Sdk attribute. Note that if the user has a
Directory.Build.props and/or Directory.Solution.props file, those come
first.
-->
<Import Project="Sdk.props"
Sdk="Microsoft.NET.Sdk" />
<Import Project="../build/Vezel.Zig.Sdk.Core.props" />
<Import Project="../build/Vezel.Zig.Sdk.Items.props" />
<Import Project="../build/Vezel.Zig.Sdk.Build.props" />
</Project>
|
0 | repos/zig-sdk | repos/zig-sdk/.vscode/extensions.json | {
"recommendations": [
"cake-build.cake-vscode",
"chdsbd.github-code-owners",
"codezombiech.gitignore",
"davidanson.vscode-markdownlint",
"editorconfig.editorconfig",
"github.vscode-github-actions",
"github.vscode-pull-request-github",
"jock.svg",
"llvm-vs-code-extensions.vscode-clangd",
"ms-dotnettools.csdevkit",
"ms-dotnettools.csharp",
"redhat.vscode-xml",
"redhat.vscode-yaml",
"tintoy.msbuild-project-tools",
"yy0931.gitconfig-lsp",
"ziglang.vscode-zig"
]
}
|
0 | repos/zig-sdk | repos/zig-sdk/.vscode/settings.json | {
"files.exclude": {
"out": true,
"src/samples/**/bin": true,
"src/samples/**/obj": true,
"src/samples/global.json": true,
"**/.idea": true,
"**/.vs": true,
"**/node_modules": true
}
}
|
0 | repos/zig-sdk | repos/zig-sdk/doc/package.json | {
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"engines": {
"node": ">=20.0.0"
},
"devDependencies": {
"markdownlint-cli2": "0.13.0"
},
"scripts": {
"build": "npx markdownlint-cli2"
}
}
|
0 | repos/zig-sdk | repos/zig-sdk/doc/usage.md | # Usage
To use the Zig SDK, first make sure that you have the
[.NET 6 SDK](https://dotnet.microsoft.com/download/dotnet/6.0) (or later)
installed.
Next, create a
[`global.json`](https://docs.microsoft.com/en-us/dotnet/core/tools/global-json)
file in the root of your repository and add an entry for the
[Vezel.Zig.Sdk](https://www.nuget.org/packages/Vezel.Zig.Sdk) package under the
`msbuild-sdks` property:
```json
{
"msbuild-sdks": {
"Vezel.Zig.Sdk": "x.y.z"
}
}
```
(Replace `x.y.z` with the actual NuGet package version.)
Next, create a project file. A library project is as simple as:
```xml
<Project Sdk="Vezel.Zig.Sdk" />
```
An executable project looks like:
```xml
<Project Sdk="Vezel.Zig.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
</PropertyGroup>
</Project>
```
The project file extension determines the language your code will be compiled
as - `.cproj` for C, `.cxxproj` for C++, and `.zigproj` for Zig.
The convention used by the Zig SDK is that C projects should use a `.c`
extension for source files and `.h` for header files, while C++ projects should
use `.cxx` and `.hxx`. Zig projects use `.zig`.
For C/C++ projects, it does not matter what you name your source and header
files. For Zig executable projects, your root source file should be named
`main.zig`. Zig library projects should use the project name for the root source
file; that is, if your project file is `mylib.zigproj`, your root source file
should be `mylib.zig`.
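If you prefer a different layout, you can override these defaults in the
project file. As a minimal, illustrative sketch (the `src/root.zig` path is a
hypothetical choice, not a requirement), a Zig library project could disable
the default `Compile` includes and point at its own root source file:
```xml
<Project Sdk="Vezel.Zig.Sdk">
  <PropertyGroup>
    <!-- Turn off the SDK's default Compile item includes. -->
    <DefaultSources>false</DefaultSources>
  </PropertyGroup>
  <ItemGroup>
    <!-- Explicitly select the single root source file instead. -->
    <Compile Include="src/root.zig" />
  </ItemGroup>
</Project>
```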
Once you have written some code, you can use `dotnet build`, `dotnet run`, etc.
|
0 | repos/zig-sdk | repos/zig-sdk/doc/index.md | # Home
The **Zig SDK** is an
[MSBuild SDK](https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-use-project-sdk)
that augments the .NET SDK with the ability to build Zig, C, and C++ projects.
With support for multiple programming languages, cross-compilation, NuGet
packaging, and more, the **Zig SDK** makes it trivial to author native
components as part of your .NET solution - without all the hassle that is
usually part and parcel of building and packaging native code. These features
are powered by the [Zig](https://ziglang.org) toolchain.
## Features
Here are some of the **Zig SDK** highlights:
* **Multiple programming languages:** Although Zig is a modern and pleasant
systems programming language, you might prefer to use C or C++ instead. As it
happens, the Zig compiler also embeds a full C and C++ compiler - namely,
[Clang](https://clang.llvm.org). So, whichever language you prefer, the
**Zig SDK** has you covered.
* **Cross-compilation:** Thanks to the Zig compiler's excellent cross-targeting
support, cross-compilation is a first-class citizen in the **Zig SDK**. Gone
are the days of having to do overly complicated cross toolchain setup, or
resorting to building on multiple platforms for releases - just type
`dotnet build` to compile for all targets supported by your project.
* **Binary emulator support:** When cross-compiling, the **Zig SDK** will look
at the host and target platforms and try to pick an appropriate emulator. In
the majority of cases, this allows you to run and unit test the foreign
binary. [Darling](https://darlinghq.org), [QEMU](https://qemu.org),
[Wine](https://winehq.org), and
[WSL](https://docs.microsoft.com/en-us/windows/wsl) are recognized.
* **Unit testing:** The Zig language provides built-in unit testing constructs.
The **Zig SDK** allows you to run your project's unit tests with the familiar
`dotnet test` command. Test name filters are supported - e.g.
`dotnet test --filter foo`.
* **Code change monitoring:** The **Zig SDK** integrates with `dotnet watch` so
that e.g. `dotnet watch build`, `dotnet watch run`, and `dotnet watch test`
work as expected, enabling a rapid development loop.
* **Sensible NuGet packaging:** Out of the box, `dotnet pack` with the
**Zig SDK** will produce NuGet packages containing cross-built binaries for
all platforms that your project supports. Also, your public C and C++ header
files will be bundled, as will your Zig source code. This makes the resulting
NuGet package easy to consume both in .NET projects and in other projects
using the **Zig SDK**.
* **Multi-project solutions:**
[Soon™.](https://github.com/vezel-dev/zig-sdk/issues/8)
* **Editor integration:** The **Zig SDK** can generate files needed by language
servers, resulting in an IDE-like experience when editing code. For C/C++,
[clangd](https://clangd.llvm.org) is fully supported, while for Zig projects,
there is limited [ZLS](https://github.com/zigtools/zls) support.
Please note that the **Zig SDK** is *not* intended to be a full replacement for
the [Zig Build System](https://ziglang.org/learn/build-system). The goal of the
**Zig SDK** is specifically to make it simple to integrate Zig, C, and C++
components into the .NET ecosystem. For that reason, the **Zig SDK** has no
support for platforms that Zig supports but that .NET does not (yet) run on,
such as `linux-riscv64`. The level of configuration that is possible for C and
C++ is also somewhat limited compared to most build systems that support those
languages.
|
0 | repos/zig-sdk | repos/zig-sdk/doc/toc.md | # Table of Contents
* [Home](index.md)
* [Usage](usage.md)
## Configuration
* [Editor](configuration/editor.md)
* [Properties](configuration/properties.md)
* [Items](configuration/items.md)
|
0 | repos/zig-sdk | repos/zig-sdk/doc/package-lock.json | {
"name": "doc",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"devDependencies": {
"markdownlint-cli2": "0.13.0"
},
"engines": {
"node": ">=20.0.0"
}
},
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dev": true,
"dependencies": {
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.stat": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
"dev": true,
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.walk": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
"dev": true,
"dependencies": {
"@nodelib/fs.scandir": "2.1.5",
"fastq": "^1.6.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/@sindresorhus/merge-streams": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz",
"integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==",
"dev": true,
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"dev": true
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"dev": true,
"dependencies": {
"fill-range": "^7.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true,
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/fast-glob": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
"integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
"dev": true,
"dependencies": {
"@nodelib/fs.stat": "^2.0.2",
"@nodelib/fs.walk": "^1.2.3",
"glob-parent": "^5.1.2",
"merge2": "^1.3.0",
"micromatch": "^4.0.4"
},
"engines": {
"node": ">=8.6.0"
}
},
"node_modules/fastq": {
"version": "1.17.1",
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz",
"integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==",
"dev": true,
"dependencies": {
"reusify": "^1.0.4"
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/glob-parent": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/globby": {
"version": "14.0.1",
"resolved": "https://registry.npmjs.org/globby/-/globby-14.0.1.tgz",
"integrity": "sha512-jOMLD2Z7MAhyG8aJpNOpmziMOP4rPLcc95oQPKXBazW82z+CEgPFBQvEpRUa1KeIMUJo4Wsm+q6uzO/Q/4BksQ==",
"dev": true,
"dependencies": {
"@sindresorhus/merge-streams": "^2.1.0",
"fast-glob": "^3.3.2",
"ignore": "^5.2.4",
"path-type": "^5.0.0",
"slash": "^5.1.0",
"unicorn-magic": "^0.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ignore": {
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz",
"integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==",
"dev": true,
"engines": {
"node": ">= 4"
}
},
"node_modules/is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-glob": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"dependencies": {
"is-extglob": "^2.1.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"engines": {
"node": ">=0.12.0"
}
},
"node_modules/js-yaml": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dev": true,
"dependencies": {
"argparse": "^2.0.1"
},
"bin": {
"js-yaml": "bin/js-yaml.js"
}
},
"node_modules/jsonc-parser": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.1.tgz",
"integrity": "sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==",
"dev": true
},
"node_modules/linkify-it": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz",
"integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==",
"dev": true,
"dependencies": {
"uc.micro": "^2.0.0"
}
},
"node_modules/markdown-it": {
"version": "14.1.0",
"resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz",
"integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==",
"dev": true,
"dependencies": {
"argparse": "^2.0.1",
"entities": "^4.4.0",
"linkify-it": "^5.0.0",
"mdurl": "^2.0.0",
"punycode.js": "^2.3.1",
"uc.micro": "^2.1.0"
},
"bin": {
"markdown-it": "bin/markdown-it.mjs"
}
},
"node_modules/markdownlint": {
"version": "0.34.0",
"resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.34.0.tgz",
"integrity": "sha512-qwGyuyKwjkEMOJ10XN6OTKNOVYvOIi35RNvDLNxTof5s8UmyGHlCdpngRHoRGNvQVGuxO3BJ7uNSgdeX166WXw==",
"dev": true,
"dependencies": {
"markdown-it": "14.1.0",
"markdownlint-micromark": "0.1.9"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/DavidAnson"
}
},
"node_modules/markdownlint-cli2": {
"version": "0.13.0",
"resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.13.0.tgz",
"integrity": "sha512-Pg4nF7HlopU97ZXtrcVISWp3bdsuc5M0zXyLp2/sJv2zEMlInrau0ZKK482fQURzVezJzWBpNmu4u6vGAhij+g==",
"dev": true,
"dependencies": {
"globby": "14.0.1",
"js-yaml": "4.1.0",
"jsonc-parser": "3.2.1",
"markdownlint": "0.34.0",
"markdownlint-cli2-formatter-default": "0.0.4",
"micromatch": "4.0.5"
},
"bin": {
"markdownlint-cli2": "markdownlint-cli2.js"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/DavidAnson"
}
},
"node_modules/markdownlint-cli2-formatter-default": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/markdownlint-cli2-formatter-default/-/markdownlint-cli2-formatter-default-0.0.4.tgz",
"integrity": "sha512-xm2rM0E+sWgjpPn1EesPXx5hIyrN2ddUnUwnbCsD/ONxYtw3PX6LydvdH6dciWAoFDpwzbHM1TO7uHfcMd6IYg==",
"dev": true,
"peerDependencies": {
"markdownlint-cli2": ">=0.0.4"
}
},
"node_modules/markdownlint-micromark": {
"version": "0.1.9",
"resolved": "https://registry.npmjs.org/markdownlint-micromark/-/markdownlint-micromark-0.1.9.tgz",
"integrity": "sha512-5hVs/DzAFa8XqYosbEAEg6ok6MF2smDj89ztn9pKkCtdKHVdPQuGMH7frFfYL9mLkvfFe4pTyAMffLbjf3/EyA==",
"dev": true,
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/DavidAnson"
}
},
"node_modules/mdurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
"integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==",
"dev": true
},
"node_modules/merge2": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
"dev": true,
"engines": {
"node": ">= 8"
}
},
"node_modules/micromatch": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
"dev": true,
"dependencies": {
"braces": "^3.0.2",
"picomatch": "^2.3.1"
},
"engines": {
"node": ">=8.6"
}
},
"node_modules/path-type": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz",
"integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==",
"dev": true,
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"engines": {
"node": ">=8.6"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/punycode.js": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz",
"integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==",
"dev": true,
"engines": {
"node": ">=6"
}
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/reusify": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
"integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
"dev": true,
"engines": {
"iojs": ">=1.0.0",
"node": ">=0.10.0"
}
},
"node_modules/run-parallel": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"queue-microtask": "^1.2.2"
}
},
"node_modules/slash": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
"integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==",
"dev": true,
"engines": {
"node": ">=14.16"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"dependencies": {
"is-number": "^7.0.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/uc.micro": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
"integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==",
"dev": true
},
"node_modules/unicorn-magic": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz",
"integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==",
"dev": true,
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
}
}
}
|
0 | repos/zig-sdk | repos/zig-sdk/doc/.markdownlint.json | {
"$schema": "https://raw.githubusercontent.com/DavidAnson/markdownlint/main/schema/markdownlint-config-schema.json",
"extends": "../.markdownlint.json",
"MD033": {
"allowed_elements": []
}
}
|
0 | repos/zig-sdk/doc | repos/zig-sdk/doc/configuration/editor.md | # Editor
The Zig SDK is capable of integrating with
[ZLS](https://github.com/zigtools/zls) and [clangd](https://clangd.llvm.org).
This means that any editor with support for those language servers (e.g.
[Visual Studio Code](https://code.visualstudio.com)) can be used to edit a
project using the Zig SDK.
## ZLS
Start by
[installing ZLS](https://github.com/zigtools/zls/blob/master/README.md#installation).
The
[Visual Studio Code extension](https://marketplace.visualstudio.com/items?itemName=AugusteRame.zls-vscode)
is highly recommended.
At the moment, no further configuration is necessary.
## clangd
Start by
[installing clangd](https://clangd.llvm.org/installation). The
[Visual Studio Code extension](https://marketplace.visualstudio.com/items?itemName=llvm-vs-code-extensions.vscode-clangd)
is highly recommended.
You have to tell clangd where to find the `compile_commands.json` compilation
database. The Zig SDK creates these files in `IntermediateOutputPath`, e.g.
`obj/Debug/linux-x64` if you build with `Configuration=Debug` and
`RuntimeIdentifier=linux-x64`. Due to the nature of C/C++ compilation, these
compilation databases necessarily depend on build flags. So, you will have to
pick one of them to use for your editing experience. The good news is that you
can change which compilation database you use at any point if you need to.
To tell clangd where to find the compilation database, create a file called
`.clangd` in your project directory with the following contents:
```yaml
CompileFlags:
CompilationDatabase: obj/Debug/linux-x64
```
(You may want to add this file to `.gitignore`.)
You can now restart the clangd language server. You should start to see rich
editor features like code completion, hover widgets, navigation, etc.
|
0 | repos/zig-sdk/doc | repos/zig-sdk/doc/configuration/items.md | # MSBuild Items
The following
[MSBuild items](https://docs.microsoft.com/en-us/visualstudio/msbuild/msbuild-items)
are used by the Zig SDK:
* `Compile`: Source code files passed to the Zig compiler. By default, the Zig
SDK will populate this item type according to the project type.
* `PreludeHeader`: C/C++ header files that will be automatically `#include`d in
every C/C++ source file by way of Clang's `-include` flag.
* `IncludeDirectory`: Header include directories passed to the compiler with the
`-I` flag. Note that this applies to Zig as well, not just C/C++.
* `LibraryIncludeDirectory`: Header include directories passed to the compiler
with the `-isystem` flag. Note that this applies to Zig as well, not just
C/C++.
* `LinkerDirectory`: Library search directories passed to the linker with the
`-L` flag.
* `LinkerReference`: Names of native libraries that should be linked using the
`-l` flag. These can be either static or dynamic.
* `LibraryReference`: Direct paths to native library files that should be
linked, ignoring library search directories. These can be either static or
dynamic.
* `CHeader`: Prepopulated by the Zig SDK with all files in the project directory
ending in `.h`.
* `CSource`: Prepopulated by the Zig SDK with all files in the project directory
ending in `.c`.
* `CxxHeader`: Prepopulated by the Zig SDK with all files in the project
directory ending in `.hxx`.
* `CxxSource`: Prepopulated by the Zig SDK with all files in the project
directory ending in `.cxx`.
* `ZigSource`: Prepopulated by the Zig SDK with all files in the project
directory ending in `.zig`.
* `Watch`: Files that are monitored by `dotnet watch` for code changes. The Zig
SDK will automatically populate this with all C, C++, and Zig source and
header files in the project directory.
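As an illustrative sketch (the directory names and the `z` library below are
hypothetical), a C project might combine several of these items in its
project file like so:
```xml
<Project Sdk="Vezel.Zig.Sdk">
  <ItemGroup>
    <!-- Extra header search path passed to the compiler with -I. -->
    <IncludeDirectory Include="include" />
    <!-- Header force-included into every C/C++ source file via -include. -->
    <PreludeHeader Include="prelude.h" />
    <!-- Library search path (-L) and a library to link (-l). -->
    <LinkerDirectory Include="libs" />
    <LinkerReference Include="z" />
  </ItemGroup>
</Project>
```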
|
0 | repos/zig-sdk/doc | repos/zig-sdk/doc/configuration/properties.md | # Properties
The
[MSBuild properties](https://docs.microsoft.com/en-us/visualstudio/msbuild/msbuild-properties)
described on this page are used by the Zig SDK. These properties can all be set
in `PropertyGroup`s in your project file. Most of them should have sensible
defaults for new projects; a few (such as `TreatWarningsAsErrors` and
`SymbolVisibility`) have defaults that are not quite as sensible for unfortunate
historical reasons.
## Project Setup
* `AssemblyName`: Name of the project. By default, this is set to the file name
of the project file. Used to compute the final binary name (e.g. `foo` becomes
`libfoo.so`).
* `OutputType` (`Exe`, `WinExe`, `Library`): Output binary type. When targeting
Windows and building executables, `Exe` and `WinExe` will target the CUI and
GUI subsystems, respectively. Defaults to `Library`.
* `IsTestable` (`true`, `false`): Enable/disable `dotnet test` for Zig projects.
Defaults to `true`.
* `IsPackable` (`true`, `false`): Enable/disable `dotnet pack`. Defaults to
`true`.
* `IsPublishable` (`true`, `false`): Enable/disable `dotnet publish`. Defaults
to `true`.
* `DefaultSources` (`true`, `false`): Enable/disable default `Compile` item
includes. Defaults to `true`.
* `Deterministic` (`true`, `false`): Enable/disable deterministic builds. Among
other things, this will try to prevent the compiler from using absolute paths
and will prevent usage of certain problematic language features like
`__TIME__`. Defaults to `true`.
* `EditorSupport` (`true`, `false`): Enable/disable editor support. For C/C++
projects, this means generating a `compile_commands.json` compilation database
in `IntermediateOutputPath`. Defaults to `true`.
* `FormatOnBuild` (`true`, `false`): Enable/disable formatting source code into
canonical style on build in Zig projects. Defaults to `false`.
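As a hypothetical example, a command-line tool that should not be packed or
published might configure its project setup like this:
```xml
<PropertyGroup>
  <OutputType>Exe</OutputType>
  <IsPackable>false</IsPackable>
  <IsPublishable>false</IsPublishable>
  <FormatOnBuild>true</FormatOnBuild>
</PropertyGroup>
```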
## Package Information
* `Product`: Human-friendly product name for the package. Defaults to the value
of `AssemblyName`.
* `Authors`: A list of package authors. Defaults to the value of `AssemblyName`.
* `Description`: Brief description of the package. Defaults to
`Package Description`.
* `Version`: Package version in [Semantic Versioning 2.0.0](https://semver.org)
form. Defaults to `1.0.0`.
* `Copyright`: Copyright notice for the package. Unset by default.
* `PackageLicenseExpression`: [SPDX](https://spdx.org/licenses) license
identifier for the package. Unset by default.
* `PackageProjectUrl`: Website URL associated with the package. Unset by
default.
* `RepositoryUrl`: Source code repository URL for the package. Unset by default.
## Preprocessor
* `PublicHeadersPath`: Can be set to a directory containing public C/C++
headers. These headers will be included in the NuGet package and will flow to
dependent projects. This directory will also be treated as an
`IncludeDirectory`. Unset by default.
* `DefineConstants`: A comma-separated list of preprocessor macros to define.
Each entry can be a simple name or an assignment of the form `NAME=VALUE`.
These macros are passed to the compiler with the `-D` flag. Note that this
applies to Zig as well, not just C/C++.
* `CompilerDefines` (`true`, `false`): Enable/disable adding some implicit
`DefineConstants` macros that describe the Zig compiler version. Defaults to
`true`.
* `PlatformDefines` (`true`, `false`): Enable/disable adding some implicit
`DefineConstants` macros that describe the target platform characteristics.
Defaults to `true`.
* `ConfigurationDefines` (`true`, `false`): Enable/disable adding some implicit
`DefineConstants` macros that describe the build configuration (`Debug`,
`ReleaseFast`, etc). Defaults to `true`.
* `PackageDefines` (`true`, `false`): Enable/disable adding some implicit
`DefineConstants` macros that describe the project being built. Defaults to
`true`.
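For example, a library project might expose its public headers and define a
couple of extra macros. The macro names and values here are made up, and the
separator follows the `DefineConstants` format described above:
```xml
<PropertyGroup>
  <PublicHeadersPath>include</PublicHeadersPath>
  <DefineConstants>$(DefineConstants),MYLIB_TRACE,MYLIB_BUFFER_SIZE=4096</DefineConstants>
</PropertyGroup>
```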
## Language Features
* `ZigVersion` (`major.minor.patch`): The version of the Zig compiler toolset to
use. Defaults to the latest version known to the Zig SDK package that is in
use.
* `LanguageStandard`: The language standard used for C/C++ projects. Passed to
Clang's `-std` flag. Defaults to the latest standards known to the compiler
version that `ZigVersion` defaults to.
* `AccessControl` (`true`, `false`): Enable/disable access control in C++
projects. Defaults to `true`.
* `BlockExtensions` (`true`, `false`): Enable/disable Clang's block language
extensions. Defaults to `false`.
* `CxxExceptions` (`true`, `false`): Enable/disable C++ exceptions. In C
projects, this controls whether the C code will be unwindable by C++
exceptions. Defaults to `true`.
* `AsyncExceptions` (`true`, `false`): Enable/disable the ability to catch
[SEH](https://learn.microsoft.com/en-us/cpp/cpp/structured-exception-handling-c-cpp)
exceptions with standard `try`/`catch` statements. This only applies when
targeting Windows. Defaults to `false`.
* `CxxReflection` (`true`, `false`): Enable/disable generating C++ run-time type
information. This feature is required for some uses of `dynamic_cast`.
Defaults to `true`.
* `MicrosoftExtensions` (`true`, `false`): Enable/disable a variety of
Microsoft C/C++ extensions. Defaults to `false`, but note that the compiler
itself always enables some parts of this when targeting Windows as Win32
headers require it.
* `UnicodeEnvironment` (`true`, `false`): Enable/disable compiling for a Unicode
environment when targeting Windows in C/C++ projects. This causes the
`UNICODE` and `_UNICODE` macros to be defined, and makes it so that
[`wmain`](https://learn.microsoft.com/en-us/cpp/c-language/using-wmain) and
[`wWinMain`](https://learn.microsoft.com/en-us/windows/win32/learnwin32/winmain--the-application-entry-point)
entry point functions must be used when building executables. Defaults to
`false`.
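A hypothetical C++ project that wants a specific standard and no exception
support could set the following (`c++20` is just an example value; the
property takes whatever Clang's `-std` flag accepts):
```xml
<PropertyGroup>
  <LanguageStandard>c++20</LanguageStandard>
  <CxxExceptions>false</CxxExceptions>
</PropertyGroup>
```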
## Static Analysis
* `EnforceCodeStyleInBuild` (`true`, `false`): Enable/disable checking that
source code is in the canonical style during build in Zig projects. Defaults
to `false`.
* `BufferAnalysis` (`true`, `false`): Enable/disable static analysis with
unsafe buffer annotations in C/C++ projects. Defaults to `false`.
* `ConsumptionAnalysis` (`true`, `false`): Enable/disable static analysis with
[consumption and type state annotations](https://clang.llvm.org/docs/AttributeReference.html#consumed-annotation-checking)
in C/C++ projects. Defaults to `true`.
* `DocumentationAnalysis` (`true`, `false`): Enable/disable
[Doxygen](https://doxygen.nl) documentation comment checking in C/C++
projects. Defaults to `false`.
* `NullabilityAnalysis` (`true`, `false`): Enable/disable static analysis with
[nullability annotations](https://clang.llvm.org/docs/analyzer/developer-docs/nullability.html)
in C/C++ projects. Defaults to `true`.
* `TagAnalysis` (`true`, `false`): Enable/disable static analysis with
[type tag annotations](https://clang.llvm.org/docs/AttributeReference.html#type-safety-checking)
in C/C++ projects. Defaults to `true`.
* `ThreadingAnalysis` (`true`, `false`): Enable/disable static analysis with
[thread safety annotations](https://clang.llvm.org/docs/ThreadSafetyAnalysis.html)
in C/C++ projects. Defaults to `true`.
* `TrustAnalysis` (`true`, `false`): Enable/disable static analysis with
[trusted computing base annotations](https://clang.llvm.org/docs/AttributeReference.html#enforce-tcb)
in C/C++ projects. Defaults to `true`.
* `WarningLevel` (`0`-`4`): How aggressively the compiler should analyze C/C++
projects for potentially problematic code. `0` disables warnings completely;
`4` enables all warnings, including a few controversial ones. Defaults to `3`.
* `DisableWarnings`: A comma-separated list of
[warning names](https://clang.llvm.org/docs/DiagnosticsReference.html) (e.g.
`cast-align`) to disable in C/C++ projects. Unset by default.
* `TreatWarningsAsErrors` (`true`, `false`): Enable/disable reporting warnings
as errors in C/C++ projects. Defaults to `false`.
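For instance, to opt into the strictest warnings while silencing one of them,
a project might use something like this (values are illustrative):
```xml
<PropertyGroup>
  <WarningLevel>4</WarningLevel>
  <DisableWarnings>cast-align</DisableWarnings>
  <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
```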
## Code Generation
* `Configuration` (`Debug`, `Release`): Specifies the overarching configuration.
When `Release` is specified, `ReleaseMode` comes into effect. Defaults to
`Debug`. Usually specified by the user as e.g. `dotnet build -c Release`.
* `DebugSymbols` (`true`, `false`): Enable/disable emitting debug symbols.
Defaults to `true` if `Configuration` is `Debug`; otherwise, `false`.
* `ReleaseMode` (`Fast`, `Safe`, `Small`): The
[build mode](https://ziglang.org/documentation/master/#Build-Mode) to use when
`Configuration` is set to `Release`. Defaults to `Fast`.
* `FastMath` (`true`, `false`): Enable/disable certain lossy floating point
optimizations that may not be standards-compliant. Defaults to `false`.
* `LinkTimeOptimization` (`true`, `false`): Enable/disable link-time
optimization. Defaults to `true`.
* `SymbolExports` (`Used`, `All`): Specifies whether to export all public
symbols or only those that are needed to link successfully. This only applies
when building executables. Defaults to `Used`.
* `SymbolVisibility` (`Default`, `Hidden`): Specifies the symbol visibility in
C/C++ projects when `__attribute__((visibility(...)))` is not specified.
`Default` (the default 😉) means public, while `Hidden` means private.
* `AllowUndefinedSymbols` (`true`, `false`): Enable/disable permitting undefined
symbols when linking. This usually only applies when building libraries.
Defaults to `false`.
* `EagerBinding` (`true`, `false`): Enable/disable eager binding of symbols when
performing dynamic linking at run time. Eager binding has security benefits,
especially in combination with `RelocationHardening`. It is also more reliable
if calling external functions from signal handlers. Defaults to `true`.
* `RelocationHardening` (`true`, `false`): Enable/disable marking relocations as
read-only. This has security benefits, especially in combination with
`EagerBinding`. Defaults to `true`.
* `ImageBase`: The location in memory that the binary should be loaded at. Only
takes effect at run time if `DynamicImageBase` is `false`. Unset by default.
* `DynamicImageBase` (`true`, `false`): Enable/disable
[ASLR](https://en.wikipedia.org/wiki/Address_space_layout_randomization), i.e.
randomization of the image base at run time. This only applies when targeting
Windows. Defaults to `true`.
* `StackSize`: Sets the stack size for the main thread. This only applies when
building executables. Unset by default.
* `Sanitizers`: A semicolon-separated list of
[sanitizers](https://github.com/google/sanitizers) to instrument code with.
Currently, only `thread` is supported. Unset by default.
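A hypothetical size-sensitive library might tune code generation as follows
(these values are illustrative, not recommendations):
```xml
<PropertyGroup>
  <ReleaseMode>Small</ReleaseMode>
  <LinkTimeOptimization>true</LinkTimeOptimization>
  <SymbolVisibility>Hidden</SymbolVisibility>
</PropertyGroup>
```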
## Cross-Compilation
* `RuntimeIdentifier`: Specifies the runtime identifier (i.e. platform) to
target. When unset, `Build` and `Clean` will run for all runtime identifiers
specified in `RuntimeIdentifiers`. Usually specified by the user as e.g.
`dotnet build -r linux-x64`. Unset by default.
* `RuntimeIdentifiers`: A semicolon-separated list of runtime identifiers that
the project supports. All targets in this list will be cross-compiled as
necessary. Defaults to all targets that the Zig compiler has known-good
support for.
* `UseMicrosoftAbi` (`true`, `false`): Enable/disable using the Microsoft ABI
when targeting Windows. This may be necessary when linking to static libraries
containing C++ code that was compiled for the Microsoft ABI. Note that it is
currently not possible to cross-compile from non-Windows platforms when using
the Microsoft ABI. Unset by default.
* `UseEmulator` (`true`, `false`): Enable/disable usage of an appropriate binary
emulator when cross-compiling. Defaults to `true`.
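For example, a project that only supports a handful of targets and does not
want emulated runs when cross-compiling could set the following (the runtime
identifiers shown are just examples):
```xml
<PropertyGroup>
  <RuntimeIdentifiers>linux-x64;linux-arm64;win-x64</RuntimeIdentifiers>
  <UseEmulator>false</UseEmulator>
</PropertyGroup>
```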
|
0 | repos | repos/hello-algo-zig/README.md | # Hello-Algo-Zig
- [**Zig**](https://ziglang.org/) implementations of the code examples from the well-known open-source project [krahets/hello-algo](https://github.com/krahets/hello-algo) <img src="https://img.shields.io/github/stars/krahets/hello-algo?style=social"/> about data structures and algorithms.
- Go read this great open-access e-book now -> [ hello-algo.com |《Hello, Algorithm》|《 Hello,算法 》](https://www.hello-algo.com/).
|
0 | repos | repos/hello-algo-zig/build.zig | // File: build.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
// Zig Version: 0.11.0-dev.3944+2e424e019
// Zig Build Command: zig build -Doptimize=ReleaseFast
// Zig Run Command: zig build run_* -Doptimize=ReleaseFast
pub fn build(b: *std.Build) void {
const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});
const group_name_path = .{
// Source File: "chapter_computational_complexity/time_complexity.zig"
// Run Command: zig build run_time_complexity -Doptimize=ReleaseFast
.{ .name = "time_complexity", .path = "chapter_computational_complexity/time_complexity.zig" },
// Source File: "chapter_computational_complexity/worst_best_time_complexity.zig"
// Run Command: zig build run_worst_best_time_complexity -Doptimize=ReleaseFast
.{ .name = "worst_best_time_complexity", .path = "chapter_computational_complexity/worst_best_time_complexity.zig" },
// Source File: "chapter_computational_complexity/space_complexity.zig"
// Run Command: zig build run_space_complexity -Doptimize=ReleaseFast
.{ .name = "space_complexity", .path = "chapter_computational_complexity/space_complexity.zig" },
// Source File: "chapter_array_and_linkedlist/array.zig"
// Run Command: zig build run_array -Doptimize=ReleaseFast
.{ .name = "array", .path = "chapter_array_and_linkedlist/array.zig" },
// Source File: "chapter_array_and_linkedlist/linked_list.zig"
// Run Command: zig build run_linked_list -Doptimize=ReleaseFast
.{ .name = "linked_list", .path = "chapter_array_and_linkedlist/linked_list.zig" },
// Source File: "chapter_array_and_linkedlist/list.zig"
// Run Command: zig build run_list -Doptimize=ReleaseFast
.{ .name = "list", .path = "chapter_array_and_linkedlist/list.zig" },
// Source File: "chapter_array_and_linkedlist/my_list.zig"
// Run Command: zig build run_my_list -Doptimize=ReleaseFast
.{ .name = "my_list", .path = "chapter_array_and_linkedlist/my_list.zig" },
// Source File: "chapter_stack_and_queue/stack.zig"
// Run Command: zig build run_stack -Doptimize=ReleaseFast
.{ .name = "stack", .path = "chapter_stack_and_queue/stack.zig" },
// Source File: "chapter_stack_and_queue/linkedlist_stack.zig"
// Run Command: zig build run_linkedlist_stack -Doptimize=ReleaseFast
.{ .name = "linkedlist_stack", .path = "chapter_stack_and_queue/linkedlist_stack.zig" },
// Source File: "chapter_stack_and_queue/array_stack.zig"
// Run Command: zig build run_array_stack -Doptimize=ReleaseFast
.{ .name = "array_stack", .path = "chapter_stack_and_queue/array_stack.zig" },
// Source File: "chapter_stack_and_queue/queue.zig"
// Run Command: zig build run_queue -Doptimize=ReleaseFast
.{ .name = "queue", .path = "chapter_stack_and_queue/queue.zig" },
// Source File: "chapter_stack_and_queue/array_queue.zig"
// Run Command: zig build run_array_queue -Doptimize=ReleaseFast
.{ .name = "array_queue", .path = "chapter_stack_and_queue/array_queue.zig" },
// Source File: "chapter_stack_and_queue/linkedlist_queue.zig"
// Run Command: zig build run_linkedlist_queue -Doptimize=ReleaseFast
.{ .name = "linkedlist_queue", .path = "chapter_stack_and_queue/linkedlist_queue.zig" },
// Source File: "chapter_stack_and_queue/deque.zig"
// Run Command: zig build run_deque -Doptimize=ReleaseFast
.{ .name = "deque", .path = "chapter_stack_and_queue/deque.zig" },
// Source File: "chapter_stack_and_queue/linkedlist_deque.zig"
// Run Command: zig build run_linkedlist_deque -Doptimize=ReleaseFast
.{ .name = "linkedlist_deque", .path = "chapter_stack_and_queue/linkedlist_deque.zig" },
// Source File: "chapter_hashing/hash_map.zig"
// Run Command: zig build run_hash_map -Doptimize=ReleaseFast
.{ .name = "hash_map", .path = "chapter_hashing/hash_map.zig" },
// Source File: "chapter_hashing/array_hash_map.zig"
// Run Command: zig build run_array_hash_map -Doptimize=ReleaseFast
.{ .name = "array_hash_map", .path = "chapter_hashing/array_hash_map.zig" },
// Source File: "chapter_tree/binary_tree.zig"
// Run Command: zig build run_binary_tree -Doptimize=ReleaseFast
.{ .name = "binary_tree", .path = "chapter_tree/binary_tree.zig" },
// Source File: "chapter_tree/binary_tree_bfs.zig"
// Run Command: zig build run_binary_tree_bfs -Doptimize=ReleaseFast
.{ .name = "binary_tree_bfs", .path = "chapter_tree/binary_tree_bfs.zig" },
// Source File: "chapter_tree/binary_tree_dfs.zig"
// Run Command: zig build run_binary_tree_dfs -Doptimize=ReleaseFast
.{ .name = "binary_tree_dfs", .path = "chapter_tree/binary_tree_dfs.zig" },
// Source File: "chapter_tree/binary_search_tree.zig"
// Run Command: zig build run_binary_search_tree -Doptimize=ReleaseFast
.{ .name = "binary_search_tree", .path = "chapter_tree/binary_search_tree.zig" },
// Source File: "chapter_tree/avl_tree.zig"
// Run Command: zig build run_avl_tree -Doptimize=ReleaseFast
.{ .name = "avl_tree", .path = "chapter_tree/avl_tree.zig" },
// Source File: "chapter_heap/heap.zig"
// Run Command: zig build run_heap -Doptimize=ReleaseFast
.{ .name = "heap", .path = "chapter_heap/heap.zig" },
// Source File: "chapter_heap/my_heap.zig"
// Run Command: zig build run_my_heap -Doptimize=ReleaseFast
.{ .name = "my_heap", .path = "chapter_heap/my_heap.zig" },
// Source File: "chapter_searching/linear_search.zig"
// Run Command: zig build run_linear_search -Doptimize=ReleaseFast
.{ .name = "linear_search", .path = "chapter_searching/linear_search.zig" },
// Source File: "chapter_searching/binary_search.zig"
// Run Command: zig build run_binary_search -Doptimize=ReleaseFast
.{ .name = "binary_search", .path = "chapter_searching/binary_search.zig" },
// Source File: "chapter_searching/hashing_search.zig"
// Run Command: zig build run_hashing_search -Doptimize=ReleaseFast
.{ .name = "hashing_search", .path = "chapter_searching/hashing_search.zig" },
// Source File: "chapter_searching/two_sum.zig"
// Run Command: zig build run_two_sum -Doptimize=ReleaseFast
.{ .name = "two_sum", .path = "chapter_searching/two_sum.zig" },
// Source File: "chapter_sorting/bubble_sort.zig"
// Run Command: zig build run_bubble_sort -Doptimize=ReleaseFast
.{ .name = "bubble_sort", .path = "chapter_sorting/bubble_sort.zig" },
// Source File: "chapter_sorting/insertion_sort.zig"
// Run Command: zig build run_insertion_sort -Doptimize=ReleaseFast
.{ .name = "insertion_sort", .path = "chapter_sorting/insertion_sort.zig" },
// Source File: "chapter_sorting/quick_sort.zig"
// Run Command: zig build run_quick_sort -Doptimize=ReleaseFast
.{ .name = "quick_sort", .path = "chapter_sorting/quick_sort.zig" },
// Source File: "chapter_sorting/merge_sort.zig"
// Run Command: zig build run_merge_sort -Doptimize=ReleaseFast
.{ .name = "merge_sort", .path = "chapter_sorting/merge_sort.zig" },
// Source File: "chapter_sorting/radix_sort.zig"
// Run Command: zig build run_radix_sort -Doptimize=ReleaseFast
.{ .name = "radix_sort", .path = "chapter_sorting/radix_sort.zig" },
// Source File: "chapter_dynamic_programming/climbing_stairs_backtrack.zig"
// Run Command: zig build run_climbing_stairs_backtrack -Doptimize=ReleaseFast
.{ .name = "climbing_stairs_backtrack", .path = "chapter_dynamic_programming/climbing_stairs_backtrack.zig" },
// Source File: "chapter_dynamic_programming/climbing_stairs_constraint_dp.zig"
// Run Command: zig build run_climbing_stairs_constraint_dp -Doptimize=ReleaseFast
.{ .name = "climbing_stairs_constraint_dp", .path = "chapter_dynamic_programming/climbing_stairs_constraint_dp.zig" },
// Source File: "chapter_dynamic_programming/climbing_stairs_dfs_mem.zig"
// Run Command: zig build run_climbing_stairs_dfs_mem -Doptimize=ReleaseFast
.{ .name = "climbing_stairs_dfs_mem", .path = "chapter_dynamic_programming/climbing_stairs_dfs_mem.zig" },
// Source File: "chapter_dynamic_programming/climbing_stairs_dfs.zig"
// Run Command: zig build run_climbing_stairs_dfs -Doptimize=ReleaseFast
.{ .name = "climbing_stairs_dfs", .path = "chapter_dynamic_programming/climbing_stairs_dfs.zig" },
// Source File: "chapter_dynamic_programming/climbing_stairs_dp.zig"
// Run Command: zig build run_climbing_stairs_dp -Doptimize=ReleaseFast
.{ .name = "climbing_stairs_dp", .path = "chapter_dynamic_programming/climbing_stairs_dp.zig" },
// Source File: "chapter_dynamic_programming/coin_change_ii.zig"
// Run Command: zig build run_coin_change_ii -Doptimize=ReleaseFast
.{ .name = "coin_change_ii", .path = "chapter_dynamic_programming/coin_change_ii.zig" },
// Source File: "chapter_dynamic_programming/coin_change.zig"
// Run Command: zig build run_coin_change -Doptimize=ReleaseFast
.{ .name = "coin_change", .path = "chapter_dynamic_programming/coin_change.zig" },
// Source File: "chapter_dynamic_programming/edit_distance.zig"
// Run Command: zig build run_edit_distance -Doptimize=ReleaseFast
.{ .name = "edit_distance", .path = "chapter_dynamic_programming/edit_distance.zig" },
// Source File: "chapter_dynamic_programming/knapsack.zig"
// Run Command: zig build run_knapsack -Doptimize=ReleaseFast
.{ .name = "knapsack", .path = "chapter_dynamic_programming/knapsack.zig" },
// Source File: "chapter_dynamic_programming/min_cost_climbing_stairs_dp.zig"
// Run Command: zig build run_min_cost_climbing_stairs_dp -Doptimize=ReleaseFast
.{ .name = "min_cost_climbing_stairs_dp", .path = "chapter_dynamic_programming/min_cost_climbing_stairs_dp.zig" },
// Source File: "chapter_dynamic_programming/min_path_sum.zig"
// Run Command: zig build run_min_path_sum -Doptimize=ReleaseFast
.{ .name = "min_path_sum", .path = "chapter_dynamic_programming/min_path_sum.zig" },
// Source File: "chapter_dynamic_programming/unbounded_knapsack.zig"
// Run Command: zig build run_unbounded_knapsack -Doptimize=ReleaseFast
.{ .name = "unbounded_knapsack", .path = "chapter_dynamic_programming/unbounded_knapsack.zig" },
// // Source File: "chapter_backtracking/n_queens.zig"
// // Run Command: zig build run_n_queens -Doptimize=ReleaseFast
// .{ .name = "n_queens", .path = "chapter_backtracking/n_queens.zig" },
};
inline for (group_name_path) |name_path| {
const exe = b.addExecutable(.{
.name = name_path.name,
.root_source_file = .{ .path = name_path.path },
.target = target,
.optimize = optimize,
});
exe.addModule("include", b.addModule("", .{
.source_file = .{ .path = "include/include.zig" },
}));
b.installArtifact(exe);
const run_cmd = b.addRunArtifact(exe);
run_cmd.step.dependOn(b.getInstallStep());
if (b.args) |args| run_cmd.addArgs(args);
const run_step = b.step("run_" ++ name_path.name, "Run the app");
run_step.dependOn(&run_cmd.step);
}
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_heap/my_heap.zig | // File: my_heap.zig
// Created Time: 2023-01-14
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 堆类简易实现
pub fn MaxHeap(comptime T: type) type {
return struct {
const Self = @This();
max_heap: ?std.ArrayList(T) = null, // 使用列表而非数组,这样无需考虑扩容问题
// 构造函数,根据输入列表建堆
pub fn init(self: *Self, allocator: std.mem.Allocator, nums: []const T) !void {
if (self.max_heap != null) return;
self.max_heap = std.ArrayList(T).init(allocator);
// 将列表元素原封不动添加进堆
try self.max_heap.?.appendSlice(nums);
// 堆化除叶节点以外的其他所有节点
var i: usize = parent(self.size() - 1) + 1;
while (i > 0) : (i -= 1) {
try self.siftDown(i - 1);
}
}
// 析构函数,释放内存
pub fn deinit(self: *Self) void {
if (self.max_heap != null) self.max_heap.?.deinit();
}
// 获取左子节点索引
fn left(i: usize) usize {
return 2 * i + 1;
}
// 获取右子节点索引
fn right(i: usize) usize {
return 2 * i + 2;
}
// 获取父节点索引
fn parent(i: usize) usize {
// return (i - 1) / 2; // 向下整除
return @divFloor(i - 1, 2);
}
// 交换元素
fn swap(self: *Self, i: usize, j: usize) !void {
var tmp = self.max_heap.?.items[i];
try self.max_heap.?.replaceRange(i, 1, &[_]T{self.max_heap.?.items[j]});
try self.max_heap.?.replaceRange(j, 1, &[_]T{tmp});
}
// 获取堆大小
pub fn size(self: *Self) usize {
return self.max_heap.?.items.len;
}
// 判断堆是否为空
pub fn isEmpty(self: *Self) bool {
return self.size() == 0;
}
// 访问堆顶元素
pub fn peek(self: *Self) T {
return self.max_heap.?.items[0];
}
// 元素入堆
pub fn push(self: *Self, val: T) !void {
// 添加节点
try self.max_heap.?.append(val);
// 从底至顶堆化
try self.siftUp(self.size() - 1);
}
// 从节点 i 开始,从底至顶堆化
fn siftUp(self: *Self, i_: usize) !void {
var i = i_;
            while (i > 0) {
                // 获取节点 i 的父节点
                var p = parent(i);
                // 当节点无需修复时,结束堆化
                if (self.max_heap.?.items[i] <= self.max_heap.?.items[p]) break;
// 交换两节点
try self.swap(i, p);
// 循环向上堆化
i = p;
}
}
// 元素出堆
pub fn pop(self: *Self) !T {
// 判断处理
if (self.isEmpty()) unreachable;
// 交换根节点与最右叶节点(即交换首元素与尾元素)
try self.swap(0, self.size() - 1);
// 删除节点
var val = self.max_heap.?.pop();
// 从顶至底堆化
try self.siftDown(0);
// 返回堆顶元素
return val;
}
// 从节点 i 开始,从顶至底堆化
fn siftDown(self: *Self, i_: usize) !void {
var i = i_;
while (true) {
// 判断节点 i, l, r 中值最大的节点,记为 ma
var l = left(i);
var r = right(i);
var ma = i;
if (l < self.size() and self.max_heap.?.items[l] > self.max_heap.?.items[ma]) ma = l;
if (r < self.size() and self.max_heap.?.items[r] > self.max_heap.?.items[ma]) ma = r;
// 若节点 i 最大或索引 l, r 越界,则无需继续堆化,跳出
if (ma == i) break;
// 交换两节点
try self.swap(i, ma);
// 循环向下堆化
i = ma;
}
}
fn lessThan(context: void, a: T, b: T) std.math.Order {
_ = context;
return std.math.order(a, b);
}
fn greaterThan(context: void, a: T, b: T) std.math.Order {
return lessThan(context, a, b).invert();
}
// 打印堆(二叉树)
pub fn print(self: *Self, mem_allocator: std.mem.Allocator) !void {
const PQgt = std.PriorityQueue(T, void, greaterThan);
var queue = PQgt.init(std.heap.page_allocator, {});
defer queue.deinit();
try queue.addSlice(self.max_heap.?.items);
try inc.PrintUtil.printHeap(T, mem_allocator, queue);
}
};
}
// Driver Code
pub fn main() !void {
// 初始化内存分配器
var mem_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer mem_arena.deinit();
const mem_allocator = mem_arena.allocator();
// 初始化大顶堆
var max_heap = MaxHeap(i32){};
try max_heap.init(std.heap.page_allocator, &[_]i32{ 9, 8, 6, 6, 7, 5, 2, 1, 4, 3, 6, 2 });
defer max_heap.deinit();
std.debug.print("\n输入列表并建堆后\n", .{});
try max_heap.print(mem_allocator);
// 获取堆顶元素
var peek = max_heap.peek();
std.debug.print("\n堆顶元素为 {}\n", .{peek});
// 元素入堆
const val = 7;
try max_heap.push(val);
std.debug.print("\n元素 {} 入堆后\n", .{val});
try max_heap.print(mem_allocator);
// 堆顶元素出堆
peek = try max_heap.pop();
std.debug.print("\n堆顶元素 {} 出堆后\n", .{peek});
try max_heap.print(mem_allocator);
// 获取堆的大小
var size = max_heap.size();
std.debug.print("\n堆元素数量为 {}", .{size});
// 判断堆是否为空
var is_empty = max_heap.isEmpty();
std.debug.print("\n堆是否为空 {}\n", .{is_empty});
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_heap/heap.zig | // File: heap.zig
// Created Time: 2023-01-14
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
fn lessThan(context: void, a: i32, b: i32) std.math.Order {
_ = context;
return std.math.order(a, b);
}
fn greaterThan(context: void, a: i32, b: i32) std.math.Order {
return lessThan(context, a, b).invert();
}
fn testPush(comptime T: type, mem_allocator: std.mem.Allocator, heap: anytype, val: T) !void {
try heap.add(val); //元素入堆
std.debug.print("\n元素 {} 入堆后\n", .{val});
try inc.PrintUtil.printHeap(T, mem_allocator, heap);
}
fn testPop(comptime T: type, mem_allocator: std.mem.Allocator, heap: anytype) !void {
var val = heap.remove(); //堆顶元素出堆
std.debug.print("\n堆顶元素 {} 出堆后\n", .{val});
try inc.PrintUtil.printHeap(T, mem_allocator, heap);
}
// Driver Code
pub fn main() !void {
// 初始化内存分配器
var mem_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer mem_arena.deinit();
const mem_allocator = mem_arena.allocator();
// 初始化堆
// 初始化小顶堆
const PQlt = std.PriorityQueue(i32, void, lessThan);
var min_heap = PQlt.init(std.heap.page_allocator, {});
defer min_heap.deinit();
// 初始化大顶堆
const PQgt = std.PriorityQueue(i32, void, greaterThan);
var max_heap = PQgt.init(std.heap.page_allocator, {});
defer max_heap.deinit();
std.debug.print("\n以下测试样例为大顶堆", .{});
// 元素入堆
try testPush(i32, mem_allocator, &max_heap, 1);
try testPush(i32, mem_allocator, &max_heap, 3);
try testPush(i32, mem_allocator, &max_heap, 2);
try testPush(i32, mem_allocator, &max_heap, 5);
try testPush(i32, mem_allocator, &max_heap, 4);
// 获取堆顶元素
var peek = max_heap.peek().?;
std.debug.print("\n堆顶元素为 {}\n", .{peek});
// 堆顶元素出堆
try testPop(i32, mem_allocator, &max_heap);
try testPop(i32, mem_allocator, &max_heap);
try testPop(i32, mem_allocator, &max_heap);
try testPop(i32, mem_allocator, &max_heap);
try testPop(i32, mem_allocator, &max_heap);
// 获取堆的大小
var size = max_heap.len;
std.debug.print("\n堆元素数量为 {}\n", .{size});
// 判断堆是否为空
    var is_empty = max_heap.len == 0;
std.debug.print("\n堆是否为空 {}\n", .{is_empty});
// 输入列表并建堆
try min_heap.addSlice(&[_]i32{ 1, 3, 2, 5, 4 });
std.debug.print("\n输入列表并建立小顶堆后\n", .{});
try inc.PrintUtil.printHeap(i32, mem_allocator, min_heap);
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_tree/avl_tree.zig | // File: avl_tree.zig
// Created Time: 2023-01-15
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 平衡二叉树
pub fn AVLTree(comptime T: type) type {
return struct {
const Self = @This();
root: ?*inc.TreeNode(T) = null, // 根节点
mem_arena: ?std.heap.ArenaAllocator = null,
mem_allocator: std.mem.Allocator = undefined, // 内存分配器
// 构造函数
pub fn init(self: *Self, allocator: std.mem.Allocator) void {
if (self.mem_arena == null) {
self.mem_arena = std.heap.ArenaAllocator.init(allocator);
self.mem_allocator = self.mem_arena.?.allocator();
}
}
// 析构函数
pub fn deinit(self: *Self) void {
if (self.mem_arena == null) return;
self.mem_arena.?.deinit();
}
// 获取节点高度
fn height(self: *Self, node: ?*inc.TreeNode(T)) i32 {
_ = self;
// 空节点高度为 -1 ,叶节点高度为 0
return if (node == null) -1 else node.?.height;
}
// 更新节点高度
fn updateHeight(self: *Self, node: ?*inc.TreeNode(T)) void {
// 节点高度等于最高子树高度 + 1
node.?.height = @max(self.height(node.?.left), self.height(node.?.right)) + 1;
}
// 获取平衡因子
fn balanceFactor(self: *Self, node: ?*inc.TreeNode(T)) i32 {
// 空节点平衡因子为 0
if (node == null) return 0;
// 节点平衡因子 = 左子树高度 - 右子树高度
return self.height(node.?.left) - self.height(node.?.right);
}
// 右旋操作
fn rightRotate(self: *Self, node: ?*inc.TreeNode(T)) ?*inc.TreeNode(T) {
var child = node.?.left;
var grandChild = child.?.right;
// 以 child 为原点,将 node 向右旋转
child.?.right = node;
node.?.left = grandChild;
// 更新节点高度
self.updateHeight(node);
self.updateHeight(child);
// 返回旋转后子树的根节点
return child;
}
// 左旋操作
fn leftRotate(self: *Self, node: ?*inc.TreeNode(T)) ?*inc.TreeNode(T) {
var child = node.?.right;
var grandChild = child.?.left;
// 以 child 为原点,将 node 向左旋转
child.?.left = node;
node.?.right = grandChild;
// 更新节点高度
self.updateHeight(node);
self.updateHeight(child);
// 返回旋转后子树的根节点
return child;
}
// 执行旋转操作,使该子树重新恢复平衡
fn rotate(self: *Self, node: ?*inc.TreeNode(T)) ?*inc.TreeNode(T) {
// 获取节点 node 的平衡因子
var balance_factor = self.balanceFactor(node);
// 左偏树
if (balance_factor > 1) {
if (self.balanceFactor(node.?.left) >= 0) {
// 右旋
return self.rightRotate(node);
} else {
// 先左旋后右旋
node.?.left = self.leftRotate(node.?.left);
return self.rightRotate(node);
}
}
// 右偏树
if (balance_factor < -1) {
if (self.balanceFactor(node.?.right) <= 0) {
// 左旋
return self.leftRotate(node);
} else {
// 先右旋后左旋
node.?.right = self.rightRotate(node.?.right);
return self.leftRotate(node);
}
}
// 平衡树,无需旋转,直接返回
return node;
}
// 插入节点
fn insert(self: *Self, val: T) !void {
self.root = (try self.insertHelper(self.root, val)).?;
}
// 递归插入节点(辅助函数)
fn insertHelper(self: *Self, node_: ?*inc.TreeNode(T), val: T) !?*inc.TreeNode(T) {
var node = node_;
if (node == null) {
var tmp_node = try self.mem_allocator.create(inc.TreeNode(T));
tmp_node.init(val);
return tmp_node;
}
// 1. 查找插入位置,并插入节点
if (val < node.?.val) {
node.?.left = try self.insertHelper(node.?.left, val);
} else if (val > node.?.val) {
node.?.right = try self.insertHelper(node.?.right, val);
} else {
return node; // 重复节点不插入,直接返回
}
self.updateHeight(node); // 更新节点高度
// 2. 执行旋转操作,使该子树重新恢复平衡
node = self.rotate(node);
// 返回子树的根节点
return node;
}
// 删除节点
fn remove(self: *Self, val: T) void {
self.root = self.removeHelper(self.root, val).?;
}
// 递归删除节点(辅助函数)
fn removeHelper(self: *Self, node_: ?*inc.TreeNode(T), val: T) ?*inc.TreeNode(T) {
var node = node_;
if (node == null) return null;
// 1. 查找节点,并删除之
if (val < node.?.val) {
node.?.left = self.removeHelper(node.?.left, val);
} else if (val > node.?.val) {
node.?.right = self.removeHelper(node.?.right, val);
} else {
if (node.?.left == null or node.?.right == null) {
var child = if (node.?.left != null) node.?.left else node.?.right;
// 子节点数量 = 0 ,直接删除 node 并返回
if (child == null) {
return null;
// 子节点数量 = 1 ,直接删除 node
} else {
node = child;
}
} else {
// 子节点数量 = 2 ,则将中序遍历的下个节点删除,并用该节点替换当前节点
var temp = node.?.right;
while (temp.?.left != null) {
temp = temp.?.left;
}
node.?.right = self.removeHelper(node.?.right, temp.?.val);
node.?.val = temp.?.val;
}
}
self.updateHeight(node); // 更新节点高度
// 2. 执行旋转操作,使该子树重新恢复平衡
node = self.rotate(node);
// 返回子树的根节点
return node;
}
// 查找节点
fn search(self: *Self, val: T) ?*inc.TreeNode(T) {
var cur = self.root;
// 循环查找,越过叶节点后跳出
while (cur != null) {
// 目标节点在 cur 的右子树中
if (cur.?.val < val) {
cur = cur.?.right;
// 目标节点在 cur 的左子树中
} else if (cur.?.val > val) {
cur = cur.?.left;
// 找到目标节点,跳出循环
} else {
break;
}
}
// 返回目标节点
return cur;
}
};
}
pub fn testInsert(comptime T: type, tree_: *AVLTree(T), val: T) !void {
var tree = tree_;
try tree.insert(val);
std.debug.print("\n插入节点 {} 后,AVL 树为\n", .{val});
try inc.PrintUtil.printTree(tree.root, null, false);
}
pub fn testRemove(comptime T: type, tree_: *AVLTree(T), val: T) !void {
var tree = tree_;
tree.remove(val);
std.debug.print("\n删除节点 {} 后,AVL 树为\n", .{val});
try inc.PrintUtil.printTree(tree.root, null, false);
}
// Driver Code
pub fn main() !void {
// 初始化空 AVL 树
var avl_tree = AVLTree(i32){};
avl_tree.init(std.heap.page_allocator);
defer avl_tree.deinit();
// 插入节点
// 请关注插入节点后,AVL 树是如何保持平衡的
try testInsert(i32, &avl_tree, 1);
try testInsert(i32, &avl_tree, 2);
try testInsert(i32, &avl_tree, 3);
try testInsert(i32, &avl_tree, 4);
try testInsert(i32, &avl_tree, 5);
try testInsert(i32, &avl_tree, 8);
try testInsert(i32, &avl_tree, 7);
try testInsert(i32, &avl_tree, 9);
try testInsert(i32, &avl_tree, 10);
try testInsert(i32, &avl_tree, 6);
// 插入重复节点
try testInsert(i32, &avl_tree, 7);
// 删除节点
// 请关注删除节点后,AVL 树是如何保持平衡的
    try testRemove(i32, &avl_tree, 8); // 删除度为 0 的节点
    try testRemove(i32, &avl_tree, 5); // 删除度为 1 的节点
    try testRemove(i32, &avl_tree, 4); // 删除度为 2 的节点
// 查找节点
var node = avl_tree.search(7).?;
std.debug.print("\n查找到的节点对象为 {any},节点值 = {}\n", .{node, node.val});
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_tree/binary_tree_bfs.zig | // File: binary_tree_bfs.zig
// Created Time: 2023-01-15
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 层序遍历
fn hierOrder(comptime T: type, mem_allocator: std.mem.Allocator, root: *inc.TreeNode(T)) !std.ArrayList(T) {
// 初始化队列,加入根节点
const L = std.TailQueue(*inc.TreeNode(T));
var queue = L{};
var root_node = try mem_allocator.create(L.Node);
root_node.data = root;
queue.append(root_node);
// 初始化一个列表,用于保存遍历序列
var list = std.ArrayList(T).init(std.heap.page_allocator);
while (queue.len > 0) {
var queue_node = queue.popFirst().?; // 队列出队
var node = queue_node.data;
try list.append(node.val); // 保存节点
if (node.left != null) {
var tmp_node = try mem_allocator.create(L.Node);
tmp_node.data = node.left.?;
queue.append(tmp_node); // 左子节点入队
}
if (node.right != null) {
var tmp_node = try mem_allocator.create(L.Node);
tmp_node.data = node.right.?;
queue.append(tmp_node); // 右子节点入队
}
}
return list;
}
// Driver Code
pub fn main() !void {
// 初始化内存分配器
var mem_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer mem_arena.deinit();
const mem_allocator = mem_arena.allocator();
// 初始化二叉树
// 这里借助了一个从数组直接生成二叉树的函数
var nums = [_]i32{1, 2, 3, 4, 5, 6, 7};
var root = try inc.TreeUtil.arrToTree(i32, mem_allocator, &nums);
std.debug.print("初始化二叉树\n", .{});
try inc.PrintUtil.printTree(root, null, false);
// 层序遍历
var list = try hierOrder(i32, mem_allocator, root.?);
defer list.deinit();
std.debug.print("\n层序遍历的节点打印序列 = ", .{});
inc.PrintUtil.printList(i32, list);
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_tree/binary_tree.zig | // File: binary_tree.zig
// Created Time: 2023-01-14
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// Driver Code
pub fn main() !void {
// 初始化二叉树
// 初始化节点
var n1 = inc.TreeNode(i32){ .val = 1 };
var n2 = inc.TreeNode(i32){ .val = 2 };
var n3 = inc.TreeNode(i32){ .val = 3 };
var n4 = inc.TreeNode(i32){ .val = 4 };
var n5 = inc.TreeNode(i32){ .val = 5 };
// 构建引用指向(即指针)
n1.left = &n2;
n1.right = &n3;
n2.left = &n4;
n2.right = &n5;
std.debug.print("初始化二叉树\n", .{});
try inc.PrintUtil.printTree(&n1, null, false);
// 插入与删除节点
var p = inc.TreeNode(i32){ .val = 0 };
// 在 n1 -> n2 中间插入节点 P
n1.left = &p;
p.left = &n2;
std.debug.print("插入节点 P 后\n", .{});
try inc.PrintUtil.printTree(&n1, null, false);
// 删除节点
n1.left = &n2;
std.debug.print("删除节点 P 后\n", .{});
try inc.PrintUtil.printTree(&n1, null, false);
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_tree/binary_search_tree.zig | // File: binary_search_tree.zig
// Created Time: 2023-01-15
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 二叉搜索树
pub fn BinarySearchTree(comptime T: type) type {
return struct {
const Self = @This();
root: ?*inc.TreeNode(T) = null,
mem_arena: ?std.heap.ArenaAllocator = null,
mem_allocator: std.mem.Allocator = undefined, // 内存分配器
// 构造函数
pub fn init(self: *Self, allocator: std.mem.Allocator, nums: []T) !void {
if (self.mem_arena == null) {
self.mem_arena = std.heap.ArenaAllocator.init(allocator);
self.mem_allocator = self.mem_arena.?.allocator();
}
std.mem.sort(T, nums, {}, comptime std.sort.asc(T)); // 排序数组
self.root = try self.buildTree(nums, 0, nums.len - 1); // 构建二叉搜索树
}
// 析构函数
pub fn deinit(self: *Self) void {
if (self.mem_arena == null) return;
self.mem_arena.?.deinit();
}
// 构建二叉搜索树
fn buildTree(self: *Self, nums: []T, i: usize, j: usize) !?*inc.TreeNode(T) {
if (i > j) return null;
// 将数组中间节点作为根节点
var mid = (i + j) / 2;
var node = try self.mem_allocator.create(inc.TreeNode(T));
node.init(nums[mid]);
// 递归建立左子树和右子树
if (mid >= 1) node.left = try self.buildTree(nums, i, mid - 1);
node.right = try self.buildTree(nums, mid + 1, j);
return node;
}
// 获取二叉树根节点
fn getRoot(self: *Self) ?*inc.TreeNode(T) {
return self.root;
}
// 查找节点
fn search(self: *Self, num: T) ?*inc.TreeNode(T) {
var cur = self.root;
// 循环查找,越过叶节点后跳出
while (cur != null) {
// 目标节点在 cur 的右子树中
if (cur.?.val < num) {
cur = cur.?.right;
// 目标节点在 cur 的左子树中
} else if (cur.?.val > num) {
cur = cur.?.left;
// 找到目标节点,跳出循环
} else {
break;
}
}
// 返回目标节点
return cur;
}
// 插入节点
fn insert(self: *Self, num: T) !void {
// 若树为空,直接提前返回
if (self.root == null) return;
var cur = self.root;
var pre: ?*inc.TreeNode(T) = null;
// 循环查找,越过叶节点后跳出
while (cur != null) {
// 找到重复节点,直接返回
if (cur.?.val == num) return;
pre = cur;
// 插入位置在 cur 的右子树中
if (cur.?.val < num) {
cur = cur.?.right;
// 插入位置在 cur 的左子树中
} else {
cur = cur.?.left;
}
}
// 插入节点 val
var node = try self.mem_allocator.create(inc.TreeNode(T));
node.init(num);
if (pre.?.val < num) {
pre.?.right = node;
} else {
pre.?.left = node;
}
}
// 删除节点
fn remove(self: *Self, num: T) void {
// 若树为空,直接提前返回
if (self.root == null) return;
var cur = self.root;
var pre: ?*inc.TreeNode(T) = null;
// 循环查找,越过叶节点后跳出
while (cur != null) {
// 找到待删除节点,跳出循环
if (cur.?.val == num) break;
pre = cur;
// 待删除节点在 cur 的右子树中
if (cur.?.val < num) {
cur = cur.?.right;
// 待删除节点在 cur 的左子树中
} else {
cur = cur.?.left;
}
}
// 若无待删除节点,则直接返回
if (cur == null) return;
// 子节点数量 = 0 or 1
if (cur.?.left == null or cur.?.right == null) {
// 当子节点数量 = 0 / 1 时, child = null / 该子节点
var child = if (cur.?.left != null) cur.?.left else cur.?.right;
// 删除节点 cur
if (pre.?.left == cur) {
pre.?.left = child;
} else {
pre.?.right = child;
}
// 子节点数量 = 2
} else {
// 获取中序遍历中 cur 的下一个节点
var tmp = cur.?.right;
while (tmp.?.left != null) {
tmp = tmp.?.left;
}
var tmp_val = tmp.?.val;
// 递归删除节点 nex
self.remove(tmp.?.val);
// 将 tmp 的值复制给 cur
cur.?.val = tmp_val;
}
}
};
}
// Driver Code
pub fn main() !void {
// 初始化二叉树
var nums = [_]i32{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
var bst = BinarySearchTree(i32){};
try bst.init(std.heap.page_allocator, &nums);
defer bst.deinit();
std.debug.print("初始化的二叉树为\n", .{});
try inc.PrintUtil.printTree(bst.getRoot(), null, false);
// 查找节点
var node = bst.search(7);
std.debug.print("\n查找到的节点对象为 {any},节点值 = {}\n", .{node, node.?.val});
// 插入节点
try bst.insert(16);
std.debug.print("\n插入节点 16 后,二叉树为\n", .{});
try inc.PrintUtil.printTree(bst.getRoot(), null, false);
// 删除节点
bst.remove(1);
std.debug.print("\n删除节点 1 后,二叉树为\n", .{});
try inc.PrintUtil.printTree(bst.getRoot(), null, false);
bst.remove(2);
std.debug.print("\n删除节点 2 后,二叉树为\n", .{});
try inc.PrintUtil.printTree(bst.getRoot(), null, false);
bst.remove(4);
std.debug.print("\n删除节点 4 后,二叉树为\n", .{});
try inc.PrintUtil.printTree(bst.getRoot(), null, false);
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_tree/binary_tree_dfs.zig | // File: binary_tree_dfs.zig
// Created Time: 2023-01-15
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
var list = std.ArrayList(i32).init(std.heap.page_allocator);
// 前序遍历
fn preOrder(comptime T: type, root: ?*inc.TreeNode(T)) !void {
if (root == null) return;
// 访问优先级:根节点 -> 左子树 -> 右子树
try list.append(root.?.val);
try preOrder(T, root.?.left);
try preOrder(T, root.?.right);
}
// 中序遍历
fn inOrder(comptime T: type, root: ?*inc.TreeNode(T)) !void {
if (root == null) return;
// 访问优先级:左子树 -> 根节点 -> 右子树
try inOrder(T, root.?.left);
try list.append(root.?.val);
try inOrder(T, root.?.right);
}
// 后序遍历
fn postOrder(comptime T: type, root: ?*inc.TreeNode(T)) !void {
if (root == null) return;
// 访问优先级:左子树 -> 右子树 -> 根节点
try postOrder(T, root.?.left);
try postOrder(T, root.?.right);
try list.append(root.?.val);
}
// Driver Code
pub fn main() !void {
// 初始化内存分配器
var mem_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer mem_arena.deinit();
const mem_allocator = mem_arena.allocator();
// 初始化二叉树
// 这里借助了一个从数组直接生成二叉树的函数
var nums = [_]i32{1, 2, 3, 4, 5, 6, 7};
var root = try inc.TreeUtil.arrToTree(i32, mem_allocator, &nums);
std.debug.print("初始化二叉树\n", .{});
try inc.PrintUtil.printTree(root, null, false);
// 前序遍历
list.clearRetainingCapacity();
try preOrder(i32, root);
std.debug.print("\n前序遍历的节点打印序列 = ", .{});
inc.PrintUtil.printList(i32, list);
// 中序遍历
list.clearRetainingCapacity();
try inOrder(i32, root);
std.debug.print("\n中序遍历的节点打印序列 = ", .{});
inc.PrintUtil.printList(i32, list);
// 后序遍历
list.clearRetainingCapacity();
try postOrder(i32, root);
std.debug.print("\n后续遍历的节点打印序列 = ", .{});
inc.PrintUtil.printList(i32, list);
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/include/include.zig | // File: include.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
pub const PrintUtil = @import("PrintUtil.zig");
pub const ListUtil = @import("ListNode.zig");
pub const ListNode = ListUtil.ListNode;
pub const TreeUtil = @import("TreeNode.zig");
pub const TreeNode = TreeUtil.TreeNode; |
0 | repos/hello-algo-zig | repos/hello-algo-zig/include/ListNode.zig | // File: ListNode.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
// Definition for a singly-linked list node
pub fn ListNode(comptime T: type) type {
return struct {
const Self = @This();
val: T = undefined,
next: ?*Self = null,
// Initialize a list node with specific value
        pub fn init(self: *Self, x: T) void {
self.val = x;
self.next = null;
}
};
}
// Generate a linked list with a list
pub fn listToLinkedList(comptime T: type, mem_allocator: std.mem.Allocator, list: std.ArrayList(T)) !?*ListNode(T) {
var dum = try mem_allocator.create(ListNode(T));
dum.init(0);
var head = dum;
for (list.items) |val| {
var tmp = try mem_allocator.create(ListNode(T));
tmp.init(val);
head.next = tmp;
head = head.next.?;
}
return dum.next;
}
// Generate a linked list with an array
pub fn arrToLinkedList(comptime T: type, mem_allocator: std.mem.Allocator, arr: []T) !?*ListNode(T) {
var dum = try mem_allocator.create(ListNode(T));
dum.init(0);
var head = dum;
for (arr) |val| {
var tmp = try mem_allocator.create(ListNode(T));
tmp.init(val);
head.next = tmp;
head = head.next.?;
}
return dum.next;
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/include/PrintUtil.zig | // File: PrintUtil.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
pub const ListUtil = @import("ListNode.zig");
pub const ListNode = ListUtil.ListNode;
pub const TreeUtil = @import("TreeNode.zig");
pub const TreeNode = TreeUtil.TreeNode;
// Print an array
pub fn printArray(comptime T: type, nums: []T) void {
std.debug.print("[", .{});
if (nums.len > 0) {
for (nums, 0..) |num, j| {
std.debug.print("{}{s}", .{num, if (j == nums.len-1) "]" else ", " });
}
} else {
std.debug.print("]", .{});
}
}
// Print a list
pub fn printList(comptime T: type, list: std.ArrayList(T)) void {
std.debug.print("[", .{});
if (list.items.len > 0) {
for (list.items, 0..) |value, i| {
std.debug.print("{}{s}", .{value, if (i == list.items.len-1) "]" else ", " });
}
} else {
std.debug.print("]", .{});
}
}
// Print a linked list
pub fn printLinkedList(comptime T: type, node: ?*ListNode(T)) !void {
if (node == null) return;
var list = std.ArrayList(T).init(std.heap.page_allocator);
defer list.deinit();
var head = node;
while (head != null) {
try list.append(head.?.val);
head = head.?.next;
}
for (list.items, 0..) |value, i| {
std.debug.print("{}{s}", .{value, if (i == list.items.len-1) "\n" else "->" });
}
}
// Print a queue or deque
pub fn printQueue(comptime T: type, queue: std.TailQueue(T)) void {
var node = queue.first;
std.debug.print("[", .{});
var i: i32 = 0;
while (node != null) : (i += 1) {
var data = node.?.data;
std.debug.print("{}{s}", .{data, if (i == queue.len - 1) "]" else ", " });
node = node.?.next;
}
}
// Print a hash map
pub fn printHashMap(comptime TKey: type, comptime TValue: type, map: std.AutoHashMap(TKey, TValue)) void {
var it = map.iterator();
while (it.next()) |kv| {
var key = kv.key_ptr.*;
var value = kv.value_ptr.*;
std.debug.print("{} -> {s}\n", .{key, value});
}
}
// print a heap (PriorityQueue)
pub fn printHeap(comptime T: type, mem_allocator: std.mem.Allocator, queue: anytype) !void {
var arr = queue.items;
var len = queue.len;
std.debug.print("堆的数组表示:", .{});
printArray(T, arr[0..len]);
std.debug.print("\n堆的树状表示:\n", .{});
var root = try TreeUtil.arrToTree(T, mem_allocator, arr[0..len]);
try printTree(root, null, false);
}
// This tree printer is borrowed from TECHIE DELIGHT
// https://www.techiedelight.com/c-program-print-binary-tree/
const Trunk = struct {
prev: ?*Trunk = null,
str: []const u8 = undefined,
pub fn init(self: *Trunk, prev: ?*Trunk, str: []const u8) void {
self.prev = prev;
self.str = str;
}
};
// Helper function to print branches of the binary tree
pub fn showTrunks(p: ?*Trunk) void {
if (p == null) return;
showTrunks(p.?.prev);
std.debug.print("{s}", .{p.?.str});
}
// The interface of the tree printer
// Print a binary tree
pub fn printTree(root: ?*TreeNode(i32), prev: ?*Trunk, isLeft: bool) !void {
if (root == null) {
return;
}
var prev_str = " ";
var trunk = Trunk{.prev = prev, .str = prev_str};
try printTree(root.?.right, &trunk, true);
if (prev == null) {
trunk.str = "———";
} else if (isLeft) {
trunk.str = "/———";
prev_str = " |";
} else {
trunk.str = "\\———";
prev.?.str = prev_str;
}
showTrunks(&trunk);
std.debug.print(" {}\n", .{root.?.val});
if (prev) |_| {
prev.?.str = prev_str;
}
trunk.str = " |";
try printTree(root.?.left, &trunk, false);
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/include/TreeNode.zig | // File: TreeNode.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
// Definition for a binary tree node
pub fn TreeNode(comptime T: type) type {
return struct {
const Self = @This();
val: T = undefined, // 节点值
height: i32 = undefined, // 节点高度
left: ?*Self = null, // 左子节点指针
right: ?*Self = null, // 右子节点指针
// Initialize a tree node with specific value
        pub fn init(self: *Self, x: T) void {
self.val = x;
self.height = 0;
self.left = null;
self.right = null;
}
};
}
// Generate a binary tree with an array
pub fn arrToTree(comptime T: type, mem_allocator: std.mem.Allocator, arr: []T) !?*TreeNode(T) {
if (arr.len == 0) return null;
var root = try mem_allocator.create(TreeNode(T));
root.init(arr[0]);
const L = std.TailQueue(*TreeNode(T));
var que = L{};
var root_node = try mem_allocator.create(L.Node);
root_node.data = root;
que.append(root_node);
var index: usize = 0;
while (que.len > 0) {
var que_node = que.popFirst().?;
var node = que_node.data;
index += 1;
if (index >= arr.len) break;
if (index < arr.len) {
var tmp = try mem_allocator.create(TreeNode(T));
tmp.init(arr[index]);
node.left = tmp;
var tmp_node = try mem_allocator.create(L.Node);
tmp_node.data = node.left.?;
que.append(tmp_node);
}
index += 1;
if (index >= arr.len) break;
if (index < arr.len) {
var tmp = try mem_allocator.create(TreeNode(T));
tmp.init(arr[index]);
node.right = tmp;
var tmp_node = try mem_allocator.create(L.Node);
tmp_node.data = node.right.?;
que.append(tmp_node);
}
}
return root;
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_array_and_linkedlist/my_list.zig | // File: my_list.zig
// Created Time: 2023-01-08
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 列表类简易实现
pub fn MyList(comptime T: type) type {
return struct {
const Self = @This();
nums: []T = undefined, // 数组(存储列表元素)
numsCapacity: usize = 10, // 列表容量
numSize: usize = 0, // 列表长度(即当前元素数量)
extendRatio: usize = 2, // 每次列表扩容的倍数
mem_arena: ?std.heap.ArenaAllocator = null,
mem_allocator: std.mem.Allocator = undefined, // 内存分配器
// 构造函数(分配内存+初始化列表)
pub fn init(self: *Self, allocator: std.mem.Allocator) !void {
if (self.mem_arena == null) {
self.mem_arena = std.heap.ArenaAllocator.init(allocator);
self.mem_allocator = self.mem_arena.?.allocator();
}
self.nums = try self.mem_allocator.alloc(T, self.numsCapacity);
@memset(self.nums, @as(T, 0));
}
// 析构函数(释放内存)
pub fn deinit(self: *Self) void {
if (self.mem_arena == null) return;
self.mem_arena.?.deinit();
}
// 获取列表长度(即当前元素数量)
pub fn size(self: *Self) usize {
return self.numSize;
}
// 获取列表容量
pub fn capacity(self: *Self) usize {
return self.numsCapacity;
}
// 访问元素
pub fn get(self: *Self, index: usize) T {
// 索引如果越界则抛出异常,下同
if (index < 0 or index >= self.size()) @panic("索引越界");
return self.nums[index];
}
// 更新元素
pub fn set(self: *Self, index: usize, num: T) void {
// 索引如果越界则抛出异常,下同
if (index < 0 or index >= self.size()) @panic("索引越界");
self.nums[index] = num;
}
// 尾部添加元素
pub fn add(self: *Self, num: T) !void {
// 元素数量超出容量时,触发扩容机制
if (self.size() == self.capacity()) try self.extendCapacity();
self.nums[self.size()] = num;
// 更新元素数量
self.numSize += 1;
}
// 中间插入元素
pub fn insert(self: *Self, index: usize, num: T) !void {
if (index < 0 or index >= self.size()) @panic("索引越界");
// 元素数量超出容量时,触发扩容机制
if (self.size() == self.capacity()) try self.extendCapacity();
            // 索引 index 以及之后的元素都向后移动一位
            var j = self.size();
            while (j > index) : (j -= 1) {
                self.nums[j] = self.nums[j - 1];
            }
self.nums[index] = num;
// 更新元素数量
self.numSize += 1;
}
// 删除元素
pub fn remove(self: *Self, index: usize) T {
if (index < 0 or index >= self.size()) @panic("索引越界");
var num = self.nums[index];
// 索引 i 之后的元素都向前移动一位
var j = index;
while (j < self.size() - 1) : (j += 1) {
self.nums[j] = self.nums[j + 1];
}
// 更新元素数量
self.numSize -= 1;
// 返回被删除元素
return num;
}
// 列表扩容
pub fn extendCapacity(self: *Self) !void {
// 新建一个长度为 size * extendRatio 的数组,并将原数组拷贝到新数组
var newCapacity = self.capacity() * self.extendRatio;
var extend = try self.mem_allocator.alloc(T, newCapacity);
@memset(extend, @as(T, 0));
// 将原数组中的所有元素复制到新数组
std.mem.copy(T, extend, self.nums);
self.nums = extend;
// 更新列表容量
self.numsCapacity = newCapacity;
}
// 将列表转换为数组
pub fn toArray(self: *Self) ![]T {
// 仅转换有效长度范围内的列表元素
var nums = try self.mem_allocator.alloc(T, self.size());
@memset(nums, @as(T, 0));
for (nums, 0..) |*num, i| {
num.* = self.get(i);
}
return nums;
}
};
}
// Driver Code
pub fn main() !void {
// 初始化列表
var list = MyList(i32){};
try list.init(std.heap.page_allocator);
// 延迟释放内存
defer list.deinit();
// 尾部添加元素
try list.add(1);
try list.add(3);
try list.add(2);
try list.add(5);
try list.add(4);
std.debug.print("列表 list = ", .{});
inc.PrintUtil.printArray(i32, try list.toArray());
std.debug.print(" ,容量 = {} ,长度 = {}", .{list.capacity(), list.size()});
// 中间插入元素
try list.insert(3, 6);
std.debug.print("\n在索引 3 处插入数字 6 ,得到 list = ", .{});
inc.PrintUtil.printArray(i32, try list.toArray());
// 删除元素
_ = list.remove(3);
std.debug.print("\n删除索引 3 处的元素,得到 list = ", .{});
inc.PrintUtil.printArray(i32, try list.toArray());
// 访问元素
var num = list.get(1);
std.debug.print("\n访问索引 1 处的元素,得到 num = {}", .{num});
// 更新元素
list.set(1, 0);
std.debug.print("\n将索引 1 处的元素更新为 0 ,得到 list = ", .{});
inc.PrintUtil.printArray(i32, try list.toArray());
// 测试扩容机制
var i: i32 = 0;
while (i < 10) : (i += 1) {
// 在 i = 5 时,列表长度将超出列表容量,此时触发扩容机制
try list.add(i);
}
std.debug.print("\n扩容后的列表 list = ", .{});
inc.PrintUtil.printArray(i32, try list.toArray());
std.debug.print(" ,容量 = {} ,长度 = {}\n", .{list.capacity(), list.size()});
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_array_and_linkedlist/linked_list.zig | // File: linked_list.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 在链表的节点 n0 之后插入节点 P
pub fn insert(n0: ?*inc.ListNode(i32), P: ?*inc.ListNode(i32)) void {
var n1 = n0.?.next;
n0.?.next = P;
P.?.next = n1;
}
// 删除链表的节点 n0 之后的首个节点
pub fn remove(n0: ?*inc.ListNode(i32)) void {
if (n0.?.next == null) return;
// n0 -> P -> n1
var P = n0.?.next;
var n1 = P.?.next;
n0.?.next = n1;
}
// 访问链表中索引为 index 的节点
pub fn access(node: ?*inc.ListNode(i32), index: i32) ?*inc.ListNode(i32) {
var head = node;
var i: i32 = 0;
while (i < index) : (i += 1) {
head = head.?.next;
if (head == null) return null;
}
return head;
}
// 在链表中查找值为 target 的首个节点
pub fn find(node: ?*inc.ListNode(i32), target: i32) i32 {
var head = node;
var index: i32 = 0;
while (head != null) {
if (head.?.val == target) return index;
head = head.?.next;
index += 1;
}
return -1;
}
// Driver Code
pub fn main() !void {
// 初始化链表
// 初始化各个节点
var n0 = inc.ListNode(i32){.val = 1};
var n1 = inc.ListNode(i32){.val = 3};
var n2 = inc.ListNode(i32){.val = 2};
var n3 = inc.ListNode(i32){.val = 5};
var n4 = inc.ListNode(i32){.val = 4};
// 构建引用指向
n0.next = &n1;
n1.next = &n2;
n2.next = &n3;
n3.next = &n4;
std.debug.print("初始化的链表为", .{});
try inc.PrintUtil.printLinkedList(i32, &n0);
// 插入节点
var tmp = inc.ListNode(i32){.val = 0};
insert(&n0, &tmp);
std.debug.print("插入节点后的链表为", .{});
try inc.PrintUtil.printLinkedList(i32, &n0);
// 删除节点
remove(&n0);
std.debug.print("删除节点后的链表为", .{});
try inc.PrintUtil.printLinkedList(i32, &n0);
// 访问节点
var node = access(&n0, 3);
std.debug.print("链表中索引 3 处的节点的值 = {}\n", .{node.?.val});
// 查找节点
var index = find(&n0, 2);
std.debug.print("链表中值为 2 的节点的索引 = {}\n", .{index});
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_array_and_linkedlist/array.zig | // File: array.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 随机返回一个数组元素
pub fn randomAccess(nums: []i32) i32 {
// 在区间 [0, nums.len) 中随机抽取一个整数
var randomIndex = std.crypto.random.intRangeLessThan(usize, 0, nums.len);
// 获取并返回随机元素
var randomNum = nums[randomIndex];
return randomNum;
}
// 扩展数组长度
pub fn extend(mem_allocator: std.mem.Allocator, nums: []i32, enlarge: usize) ![]i32 {
// 初始化一个扩展长度后的数组
var res = try mem_allocator.alloc(i32, nums.len + enlarge);
@memset(res, 0);
// 将原数组中的所有元素复制到新数组
std.mem.copy(i32, res, nums);
// 返回扩展后的新数组
return res;
}
// 在数组的索引 index 处插入元素 num
pub fn insert(nums: []i32, num: i32, index: usize) void {
// 把索引 index 以及之后的所有元素向后移动一位
var i = nums.len - 1;
while (i > index) : (i -= 1) {
nums[i] = nums[i - 1];
}
// 将 num 赋给 index 处元素
nums[index] = num;
}
// 删除索引 index 处元素
pub fn remove(nums: []i32, index: usize) void {
// 把索引 index 之后的所有元素向前移动一位
var i = index;
while (i < nums.len - 1) : (i += 1) {
nums[i] = nums[i + 1];
}
}
// 遍历数组
pub fn traverse(nums: []i32) void {
var count: i32 = 0;
// 通过索引遍历数组
var i: i32 = 0;
while (i < nums.len) : (i += 1) {
count += 1;
}
count = 0;
// 直接遍历数组
for (nums) |_| {
count += 1;
}
}
// 在数组中查找指定元素
pub fn find(nums: []i32, target: i32) i32 {
for (nums, 0..) |num, i| {
if (num == target) return @intCast(i);
}
return -1;
}
// Driver Code
pub fn main() !void {
// 初始化内存分配器
var mem_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer mem_arena.deinit();
const mem_allocator = mem_arena.allocator();
// 初始化数组
var arr = [_]i32{0} ** 5;
std.debug.print("数组 arr = ", .{});
inc.PrintUtil.printArray(i32, &arr);
var array = [_]i32{ 1, 3, 2, 5, 4 };
var known_at_runtime_zero: usize = 0;
var nums = array[known_at_runtime_zero..];
std.debug.print("\n数组 nums = ", .{});
inc.PrintUtil.printArray(i32, nums);
// 随机访问
var randomNum = randomAccess(nums);
std.debug.print("\n在 nums 中获取随机元素 {}", .{randomNum});
// 长度扩展
nums = try extend(mem_allocator, nums, 3);
std.debug.print("\n将数组长度扩展至 8 ,得到 nums = ", .{});
inc.PrintUtil.printArray(i32, nums);
// 插入元素
insert(nums, 6, 3);
std.debug.print("\n在索引 3 处插入数字 6 ,得到 nums = ", .{});
inc.PrintUtil.printArray(i32, nums);
// 删除元素
remove(nums, 2);
std.debug.print("\n删除索引 2 处的元素,得到 nums = ", .{});
inc.PrintUtil.printArray(i32, nums);
// 遍历数组
traverse(nums);
// 查找元素
var index = find(nums, 3);
std.debug.print("\n在 nums 中查找元素 3 ,得到索引 = {}\n", .{index});
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_array_and_linkedlist/list.zig | // File: list.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// Driver Code
pub fn main() !void {
// 初始化列表
var list = std.ArrayList(i32).init(std.heap.page_allocator);
// 延迟释放内存
defer list.deinit();
try list.appendSlice(&[_]i32{ 1, 3, 2, 5, 4 });
std.debug.print("列表 list = ", .{});
inc.PrintUtil.printList(i32, list);
// 访问元素
var num = list.items[1];
std.debug.print("\n访问索引 1 处的元素,得到 num = {}", .{num});
// 更新元素
list.items[1] = 0;
std.debug.print("\n将索引 1 处的元素更新为 0 ,得到 list = ", .{});
inc.PrintUtil.printList(i32, list);
// 清空列表
list.clearRetainingCapacity();
std.debug.print("\n清空列表后 list = ", .{});
inc.PrintUtil.printList(i32, list);
// 尾部添加元素
try list.append(1);
try list.append(3);
try list.append(2);
try list.append(5);
try list.append(4);
std.debug.print("\n添加元素后 list = ", .{});
inc.PrintUtil.printList(i32, list);
// 中间插入元素
try list.insert(3, 6);
std.debug.print("\n在索引 3 处插入数字 6 ,得到 list = ", .{});
inc.PrintUtil.printList(i32, list);
// 删除元素
_ = list.orderedRemove(3);
std.debug.print("\n删除索引 3 处的元素,得到 list = ", .{});
inc.PrintUtil.printList(i32, list);
// 通过索引遍历列表
var count: i32 = 0;
var i: i32 = 0;
while (i < list.items.len) : (i += 1) {
count += 1;
}
// 直接遍历列表元素
count = 0;
for (list.items) |_| {
count += 1;
}
// 拼接两个列表
var list1 = std.ArrayList(i32).init(std.heap.page_allocator);
defer list1.deinit();
try list1.appendSlice(&[_]i32{ 6, 8, 7, 10, 9 });
try list.insertSlice(list.items.len, list1.items);
std.debug.print("\n将列表 list1 拼接到 list 之后,得到 list = ", .{});
inc.PrintUtil.printList(i32, list);
// 排序列表
std.mem.sort(i32, list.items, {}, comptime std.sort.asc(i32));
std.debug.print("\n排序列表后 list = ", .{});
inc.PrintUtil.printList(i32, list);
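    // Added sketch (not part of the original file): ArrayList tracks the element
    // count (items.len) and the allocated capacity separately, which is why
    // append() is amortized O(1).
    std.debug.print("\nlist.items.len = {} , list.capacity = {}\n", .{ list.items.len, list.capacity });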
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_searching/linear_search.zig | // File: linear_search.zig
// Created Time: 2023-01-13
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 线性查找(数组)
fn linearSearchArray(comptime T: type, nums: std.ArrayList(T), target: T) T {
// 遍历数组
for (nums.items, 0..) |num, i| {
// 找到目标元素, 返回其索引
if (num == target) {
return @intCast(i);
}
}
// 未找到目标元素,返回 -1
return -1;
}
// 线性查找(链表)
pub fn linearSearchLinkedList(comptime T: type, node: ?*inc.ListNode(T), target: T) ?*inc.ListNode(T) {
var head = node;
// 遍历链表
while (head != null) {
// 找到目标节点,返回之
if (head.?.val == target) return head;
head = head.?.next;
}
return null;
}
// Driver Code
pub fn main() !void {
var target: i32 = 3;
// 在数组中执行线性查找
var nums = std.ArrayList(i32).init(std.heap.page_allocator);
defer nums.deinit();
try nums.appendSlice(&[_]i32{ 1, 5, 3, 2, 4, 7, 5, 9, 10, 8 });
var index = linearSearchArray(i32, nums, target);
std.debug.print("目标元素 3 的索引 = {}\n", .{index});
// 在链表中执行线性查找
var mem_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer mem_arena.deinit();
const mem_allocator = mem_arena.allocator();
var head = try inc.ListUtil.listToLinkedList(i32, mem_allocator, nums);
var node = linearSearchLinkedList(i32, head, target);
std.debug.print("目标节点值 3 的对应节点对象为 ", .{});
try inc.PrintUtil.printLinkedList(i32, node);
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_searching/two_sum.zig | // File: two_sum.zig
// Created Time: 2023-01-07
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 方法一:暴力枚举
pub fn twoSumBruteForce(nums: []i32, target: i32) ?[2]i32 {
var size: usize = nums.len;
var i: usize = 0;
// 两层循环,时间复杂度 O(n^2)
while (i < size - 1) : (i += 1) {
var j = i + 1;
while (j < size) : (j += 1) {
if (nums[i] + nums[j] == target) {
return [_]i32{@intCast(i), @intCast(j)};
}
}
}
return null;
}
// 方法二:辅助哈希表
pub fn twoSumHashTable(nums: []i32, target: i32) !?[2]i32 {
var size: usize = nums.len;
// 辅助哈希表,空间复杂度 O(n)
var dic = std.AutoHashMap(i32, i32).init(std.heap.page_allocator);
defer dic.deinit();
var i: usize = 0;
// 单层循环,时间复杂度 O(n)
while (i < size) : (i += 1) {
if (dic.contains(target - nums[i])) {
return [_]i32{dic.get(target - nums[i]).?, @intCast(i)};
}
try dic.put(nums[i], @intCast(i));
}
return null;
}
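// Added sketch (not part of the original file): a minimal test for the
// brute-force variant; it can be run with `zig test` if the build setup allows.
test "twoSumBruteForce finds indices of 2 + 7 == 9" {
    var nums = [_]i32{ 2, 7, 11, 15 };
    const res = twoSumBruteForce(&nums, 9).?;
    try std.testing.expectEqual(@as(i32, 0), res[0]);
    try std.testing.expectEqual(@as(i32, 1), res[1]);
}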
pub fn main() !void {
// ======= Test Case =======
var nums = [_]i32{ 2, 7, 11, 15 };
var target: i32 = 9;
// ====== Driver Code ======
// 方法一
var res = twoSumBruteForce(&nums, target).?;
std.debug.print("方法一 res = ", .{});
inc.PrintUtil.printArray(i32, &res);
// 方法二
res = (try twoSumHashTable(&nums, target)).?;
std.debug.print("\n方法二 res = ", .{});
inc.PrintUtil.printArray(i32, &res);
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_searching/hashing_search.zig | // File: hashing_search.zig
// Created Time: 2023-01-15
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 哈希查找(数组)
fn hashingSearch(comptime T: type, map: std.AutoHashMap(T, T), target: T) T {
// 哈希表的 key: 目标元素,value: 索引
// 若哈希表中无此 key ,返回 -1
if (map.getKey(target) == null) return -1;
return map.get(target).?;
}
// 哈希查找(链表)
fn hashingSearch1(comptime T: type, map: std.AutoHashMap(T, *inc.ListNode(T)), target: T) ?*inc.ListNode(T) {
// 哈希表的 key: 目标节点值,value: 节点对象
// 若哈希表中无此 key ,返回 null
if (map.getKey(target) == null) return null;
return map.get(target);
}
// Driver Code
pub fn main() !void {
var target: i32 = 3;
// 哈希查找(数组)
var nums = [_]i32{ 1, 5, 3, 2, 4, 7, 5, 9, 10, 8 };
// 初始化哈希表
var map = std.AutoHashMap(i32, i32).init(std.heap.page_allocator);
defer map.deinit();
for (nums, 0..) |num, i| {
try map.put(num, @intCast(i)); // key: 元素,value: 索引
}
var index = hashingSearch(i32, map, target);
std.debug.print("目标元素 3 的索引 = {}\n", .{index});
// 哈希查找(链表)
var mem_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer mem_arena.deinit();
const mem_allocator = mem_arena.allocator();
var head = try inc.ListUtil.arrToLinkedList(i32, mem_allocator, &nums);
// 初始化哈希表
var map1 = std.AutoHashMap(i32, *inc.ListNode(i32)).init(std.heap.page_allocator);
defer map1.deinit();
while (head != null) {
try map1.put(head.?.val, head.?);
head = head.?.next;
}
var node = hashingSearch1(i32, map1, target);
std.debug.print("目标节点值 3 的对应节点对象为 ", .{});
try inc.PrintUtil.printLinkedList(i32, node);
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_searching/binary_search.zig | // File: binary_search.zig
// Created Time: 2023-01-15
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 二分查找(双闭区间)
fn binarySearch(comptime T: type, nums: std.ArrayList(T), target: T) T {
// 初始化双闭区间 [0, n-1] ,即 i, j 分别指向数组首元素、尾元素
var i: usize = 0;
var j: usize = nums.items.len - 1;
// 循环,当搜索区间为空时跳出(当 i > j 时为空)
while (i <= j) {
var m = (i + j) / 2; // 计算中点索引 m
if (nums.items[m] < target) { // 此情况说明 target 在区间 [m+1, j] 中
i = m + 1;
} else if (nums.items[m] > target) { // 此情况说明 target 在区间 [i, m-1] 中
j = m - 1;
} else { // 找到目标元素,返回其索引
return @intCast(m);
}
}
// 未找到目标元素,返回 -1
return -1;
}
// 二分查找(左闭右开)
fn binarySearch1(comptime T: type, nums: std.ArrayList(T), target: T) T {
// 初始化左闭右开 [0, n) ,即 i, j 分别指向数组首元素、尾元素+1
var i: usize = 0;
var j: usize = nums.items.len;
// 循环,当搜索区间为空时跳出(当 i = j 时为空)
while (i < j) {
var m = (i + j) / 2; // 计算中点索引 m
if (nums.items[m] < target) { // 此情况说明 target 在区间 [m+1, j) 中
i = m + 1;
} else if (nums.items[m] > target) { // 此情况说明 target 在区间 [i, m) 中
j = m;
} else { // 找到目标元素,返回其索引
return @intCast(m);
}
}
// 未找到目标元素,返回 -1
return -1;
}
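// Added note (not part of the original file): the midpoint can also be computed
// as i + (j - i) / 2, a common defensive idiom that avoids overflow when i + j
// would exceed the index type's range (assumes j >= i).
fn midpoint(i: usize, j: usize) usize {
    return i + (j - i) / 2;
}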
// Driver Code
pub fn main() !void {
var target: i32 = 6;
var nums = std.ArrayList(i32).init(std.heap.page_allocator);
defer nums.deinit();
try nums.appendSlice(&[_]i32{ 1, 3, 6, 8, 12, 15, 23, 67, 70, 92 });
// 二分查找(双闭区间)
var index = binarySearch(i32, nums, target);
std.debug.print("目标元素 6 的索引 = {}\n", .{index});
// 二分查找(左闭右开)
index = binarySearch1(i32, nums, target);
std.debug.print("目标元素 6 的索引 = {}\n", .{index});
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_hashing/hash_map.zig | // File: hash_map.zig
// Created Time: 2023-01-13
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// Driver Code
pub fn main() !void {
// 初始化哈希表
var map = std.AutoHashMap(i32, []const u8).init(std.heap.page_allocator);
// 延迟释放内存
defer map.deinit();
// 添加操作
// 在哈希表中添加键值对 (key, value)
try map.put(12836, "小哈");
try map.put(15937, "小啰");
try map.put(16750, "小算");
try map.put(13276, "小法");
try map.put(10583, "小鸭");
std.debug.print("\n添加完成后,哈希表为\nKey -> Value\n", .{});
inc.PrintUtil.printHashMap(i32, []const u8, map);
// 查询操作
// 向哈希表输入键 key ,得到值 value
var name = map.get(15937).?;
std.debug.print("\n输入学号 15937 ,查询到姓名 {s}\n", .{name});
// 删除操作
// 在哈希表中删除键值对 (key, value)
_ = map.remove(10583);
std.debug.print("\n删除 10583 后,哈希表为\nKey -> Value\n", .{});
inc.PrintUtil.printHashMap(i32, []const u8, map);
// 遍历哈希表
std.debug.print("\n遍历键值对 Key->Value\n", .{});
inc.PrintUtil.printHashMap(i32, []const u8, map);
std.debug.print("\n单独遍历键 Key\n", .{});
var it = map.iterator();
while (it.next()) |kv| {
std.debug.print("{}\n", .{kv.key_ptr.*});
}
std.debug.print("\n单独遍历值 value\n", .{});
it = map.iterator();
while (it.next()) |kv| {
std.debug.print("{s}\n", .{kv.value_ptr.*});
}
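    // Added sketch (not part of the original file): AutoHashMap also provides
    // contains() for a pure membership test and getOrPut() to insert only when
    // the key is absent, each in a single hash lookup.
    std.debug.print("\nmap.contains(12836) = {}\n", .{map.contains(12836)});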
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_hashing/array_hash_map.zig | // File: array_hash_map.zig
// Created Time: 2023-01-15
// Author: sjinzh ([email protected])
const std = @import("std");
const inc = @import("include");
// 键值对
const Pair = struct {
key: usize = undefined,
val: []const u8 = undefined,
pub fn init(key: usize, val: []const u8) Pair {
return Pair {
.key = key,
.val = val,
};
}
};
// 基于数组简易实现的哈希表
pub fn ArrayHashMap(comptime T: type) type {
return struct {
bucket: ?std.ArrayList(?T) = null,
mem_allocator: std.mem.Allocator = undefined,
const Self = @This();
// 构造函数
pub fn init(self: *Self, allocator: std.mem.Allocator) !void {
self.mem_allocator = allocator;
// 初始化一个长度为 100 的桶(数组)
self.bucket = std.ArrayList(?T).init(self.mem_allocator);
var i: i32 = 0;
while (i < 100) : (i += 1) {
try self.bucket.?.append(null);
}
}
// 析构函数
pub fn deinit(self: *Self) void {
if (self.bucket != null) self.bucket.?.deinit();
}
// 哈希函数
fn hashFunc(key: usize) usize {
var index = key % 100;
return index;
}
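        // Added sketch (not part of the original file): this toy table has no
        // collision handling -- two keys with equal key % 100 land in the same
        // bucket and the later put() overwrites the earlier one.
        pub fn wouldCollide(key1: usize, key2: usize) bool {
            return hashFunc(key1) == hashFunc(key2);
        }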
// 查询操作
pub fn get(self: *Self, key: usize) []const u8 {
var index = hashFunc(key);
var pair = self.bucket.?.items[index];
return pair.?.val;
}
// 添加操作
pub fn put(self: *Self, key: usize, val: []const u8) !void {
var pair = Pair.init(key, val);
var index = hashFunc(key);
self.bucket.?.items[index] = pair;
}
// 删除操作
pub fn remove(self: *Self, key: usize) !void {
var index = hashFunc(key);
// 置为 null ,代表删除
self.bucket.?.items[index] = null;
}
// 获取所有键值对
pub fn pairSet(self: *Self) !std.ArrayList(T) {
var entry_set = std.ArrayList(T).init(self.mem_allocator);
for (self.bucket.?.items) |item| {
if (item == null) continue;
try entry_set.append(item.?);
}
return entry_set;
}
// 获取所有键
pub fn keySet(self: *Self) !std.ArrayList(usize) {
var key_set = std.ArrayList(usize).init(self.mem_allocator);
for (self.bucket.?.items) |item| {
if (item == null) continue;
try key_set.append(item.?.key);
}
return key_set;
}
// 获取所有值
pub fn valueSet(self: *Self) !std.ArrayList([]const u8) {
var value_set = std.ArrayList([]const u8).init(self.mem_allocator);
for (self.bucket.?.items) |item| {
if (item == null) continue;
try value_set.append(item.?.val);
}
return value_set;
}
// 打印哈希表
pub fn print(self: *Self) !void {
var entry_set = try self.pairSet();
defer entry_set.deinit();
for (entry_set.items) |item| {
std.debug.print("{} -> {s}\n", .{item.key, item.val});
}
}
};
}
// Driver Code
pub fn main() !void {
// 初始化哈希表
var map = ArrayHashMap(Pair){};
try map.init(std.heap.page_allocator);
defer map.deinit();
// 添加操作
// 在哈希表中添加键值对 (key, value)
try map.put(12836, "小哈");
try map.put(15937, "小啰");
try map.put(16750, "小算");
try map.put(13276, "小法");
try map.put(10583, "小鸭");
std.debug.print("\n添加完成后,哈希表为\nKey -> Value\n", .{});
try map.print();
// 查询操作
// 向哈希表输入键 key ,得到值 value
var name = map.get(15937);
std.debug.print("\n输入学号 15937 ,查询到姓名 {s}\n", .{name});
// 删除操作
// 在哈希表中删除键值对 (key, value)
try map.remove(10583);
std.debug.print("\n删除 10583 后,哈希表为\nKey -> Value\n", .{});
try map.print();
// 遍历哈希表
std.debug.print("\n遍历键值对 Key->Value\n", .{});
var entry_set = try map.pairSet();
for (entry_set.items) |kv| {
std.debug.print("{} -> {s}\n", .{kv.key, kv.val});
}
defer entry_set.deinit();
std.debug.print("\n单独遍历键 Key\n", .{});
var key_set = try map.keySet();
for (key_set.items) |key| {
std.debug.print("{}\n", .{key});
}
defer key_set.deinit();
std.debug.print("\n单独遍历值 value\n", .{});
var value_set = try map.valueSet();
for (value_set.items) |val| {
std.debug.print("{s}\n", .{val});
}
defer value_set.deinit();
_ = try std.io.getStdIn().reader().readByte();
} |
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/climbing_stairs_backtrack.zig | // File: climbing_stairs_backtrack.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 回溯
fn backtrack(choices: []i32, state: i32, n: i32, res: std.ArrayList(i32)) void {
// 当爬到第 n 阶时,方案数量加 1
if (state == n) {
res.items[0] = res.items[0] + 1;
}
// 遍历所有选择
for (choices) |choice| {
// 剪枝:不允许越过第 n 阶
if (state + choice > n) {
break;
}
// 尝试:做出选择,更新状态
backtrack(choices, state + choice, n, res);
// 回退
}
}
// 爬楼梯:回溯
fn climbingStairsBacktrack(n: usize) !i32 {
var choices = [_]i32{ 1, 2 }; // 可选择向上爬 1 或 2 阶
var state: i32 = 0; // 从第 0 阶开始爬
var res = std.ArrayList(i32).init(std.heap.page_allocator);
defer res.deinit();
try res.append(0); // 使用 res[0] 记录方案数量
backtrack(&choices, state, @intCast(n), res);
return res.items[0];
}
// Driver Code
pub fn main() !void {
var n: usize = 9;
var res = try climbingStairsBacktrack(n);
std.debug.print("爬 {} 阶楼梯共有 {} 种方案\n", .{ n, res });
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/knapsack.zig | // File: knapsack.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 0-1 背包:暴力搜索
fn knapsackDFS(wgt: []i32, val: []i32, i: usize, c: usize) i32 {
// 若已选完所有物品或背包无容量,则返回价值 0
if (i == 0 or c == 0) {
return 0;
}
// 若超过背包容量,则只能不放入背包
if (wgt[i - 1] > c) {
return knapsackDFS(wgt, val, i - 1, c);
}
// 计算不放入和放入物品 i 的最大价值
var no = knapsackDFS(wgt, val, i - 1, c);
var yes = knapsackDFS(wgt, val, i - 1, c - @as(usize, @intCast(wgt[i - 1]))) + val[i - 1];
// 返回两种方案中价值更大的那一个
return @max(no, yes);
}
// 0-1 背包:记忆化搜索
fn knapsackDFSMem(wgt: []i32, val: []i32, mem: anytype, i: usize, c: usize) i32 {
// 若已选完所有物品或背包无容量,则返回价值 0
if (i == 0 or c == 0) {
return 0;
}
// 若已有记录,则直接返回
if (mem[i][c] != -1) {
return mem[i][c];
}
// 若超过背包容量,则只能不放入背包
if (wgt[i - 1] > c) {
return knapsackDFSMem(wgt, val, mem, i - 1, c);
}
// 计算不放入和放入物品 i 的最大价值
var no = knapsackDFSMem(wgt, val, mem, i - 1, c);
var yes = knapsackDFSMem(wgt, val, mem, i - 1, c - @as(usize, @intCast(wgt[i - 1]))) + val[i - 1];
// 记录并返回两种方案中价值更大的那一个
mem[i][c] = @max(no, yes);
return mem[i][c];
}
// 0-1 背包:动态规划
fn knapsackDP(comptime wgt: []i32, val: []i32, comptime cap: usize) i32 {
comptime var n = wgt.len;
// 初始化 dp 表
var dp = [_][cap + 1]i32{[_]i32{0} ** (cap + 1)} ** (n + 1);
// 状态转移
for (1..n + 1) |i| {
for (1..cap + 1) |c| {
if (wgt[i - 1] > c) {
// 若超过背包容量,则不选物品 i
dp[i][c] = dp[i - 1][c];
} else {
// 不选和选物品 i 这两种方案的较大值
dp[i][c] = @max(dp[i - 1][c], dp[i - 1][c - @as(usize, @intCast(wgt[i - 1]))] + val[i - 1]);
}
}
}
return dp[n][cap];
}
// 0-1 背包:状态压缩后的动态规划
fn knapsackDPComp(wgt: []i32, val: []i32, comptime cap: usize) i32 {
var n = wgt.len;
// 初始化 dp 表
var dp = [_]i32{0} ** (cap + 1);
// 状态转移
for (1..n + 1) |i| {
// 倒序遍历
var c = cap;
while (c > 0) : (c -= 1) {
if (wgt[i - 1] <= c) {
// 不选和选物品 i 这两种方案的较大值
dp[c] = @max(dp[c], dp[c - @as(usize, @intCast(wgt[i - 1]))] + val[i - 1]);
}
}
}
return dp[cap];
}
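// Added note (not part of the original file): the compressed 0-1 version must
// sweep the capacity c from high to low so that dp[c - wgt[i-1]] still holds the
// value from the previous item row; sweeping upward would allow item i to be
// taken more than once, which is the unbounded-knapsack recurrence instead
// (see unbounded_knapsack.zig).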
// Driver Code
pub fn main() !void {
comptime var wgt = [_]i32{ 10, 20, 30, 40, 50 };
comptime var val = [_]i32{ 50, 120, 150, 210, 240 };
comptime var cap = 50;
comptime var n = wgt.len;
// 暴力搜索
var res = knapsackDFS(&wgt, &val, n, cap);
std.debug.print("不超过背包容量的最大物品价值为 {}\n", .{res});
// 记忆搜索
var mem = [_][cap + 1]i32{[_]i32{-1} ** (cap + 1)} ** (n + 1);
res = knapsackDFSMem(&wgt, &val, @constCast(&mem), n, cap);
std.debug.print("不超过背包容量的最大物品价值为 {}\n", .{res});
// 动态规划
res = knapsackDP(&wgt, &val, cap);
std.debug.print("不超过背包容量的最大物品价值为 {}\n", .{res});
// 状态压缩后的动态规划
res = knapsackDPComp(&wgt, &val, cap);
std.debug.print("不超过背包容量的最大物品价值为 {}\n", .{res});
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/climbing_stairs_dp.zig | // File: climbing_stairs_dp.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 爬楼梯:动态规划
fn climbingStairsDP(comptime n: usize) i32 {
// 已知 dp[1] 和 dp[2] ,返回之
if (n == 1 or n == 2) {
return @intCast(n);
}
// 初始化 dp 表,用于存储子问题的解
var dp = [_]i32{-1} ** (n + 1);
// 初始状态:预设最小子问题的解
dp[1] = 1;
dp[2] = 2;
// 状态转移:从较小子问题逐步求解较大子问题
for (3..n + 1) |i| {
dp[i] = dp[i - 1] + dp[i - 2];
}
return dp[n];
}
// 爬楼梯:状态压缩后的动态规划
fn climbingStairsDPComp(comptime n: usize) i32 {
if (n == 1 or n == 2) {
return @intCast(n);
}
var a: i32 = 1;
var b: i32 = 2;
for (3..n + 1) |_| {
var tmp = b;
b = a + b;
a = tmp;
}
return b;
}
// Driver Code
pub fn main() !void {
comptime var n: usize = 9;
var res = climbingStairsDP(n);
std.debug.print("爬 {} 阶楼梯共有 {} 种方案\n", .{ n, res });
res = climbingStairsDPComp(n);
std.debug.print("爬 {} 阶楼梯共有 {} 种方案\n", .{ n, res });
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/min_path_sum.zig | // File: min_path_sum.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 最小路径和:暴力搜索
fn minPathSumDFS(grid: anytype, i: i32, j: i32) i32 {
// 若为左上角单元格,则终止搜索
if (i == 0 and j == 0) {
return grid[0][0];
}
// 若行列索引越界,则返回 +∞ 代价
if (i < 0 or j < 0) {
return std.math.maxInt(i32);
}
// 计算从左上角到 (i-1, j) 和 (i, j-1) 的最小路径代价
var left = minPathSumDFS(grid, i - 1, j);
var up = minPathSumDFS(grid, i, j - 1);
// 返回从左上角到 (i, j) 的最小路径代价
return @min(left, up) + grid[@as(usize, @intCast(i))][@as(usize, @intCast(j))];
}
// 最小路径和:记忆化搜索
fn minPathSumDFSMem(grid: anytype, mem: anytype, i: i32, j: i32) i32 {
// 若为左上角单元格,则终止搜索
if (i == 0 and j == 0) {
return grid[0][0];
}
// 若行列索引越界,则返回 +∞ 代价
if (i < 0 or j < 0) {
return std.math.maxInt(i32);
}
// 若已有记录,则直接返回
if (mem[@as(usize, @intCast(i))][@as(usize, @intCast(j))] != -1) {
return mem[@as(usize, @intCast(i))][@as(usize, @intCast(j))];
}
// 计算从左上角到 (i-1, j) 和 (i, j-1) 的最小路径代价
var left = minPathSumDFSMem(grid, mem, i - 1, j);
var up = minPathSumDFSMem(grid, mem, i, j - 1);
// 记录并返回从左上角到 (i, j) 的最小路径代价
mem[@as(usize, @intCast(i))][@as(usize, @intCast(j))] = @min(left, up) + grid[@as(usize, @intCast(i))][@as(usize, @intCast(j))];
return mem[@as(usize, @intCast(i))][@as(usize, @intCast(j))];
}
// 最小路径和:动态规划
fn minPathSumDP(comptime grid: anytype) i32 {
comptime var n = grid.len;
comptime var m = grid[0].len;
// 初始化 dp 表
var dp = [_][m]i32{[_]i32{0} ** m} ** n;
dp[0][0] = grid[0][0];
// 状态转移:首行
for (1..m) |j| {
dp[0][j] = dp[0][j - 1] + grid[0][j];
}
// 状态转移:首列
for (1..n) |i| {
dp[i][0] = dp[i - 1][0] + grid[i][0];
}
// 状态转移:其余行列
for (1..n) |i| {
for (1..m) |j| {
dp[i][j] = @min(dp[i][j - 1], dp[i - 1][j]) + grid[i][j];
}
}
return dp[n - 1][m - 1];
}
// 最小路径和:状态压缩后的动态规划
fn minPathSumDPComp(comptime grid: anytype) i32 {
comptime var n = grid.len;
comptime var m = grid[0].len;
// 初始化 dp 表
var dp = [_]i32{0} ** m;
// 状态转移:首行
dp[0] = grid[0][0];
for (1..m) |j| {
dp[j] = dp[j - 1] + grid[0][j];
}
// 状态转移:其余行
for (1..n) |i| {
// 状态转移:首列
dp[0] = dp[0] + grid[i][0];
for (1..m) |j| {
dp[j] = @min(dp[j - 1], dp[j]) + grid[i][j];
}
}
return dp[m - 1];
}
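// Added note (not part of the original file): the compressed version keeps only
// one row of the dp table; when the inner loop reaches column j, dp[j] still
// holds the value of the cell above (row i-1) while dp[j-1] already holds the
// cell to the left (row i), which is exactly what the recurrence needs.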
// Driver Code
pub fn main() !void {
comptime var grid = [_][4]i32{
[_]i32{ 1, 3, 1, 5 },
[_]i32{ 2, 2, 4, 2 },
[_]i32{ 5, 3, 2, 1 },
[_]i32{ 4, 3, 5, 2 },
};
comptime var n = grid.len;
comptime var m = grid[0].len;
// 暴力搜索
var res = minPathSumDFS(&grid, n - 1, m - 1);
std.debug.print("从左上角到右下角的最小路径和为 {}\n", .{res});
// 记忆化搜索
var mem = [_][m]i32{[_]i32{-1} ** m} ** n;
res = minPathSumDFSMem(&grid, &mem, n - 1, m - 1);
std.debug.print("从左上角到右下角的最小路径和为 {}\n", .{res});
// 动态规划
res = minPathSumDP(&grid);
std.debug.print("从左上角到右下角的最小路径和为 {}\n", .{res});
// 状态压缩后的动态规划
res = minPathSumDPComp(&grid);
std.debug.print("从左上角到右下角的最小路径和为 {}\n", .{res});
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/climbing_stairs_dfs_mem.zig | // File: climbing_stairs_dfs_mem.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 记忆化搜索
fn dfs(i: usize, mem: []i32) i32 {
// 已知 dp[1] 和 dp[2] ,返回之
if (i == 1 or i == 2) {
return @intCast(i);
}
// 若存在记录 dp[i] ,则直接返回之
if (mem[i] != -1) {
return mem[i];
}
// dp[i] = dp[i-1] + dp[i-2]
var count = dfs(i - 1, mem) + dfs(i - 2, mem);
// 记录 dp[i]
mem[i] = count;
return count;
}
// 爬楼梯:记忆化搜索
fn climbingStairsDFSMem(comptime n: usize) i32 {
// mem[i] 记录爬到第 i 阶的方案总数,-1 代表无记录
var mem = [_]i32{ -1 } ** (n + 1);
return dfs(n, &mem);
}
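// Added note (not part of the original file): memoization collapses the
// exponential O(2^n) recursion of climbing_stairs_dfs.zig down to O(n), since
// each dp[i] is computed once and then served from mem on every later call.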
// Driver Code
pub fn main() !void {
comptime var n: usize = 9;
var res = climbingStairsDFSMem(n);
std.debug.print("爬 {} 阶楼梯共有 {} 种方案\n", .{ n, res });
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/min_cost_climbing_stairs_dp.zig | // File: min_cost_climbing_stairs_dp.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 爬楼梯最小代价:动态规划
fn minCostClimbingStairsDP(comptime cost: []i32) i32 {
comptime var n = cost.len - 1;
if (n == 1 or n == 2) {
return cost[n];
}
// 初始化 dp 表,用于存储子问题的解
var dp = [_]i32{-1} ** (n + 1);
// 初始状态:预设最小子问题的解
dp[1] = cost[1];
dp[2] = cost[2];
// 状态转移:从较小子问题逐步求解较大子问题
for (3..n + 1) |i| {
dp[i] = @min(dp[i - 1], dp[i - 2]) + cost[i];
}
return dp[n];
}
// 爬楼梯最小代价:状态压缩后的动态规划
fn minCostClimbingStairsDPComp(cost: []i32) i32 {
var n = cost.len - 1;
if (n == 1 or n == 2) {
return cost[n];
}
var a = cost[1];
var b = cost[2];
// 状态转移:从较小子问题逐步求解较大子问题
for (3..n + 1) |i| {
var tmp = b;
b = @min(a, tmp) + cost[i];
a = tmp;
}
return b;
}
// Driver Code
pub fn main() !void {
comptime var cost = [_]i32{ 0, 1, 10, 1, 1, 1, 10, 1, 1, 10, 1 };
std.debug.print("输入楼梯的代价列表为 {any}\n", .{cost});
var res = minCostClimbingStairsDP(&cost);
std.debug.print("输入楼梯的代价列表为 {}\n", .{res});
res = minCostClimbingStairsDPComp(&cost);
std.debug.print("输入楼梯的代价列表为 {}\n", .{res});
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/coin_change_ii.zig | // File: coin_change_ii.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 零钱兑换 II:动态规划
fn coinChangeIIDP(comptime coins: []i32, comptime amt: usize) i32 {
comptime var n = coins.len;
// 初始化 dp 表
var dp = [_][amt + 1]i32{[_]i32{0} ** (amt + 1)} ** (n + 1);
// 初始化首列
for (0..n + 1) |i| {
dp[i][0] = 1;
}
// 状态转移
for (1..n + 1) |i| {
for (1..amt + 1) |a| {
if (coins[i - 1] > @as(i32, @intCast(a))) {
// 若超过背包容量,则不选硬币 i
dp[i][a] = dp[i - 1][a];
} else {
// 不选和选硬币 i 这两种方案之和
dp[i][a] = dp[i - 1][a] + dp[i][a - @as(usize, @intCast(coins[i - 1]))];
}
}
}
return dp[n][amt];
}
// 零钱兑换 II:状态压缩后的动态规划
fn coinChangeIIDPComp(comptime coins: []i32, comptime amt: usize) i32 {
comptime var n = coins.len;
// 初始化 dp 表
var dp = [_]i32{0} ** (amt + 1);
dp[0] = 1;
// 状态转移
for (1..n + 1) |i| {
for (1..amt + 1) |a| {
if (coins[i - 1] > @as(i32, @intCast(a))) {
// 若超过背包容量,则不选硬币 i
dp[a] = dp[a];
} else {
// 不选和选硬币 i 这两种方案之和
dp[a] = dp[a] + dp[a - @as(usize, @intCast(coins[i - 1]))];
}
}
}
return dp[amt];
}
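// Added note (not part of the original file): unlike the 0-1 knapsack, this
// compressed version deliberately sweeps the amount a from low to high, because
// each coin may be reused any number of times -- dp[a - coins[i-1]] is meant to
// already include coin i.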
// Driver Code
pub fn main() !void {
comptime var coins = [_]i32{ 1, 2, 5 };
comptime var amt: usize = 5;
// 动态规划
var res = coinChangeIIDP(&coins, amt);
std.debug.print("凑出目标金额的硬币组合数量为 {}\n", .{res});
// 状态压缩后的动态规划
res = coinChangeIIDPComp(&coins, amt);
std.debug.print("凑出目标金额的硬币组合数量为 {}\n", .{res});
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/edit_distance.zig | // File: edit_distance.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 编辑距离:暴力搜索
fn editDistanceDFS(comptime s: []const u8, comptime t: []const u8, i: usize, j: usize) i32 {
// 若 s 和 t 都为空,则返回 0
if (i == 0 and j == 0) {
return 0;
}
// 若 s 为空,则返回 t 长度
if (i == 0) {
return @intCast(j);
}
// 若 t 为空,则返回 s 长度
if (j == 0) {
return @intCast(i);
}
// 若两字符相等,则直接跳过此两字符
if (s[i - 1] == t[j - 1]) {
return editDistanceDFS(s, t, i - 1, j - 1);
}
// 最少编辑步数 = 插入、删除、替换这三种操作的最少编辑步数 + 1
var insert = editDistanceDFS(s, t, i, j - 1);
var delete = editDistanceDFS(s, t, i - 1, j);
var replace = editDistanceDFS(s, t, i - 1, j - 1);
// 返回最少编辑步数
return @min(@min(insert, delete), replace) + 1;
}
// 编辑距离:记忆化搜索
fn editDistanceDFSMem(comptime s: []const u8, comptime t: []const u8, mem: anytype, i: usize, j: usize) i32 {
// 若 s 和 t 都为空,则返回 0
if (i == 0 and j == 0) {
return 0;
}
// 若 s 为空,则返回 t 长度
if (i == 0) {
return @intCast(j);
}
// 若 t 为空,则返回 s 长度
if (j == 0) {
return @intCast(i);
}
// 若已有记录,则直接返回之
if (mem[i][j] != -1) {
return mem[i][j];
}
// 若两字符相等,则直接跳过此两字符
if (s[i - 1] == t[j - 1]) {
return editDistanceDFSMem(s, t, mem, i - 1, j - 1);
}
// 最少编辑步数 = 插入、删除、替换这三种操作的最少编辑步数 + 1
var insert = editDistanceDFSMem(s, t, mem, i, j - 1);
var delete = editDistanceDFSMem(s, t, mem, i - 1, j);
var replace = editDistanceDFSMem(s, t, mem, i - 1, j - 1);
// 记录并返回最少编辑步数
mem[i][j] = @min(@min(insert, delete), replace) + 1;
return mem[i][j];
}
// 编辑距离:动态规划
fn editDistanceDP(comptime s: []const u8, comptime t: []const u8) i32 {
comptime var n = s.len;
comptime var m = t.len;
var dp = [_][m + 1]i32{[_]i32{0} ** (m + 1)} ** (n + 1);
// 状态转移:首行首列
for (1..n + 1) |i| {
dp[i][0] = @intCast(i);
}
for (1..m + 1) |j| {
dp[0][j] = @intCast(j);
}
// 状态转移:其余行列
for (1..n + 1) |i| {
for (1..m + 1) |j| {
if (s[i - 1] == t[j - 1]) {
// 若两字符相等,则直接跳过此两字符
dp[i][j] = dp[i - 1][j - 1];
} else {
// 最少编辑步数 = 插入、删除、替换这三种操作的最少编辑步数 + 1
dp[i][j] = @min(@min(dp[i][j - 1], dp[i - 1][j]), dp[i - 1][j - 1]) + 1;
}
}
}
return dp[n][m];
}
// 编辑距离:状态压缩后的动态规划
fn editDistanceDPComp(comptime s: []const u8, comptime t: []const u8) i32 {
comptime var n = s.len;
comptime var m = t.len;
var dp = [_]i32{0} ** (m + 1);
// 状态转移:首行
for (1..m + 1) |j| {
dp[j] = @intCast(j);
}
// 状态转移:其余行
for (1..n + 1) |i| {
// 状态转移:首列
var leftup = dp[0]; // 暂存 dp[i-1, j-1]
dp[0] = @intCast(i);
// 状态转移:其余列
for (1..m + 1) |j| {
var temp = dp[j];
if (s[i - 1] == t[j - 1]) {
// 若两字符相等,则直接跳过此两字符
dp[j] = leftup;
} else {
// 最少编辑步数 = 插入、删除、替换这三种操作的最少编辑步数 + 1
dp[j] = @min(@min(dp[j - 1], dp[j]), leftup) + 1;
}
leftup = temp; // 更新为下一轮的 dp[i-1, j-1]
}
}
return dp[m];
}
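// Added note (not part of the original file): in the compressed version, leftup
// carries dp[i-1][j-1] across the inner loop -- dp[j] is about to be overwritten
// with its row-i value, so the old row-(i-1) value is stashed in temp and becomes
// the next column's leftup.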
// Driver Code
pub fn main() !void {
const s = "bag";
const t = "pack";
comptime var n = s.len;
comptime var m = t.len;
// 暴力搜索
var res = editDistanceDFS(s, t, n, m);
std.debug.print("将 {s} 更改为 {s} 最少需要编辑 {} 步\n", .{ s, t, res });
// 记忆搜索
var mem = [_][m + 1]i32{[_]i32{-1} ** (m + 1)} ** (n + 1);
res = editDistanceDFSMem(s, t, @constCast(&mem), n, m);
std.debug.print("将 {s} 更改为 {s} 最少需要编辑 {} 步\n", .{ s, t, res });
// 动态规划
res = editDistanceDP(s, t);
std.debug.print("将 {s} 更改为 {s} 最少需要编辑 {} 步\n", .{ s, t, res });
// 状态压缩后的动态规划
res = editDistanceDPComp(s, t);
std.debug.print("将 {s} 更改为 {s} 最少需要编辑 {} 步\n", .{ s, t, res });
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/climbing_stairs_dfs.zig | // File: climbing_stairs_dfs.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 搜索
fn dfs(i: usize) i32 {
// 已知 dp[1] 和 dp[2] ,返回之
if (i == 1 or i == 2) {
return @intCast(i);
}
// dp[i] = dp[i-1] + dp[i-2]
var count = dfs(i - 1) + dfs(i - 2);
return count;
}
// 爬楼梯:搜索
fn climbingStairsDFS(comptime n: usize) i32 {
return dfs(n);
}
// Driver Code
pub fn main() !void {
comptime var n: usize = 9;
var res = climbingStairsDFS(n);
std.debug.print("爬 {} 阶楼梯共有 {} 种方案\n", .{ n, res });
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/unbounded_knapsack.zig | // File: unbounded_knapsack.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 完全背包:动态规划
fn unboundedKnapsackDP(comptime wgt: []i32, val: []i32, comptime cap: usize) i32 {
comptime var n = wgt.len;
// 初始化 dp 表
var dp = [_][cap + 1]i32{[_]i32{0} ** (cap + 1)} ** (n + 1);
// 状态转移
for (1..n + 1) |i| {
for (1..cap + 1) |c| {
if (wgt[i - 1] > c) {
// 若超过背包容量,则不选物品 i
dp[i][c] = dp[i - 1][c];
} else {
// 不选和选物品 i 这两种方案的较大值
dp[i][c] = @max(dp[i - 1][c], dp[i][c - @as(usize, @intCast(wgt[i - 1]))] + val[i - 1]);
}
}
}
return dp[n][cap];
}
// 完全背包:状态压缩后的动态规划
fn unboundedKnapsackDPComp(comptime wgt: []i32, val: []i32, comptime cap: usize) i32 {
comptime var n = wgt.len;
// 初始化 dp 表
var dp = [_]i32{0} ** (cap + 1);
// 状态转移
for (1..n + 1) |i| {
for (1..cap + 1) |c| {
if (wgt[i - 1] > c) {
// 若超过背包容量,则不选物品 i
dp[c] = dp[c];
} else {
// 不选和选物品 i 这两种方案的较大值
dp[c] = @max(dp[c], dp[c - @as(usize, @intCast(wgt[i - 1]))] + val[i - 1]);
}
}
}
return dp[cap];
}
// Driver Code
pub fn main() !void {
comptime var wgt = [_]i32{ 1, 2, 3 };
comptime var val = [_]i32{ 5, 11, 15 };
comptime var cap = 4;
// 动态规划
var res = unboundedKnapsackDP(&wgt, &val, cap);
std.debug.print("不超过背包容量的最大物品价值为 {}\n", .{res});
// 状态压缩后的动态规划
res = unboundedKnapsackDPComp(&wgt, &val, cap);
std.debug.print("不超过背包容量的最大物品价值为 {}\n", .{res});
_ = try std.io.getStdIn().reader().readByte();
}
|
0 | repos/hello-algo-zig | repos/hello-algo-zig/chapter_dynamic_programming/coin_change.zig | // File: coin_change.zig
// Created Time: 2023-07-15
// Author: sjinzh ([email protected])
const std = @import("std");
// 零钱兑换:动态规划
fn coinChangeDP(comptime coins: []i32, comptime amt: usize) i32 {
comptime var n = coins.len;
comptime var max = amt + 1;
// 初始化 dp 表
var dp = [_][amt + 1]i32{[_]i32{0} ** (amt + 1)} ** (n + 1);
// 状态转移:首行首列
for (1..amt + 1) |a| {
dp[0][a] = max;
}
// 状态转移:其余行列
for (1..n + 1) |i| {
for (1..amt + 1) |a| {
if (coins[i - 1] > @as(i32, @intCast(a))) {
// 若超过背包容量,则不选硬币 i
dp[i][a] = dp[i - 1][a];
} else {
// 不选和选硬币 i 这两种方案的较小值
dp[i][a] = @min(dp[i - 1][a], dp[i][a - @as(usize, @intCast(coins[i - 1]))] + 1);
}
}
}
if (dp[n][amt] != max) {
return @intCast(dp[n][amt]);
} else {
return -1;
}
}
// 零钱兑换:状态压缩后的动态规划
fn coinChangeDPComp(comptime coins: []i32, comptime amt: usize) i32 {
comptime var n = coins.len;
comptime var max = amt + 1;
// 初始化 dp 表
var dp = [_]i32{0} ** (amt + 1);
@memset(&dp, max);
dp[0] = 0;
// 状态转移
for (1..n + 1) |i| {
for (1..amt + 1) |a| {
if (coins[i - 1] > @as(i32, @intCast(a))) {
// 若超过背包容量,则不选硬币 i
dp[a] = dp[a];
} else {
// 不选和选硬币 i 这两种方案的较小值
dp[a] = @min(dp[a], dp[a - @as(usize, @intCast(coins[i - 1]))] + 1);
}
}
}
if (dp[amt] != max) {
return @intCast(dp[amt]);
} else {
return -1;
}
}
// Driver Code
pub fn main() !void {
comptime var coins = [_]i32{ 1, 2, 5 };
comptime var amt: usize = 4;
// 动态规划
var res = coinChangeDP(&coins, amt);
std.debug.print("凑到目标金额所需的最少硬币数量为 {}\n", .{res});
// 状态压缩后的动态规划
res = coinChangeDPComp(&coins, amt);
std.debug.print("凑到目标金额所需的最少硬币数量为 {}\n", .{res});
_ = try std.io.getStdIn().reader().readByte();
}
|