Update example build config and Llama demo to support the new async epoll backend and zigcoro scheduler.

This commit is contained in:
Foke Singh 2024-01-22 12:17:01 +00:00
parent a7b7ae0180
commit 0ce36599da
2 changed files with 1 addition and 1 deletion

View File

@@ -134,7 +134,6 @@ filegroup(
) )
use_repo(huggingface, "Meta-Llama-3.2-3B-Instruct") use_repo(huggingface, "Meta-Llama-3.2-3B-Instruct")
# Llama 3.1 # Llama 3.1
huggingface.model( huggingface.model(
name = "Meta-Llama-3.1-8B-Instruct", name = "Meta-Llama-3.1-8B-Instruct",

View File

@@ -28,6 +28,7 @@ pub const std_options = .{
.{ .scope = .zml_module, .level = if (show_mlir) .debug else .warn }, .{ .scope = .zml_module, .level = if (show_mlir) .debug else .warn },
.{ .scope = .llama, .level = .info }, .{ .scope = .llama, .level = .info },
}, },
.logFn = asynk.logFn,
}; };
pub fn generateText( pub fn generateText(