mirror of
https://github.com/ggerganov/llama.cpp
synced 2026-03-02 05:09:23 +01:00
Setting mmap and direct_io to false as default in llama-bench.cpp (#18841)
This commit is contained in:
parent
4ea2eaac01
commit
aa1dc3770a
@@ -372,8 +372,8 @@ static const cmd_params cmd_params_defaults = {
     /* devices              */ { {} },
     /* tensor_split         */ { std::vector<float>(llama_max_devices(), 0.0f) },
     /* tensor_buft_overrides*/ { std::vector<llama_model_tensor_buft_override>{ { nullptr, nullptr } } },
-    /* use_mmap             */ { true },
-    /* use_direct_io        */ { true },
+    /* use_mmap             */ { false },
+    /* use_direct_io        */ { false },
     /* embeddings           */ { false },
     /* no_op_offload        */ { false },
     /* no_host              */ { false },
Loading…
Reference in New Issue
Block a user