train : remove LLAMA_SUPPORTS_GPU_OFFLOAD
This commit is contained in:
parent
8bfb0b6a64
commit
aa71356dc8
1 changed file with 6 additions and 6 deletions
|
@@ -1363,12 +1363,12 @@ bool consume_common_train_arg(
             *invalid_param = true;
             return true;
         }
-#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD
-        params->n_gpu_layers = std::stoi(argv[i]);
-#else
-        fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n");
-        fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n");
-#endif
+        if (llama_supports_gpu_offload()) {
+            params->n_gpu_layers = std::stoi(argv[i]);
+        } else {
+            fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n");
+            fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n");
+        }
     } else if (arg == "-h" || arg == "--help") {
         params->print_usage = true;
         return true;
|
|
Loading…
Add table
Add a link
Reference in a new issue