diff --git a/core/cli/run.go b/core/cli/run.go
index 7583d97f9a81..015e2ce1d2c0 100644
--- a/core/cli/run.go
+++ b/core/cli/run.go
@@ -378,7 +378,7 @@ func (r *RunCMD) Run(ctx *cliContext.Context) error {
 
 	app, err := application.New(opts...)
 	if err != nil {
-		return fmt.Errorf("failed basic startup tasks with error %s", err.Error())
+		return fmt.Errorf("LocalAI failed to start: %w.\nTroubleshooting steps:\n 1. Check that your models directory exists and is accessible: %s\n 2. Verify model config files are valid YAML: 'local-ai util usecase-heuristic <config-file>'\n 3. Check available disk space and file permissions\n 4. Run with --log-level=debug for more details\nSee https://localai.io/basics/troubleshooting/ for more help", err, r.ModelsPath)
 	}
 
 	appHTTP, err := http.API(app)
diff --git a/core/cli/transcript.go b/core/cli/transcript.go
index 8da3892a0b39..341386540383 100644
--- a/core/cli/transcript.go
+++ b/core/cli/transcript.go
@@ -3,7 +3,6 @@ package cli
 import (
 	"context"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"strings"
 
@@ -61,7 +60,7 @@ func (t *TranscriptCMD) Run(ctx *cliContext.Context) error {
 
 	c, exists := cl.GetModelConfig(t.Model)
 	if !exists {
-		return errors.New("model not found")
+		return fmt.Errorf("model %q not found. Run 'local-ai models list' to see available models, or install one with 'local-ai models install <model>'. See https://localai.io/models/ for more information", t.Model)
 	}
 
 	c.Threads = &t.Threads
diff --git a/core/cli/util.go b/core/cli/util.go
index b002e254e789..14ab9b561cdc 100644
--- a/core/cli/util.go
+++ b/core/cli/util.go
@@ -74,7 +74,7 @@ func (u *CreateOCIImageCMD) Run(ctx *cliContext.Context) error {
 
 func (u *GGUFInfoCMD) Run(ctx *cliContext.Context) error {
 	if len(u.Args) == 0 {
-		return fmt.Errorf("no GGUF file provided")
+		return fmt.Errorf("no GGUF file provided. Usage: local-ai util gguf-info <file.gguf>\nGGUF is a binary format for storing quantized language models.\nYou can download GGUF models from https://huggingface.co or install one with 'local-ai models install <model>'")
 	}
 
 	// We try to guess only if we don't have a template defined already
 	f, err := gguf.ParseGGUFFile(u.Args[0])
diff --git a/core/cli/worker/worker_p2p.go b/core/cli/worker/worker_p2p.go
index 868357ccffd5..b843c44d972b 100644
--- a/core/cli/worker/worker_p2p.go
+++ b/core/cli/worker/worker_p2p.go
@@ -38,7 +38,7 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
 	// Check if the token is set
 	// as we always need it.
 	if r.Token == "" {
-		return fmt.Errorf("Token is required")
+		return fmt.Errorf("a P2P token is required to join the network. Set it via the LOCALAI_TOKEN environment variable or the --token flag. You can generate a token by running 'local-ai run --p2p' on the main node. See https://localai.io/features/distribute/ for more information")
 	}
 
 	port, err := freeport.GetFreePort()
diff --git a/core/config/model_config_loader.go b/core/config/model_config_loader.go
index 68647a0867f6..7c214b186978 100644
--- a/core/config/model_config_loader.go
+++ b/core/config/model_config_loader.go
@@ -192,9 +192,9 @@ func (bcl *ModelConfigLoader) ReadModelConfig(file string, opts ...ConfigLoaderO
 		bcl.configs[c.Name] = *c
 	} else {
 		if err != nil {
-			return fmt.Errorf("config is not valid: %w", err)
+			return fmt.Errorf("model config %q is not valid: %w. Ensure the YAML file has a valid 'name' field and correct syntax. See https://localai.io/docs/getting-started/customize-model/ for config reference", file, err)
 		}
-		return fmt.Errorf("config is not valid")
+		return fmt.Errorf("model config %q is not valid. Ensure the YAML file has a valid 'name' field and correct syntax. See https://localai.io/docs/getting-started/customize-model/ for config reference", file)
 	}
 	return nil