Support loading prompt from yml file #44
@@ -21,6 +21,7 @@ import (

```go
	"github.com/github/gh-models/pkg/util"
	"github.com/spf13/cobra"
	"github.com/spf13/pflag"
	"gopkg.in/yaml.v3"
)

// ModelParameters represents the parameters that can be set for a model run.
```

@@ -188,6 +189,22 @@ func isPipe(r io.Reader) bool {

```go
	return false
}

// promptFile mirrors the format of .prompt.yml
type promptFile struct {
	Name            string `yaml:"name"`
	Description     string `yaml:"description"`
	Model           string `yaml:"model"`
	ModelParameters struct {
		MaxTokens   *int     `yaml:"maxTokens"`
		Temperature *float64 `yaml:"temperature"`
		TopP        *float64 `yaml:"topP"`
	} `yaml:"modelParameters"`
	Messages []struct {
		Role    string `yaml:"role"`
		Content string `yaml:"content"`
	} `yaml:"messages"`
}
```
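As a side note, here is a minimal, self-contained sketch (not part of the diff) of how a prompt document decodes into this struct with `gopkg.in/yaml.v3`. The parameter fields are pointers, so anything omitted from the YAML stays nil instead of defaulting to zero. The trimmed struct copy and the sample document below are illustrative only:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Trimmed copy of the promptFile struct from the diff, repeated here so the
// sketch compiles on its own.
type promptFile struct {
	Name            string `yaml:"name"`
	Model           string `yaml:"model"`
	ModelParameters struct {
		MaxTokens   *int     `yaml:"maxTokens"`
		Temperature *float64 `yaml:"temperature"`
		TopP        *float64 `yaml:"topP"`
	} `yaml:"modelParameters"`
	Messages []struct {
		Role    string `yaml:"role"`
		Content string `yaml:"content"`
	} `yaml:"messages"`
}

func main() {
	doc := []byte(`
name: Example
model: openai/gpt-4o-mini
modelParameters:
  temperature: 0.5
messages:
  - role: system
    content: You are concise.
`)
	var pf promptFile
	if err := yaml.Unmarshal(doc, &pf); err != nil {
		panic(err)
	}
	// temperature was provided, so its pointer is set; maxTokens stays nil.
	fmt.Println(pf.Model, *pf.ModelParameters.Temperature, pf.ModelParameters.MaxTokens)
	// Output: openai/gpt-4o-mini 0.5 <nil>
}
```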
|
```go
// NewRunCommand returns a new gh command for running a model.
func NewRunCommand(cfg *command.Config) *cobra.Command {
	cmd := &cobra.Command{
```

@@ -208,6 +225,24 @@ func NewRunCommand(cfg *command.Config) *cobra.Command {

```go
		Example: "gh models run openai/gpt-4o-mini \"how many types of hyena are there?\"",
		Args:    cobra.ArbitraryArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			filePath, _ := cmd.Flags().GetString("file")
```
|
> **Member:** That …
>
> **Author (Collaborator):** Yeah, it's probably wise to do that, but I think it's safe to follow up in a later change. The possible errors here are things like "you defined the …
```go
			var pf *promptFile
			if filePath != "" {
				b, err := os.ReadFile(filePath)
				if err != nil {
					return err
				}
				p := promptFile{}
				if err := yaml.Unmarshal(b, &p); err != nil {
					return err
				}
				pf = &p
				// Inject model name as the first positional arg if user didn't supply one
				if pf.Model != "" && len(args) == 0 {
					args = append([]string{pf.Model}, args...)
				}
			}

			cmdHandler := newRunCommandHandler(cmd, cfg, args)
			if cmdHandler == nil {
				return nil
```
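The review thread above suggests surfacing friendlier errors for malformed prompt files and treats that as a follow-up. Purely as an illustration of that follow-up, and not what the PR itself does, the parse error could be wrapped with the offending path. `loadPromptFile` below is a hypothetical helper that assumes the `promptFile` type from the diff and the `fmt`, `os`, and `gopkg.in/yaml.v3` imports:

```go
// loadPromptFile is a hypothetical helper (not part of this PR) showing one
// way to attach the file path to a YAML parse failure.
func loadPromptFile(filePath string) (*promptFile, error) {
	b, err := os.ReadFile(filePath)
	if err != nil {
		return nil, err
	}
	p := promptFile{}
	if err := yaml.Unmarshal(b, &p); err != nil {
		return nil, fmt.Errorf("failed to parse %q as a .prompt.yml file: %w", filePath, err)
	}
	return &p, nil
}
```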
|
|
@@ -248,12 +283,36 @@ func NewRunCommand(cfg *command.Config) *cobra.Command {

```go
				systemPrompt: systemPrompt,
			}

			// preload conversation & parameters from YAML
			if pf != nil {
				for _, m := range pf.Messages {
					switch strings.ToLower(m.Role) {
					case "system":
						if conversation.systemPrompt == "" {
							conversation.systemPrompt = m.Content
						} else {
							conversation.AddMessage(azuremodels.ChatMessageRoleSystem, m.Content)
						}
					case "user":
						conversation.AddMessage(azuremodels.ChatMessageRoleUser, m.Content)
					case "assistant":
						conversation.AddMessage(azuremodels.ChatMessageRoleAssistant, m.Content)
					}
				}
			}

			mp := ModelParameters{}
			err = mp.PopulateFromFlags(cmd.Flags())
			if err != nil {
				return err
			}

			if pf != nil {
				mp.maxTokens = pf.ModelParameters.MaxTokens
				mp.temperature = pf.ModelParameters.Temperature
				mp.topP = pf.ModelParameters.TopP
			}

			for {
				prompt := ""
				if initialPrompt != "" {
```
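Note on precedence: because the prompt-file values are assigned after `mp.PopulateFromFlags(cmd.Flags())`, passing `--file` makes the YAML `modelParameters` win over any `--max-tokens`, `--temperature`, or `--top-p` flags, and a parameter omitted from the YAML ends up unset (nil) even if the corresponding flag was supplied.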
|
|
@@ -369,6 +428,7 @@ func NewRunCommand(cfg *command.Config) *cobra.Command {

```go
		},
	}

	cmd.Flags().String("file", "", "Path to a .prompt.yml file.")
	cmd.Flags().String("max-tokens", "", "Limit the maximum tokens for the model response.")
	cmd.Flags().String("temperature", "", "Controls randomness in the response, use lower to be more deterministic.")
	cmd.Flags().String("top-p", "", "Controls text diversity by selecting the most probable words until a set probability is reached.")
```
New example .prompt.yml file:

@@ -0,0 +1,14 @@
```yaml
name: Text Summarizer
description: Summarizes input text concisely
model: openai/gpt-4o-mini
modelParameters:
  temperature: 0.5
messages:
  - role: system
    content: You are a text summarizer. Your only job is to summarize text given to you.
  - role: user
    content: |
      Summarize the given text, beginning with "Summary -":
      <text>
      {{input}}
      </text>
```
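With the new `--file` flag, this example can be exercised with something like `gh models run --file <path-to-this-file>` (the exact path depends on where the example lives in the repo). Because the YAML names a model, no positional model argument is required, since the run command injects `openai/gpt-4o-mini` as the first argument.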