Skip to content

Commit

Permalink
Merge pull request #3 from peetya/feature/use-bubbletea
Browse files Browse the repository at this point in the history
feat: refactored generate command to use bubbletea prompts in interactive mode
  • Loading branch information
peetya authored May 11, 2023
2 parents 8990eb6 + a68de9a commit ffd2144
Show file tree
Hide file tree
Showing 20 changed files with 1,163 additions and 338 deletions.
53 changes: 34 additions & 19 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -83,16 +83,19 @@ $ snipforge generate --help
Here's a detailed explanation of the available flags for the `generate` command:

```bash
-g, --goal: The functionality description for the code snippet
-l, --language: The programming or tooling language to generate code in (e.g., PHP, Golang, etc.)
-v, --language-version: The version of the programming or tooling language to generate code for (if applicable)
-o, --output: The output file path for the generated code snippet
--output-chmod: The chmod value to apply to the output file (default 644)
--stdout: Print the generated code snippet to stdout instead of saving to a file
-k, --openai-key: The OpenAI API key
-m, --openai-model: The OpenAI model to use (default "gpt-3.5-turbo")
-q, --quiet: Suppress all output except for the generated code snippet
-d, --dry-run: Do not generate a code snippet, only print the generated description
-d, --dry-run do not generate a code snippet, only print the generated description
-g, --goal string the functionality description for the code snippet
-h, --help help for generate
-l, --language string the programming or tooling language to generate code in (e.g. PHP, Golang, etc...)
-v, --language-version string the version of the programming or tooling language to generate code for (if applicable)
-k, --openai-key string the OpenAI API key
--openai-max-tokens int the maximum number of tokens to generate
-m, --openai-model string the OpenAI model to use
--openai-temperature float32 the sampling temperature for the OpenAI model (between 0.0 and 2.0)
-o, --output string the output file path for the generated code snippet
-q, --quiet suppress all output except for the generated code snippet
      --stdout                    print the generated code snippet to stdout instead of saving to a file

```

## Example
Expand Down Expand Up @@ -126,25 +129,37 @@ $ snipforge generate
First, we need to define a set of goals that will be used to generate the snippet.

```
Goal #1: A controller that returns a list of users via the "/api/v1/users" endpoint
Goal #2: The output format can be changed via content negotiation
Goal #3: Support pagination using the page and limit query parameters
Goal #4: Read the users from the injected UserRepositoryInterface
Goal #5: The controller must follow the PSR-12 coding standard
Goal #6: The controller must follow the PSR-4 autoloading standard
What are your goals?
┃ 1 A controller that returns a list of users via the "/api/v1/users" endpoint
┃ 2 The output format can be changed via content negotiation
┃ 3 Support pagination using the page and limit query parameters
┃ 4 Read the users from the injected UserRepositoryInterface
┃ 5 The controller must follow PSR-4 and PSR-12 standards
```

Next, we need to define the programming language and version to generate the snippet for.

```
Language: Symfony
LanguageVersion (optional): 6
Which programming or tooling language do you want to use?
> Symfony
```

```
Which version of PHP do you want to use? (optional)
> 6
```

Then we need to define the output path:

```
Output file path: src/Controller/Api/V1/UserController.php
Where do you want to save the snippet?
> src/Controller/Api/V1/UserController.php
```

Then it will generate the following code snippet for you in `src/Controller/Api/V1/UserController.php`:
Expand Down
281 changes: 35 additions & 246 deletions cmd/generate.go
Original file line number Diff line number Diff line change
@@ -1,34 +1,24 @@
package cmd

import (
"fmt"
"github.com/adrg/strutil"
"github.com/adrg/strutil/metrics"
"github.com/briandowns/spinner"
"github.com/manifoldco/promptui"
"github.com/peetya/snipforge-cli/data"
"github.com/peetya/snipforge-cli/generator"
tea "github.com/charmbracelet/bubbletea"
"github.com/peetya/snipforge-cli/model"
"github.com/peetya/snipforge-cli/util"
"github.com/sashabaranov/go-openai"
"github.com/sirupsen/logrus"
"strings"
"time"

"github.com/peetya/snipforge-cli/prompt"
"github.com/spf13/cobra"
)

var (
goal string
language string
languageVersion string
output string
stdout bool
openaiKey string
openaiModel string
isQuiet bool
isDryRun bool
isInteractive bool
goal string
language string
languageVersion string
output string
openaiKey string
openaiModel string
openaiMaxTokens int
openaiTemperature float32
isQuiet bool
isDryRun bool
isStdout bool
)

var generateCmd = &cobra.Command{
Expand All @@ -38,100 +28,33 @@ var generateCmd = &cobra.Command{
Find more information at: https://github.com/peetya/snipforge-cli`,
RunE: func(cmd *cobra.Command, args []string) error {
isInteractive = !areMandatoryFlagsProvided()

if stdout {
isQuiet = true
}

if isInteractive && !isQuiet {
logrus.Info("Welcome to SnipForge's interactive mode! " +
"In this mode, you will be prompted to provide the necessary information for generating a code snippet.\n")
}

if goal == "" {
if isQuiet {
return fmt.Errorf("goal is required")
}
promptGoals()
}

if language == "" {
if isQuiet {
return fmt.Errorf("language is required")
}
promptLanguage()

if languageVersion == "" && !isQuiet {
promptVersion()
}
}

detectedLanguage := detectLanguage()

if output == "" {
if isQuiet {
output = guessOutput(detectedLanguage)
} else {
promptOutput(detectedLanguage)
}
}

if openaiKey == "" {
if isQuiet {
return fmt.Errorf("openai-key is required")
}
promptOpenAIKey()
}

rq := &model.GenerateRequest{
Goal: goal,
Language: language,
LanguageVersion: languageVersion,
Output: output,
OpenAIKey: openaiKey,
OpenAIModel: openaiModel,
}
req := &model.GenerateRequest{
Goal: goal,
Language: language,
LanguageVersion: languageVersion,
Output: output,
OpenAIKey: openaiKey,
OpenAIModel: openaiModel,
OpenAIMaxTokens: openaiMaxTokens,
OpenAITemperature: openaiTemperature,

s := spinner.New(spinner.CharSets[14], 100*time.Millisecond)
s.Suffix = " Generating code snippet...\n"
s.Start()

if !stdout {
if err := util.PrepareOutputFolderPath(output); err != nil {
return err
}
IsQuiet: isQuiet,
IsDryRun: isDryRun,
IsStdout: isStdout,
}

if isDryRun {
logrus.WithFields(logrus.Fields{
"goal": goal,
"language": language,
"languageVersion": languageVersion,
"output": output,
}).Warningf("Dry run enabled, skipping generating code snippet\n")
return nil
}
var po []tea.ProgramOption

snippet, err := generator.GenerateCodeSnippet(rq, detectedLanguage)
if err != nil {
return err
if req.IsStdout {
req.IsQuiet = true
po = append(po, tea.WithoutRenderer())
}

s.Stop()

if stdout {
fmt.Println(snippet)
return nil
}

if err = util.SaveSnippet(snippet, output); err != nil {
p := tea.NewProgram(prompt.InitializeModel(req), po...)
if _, err := p.Run(); err != nil {
return err
}

logrus.Infof("The snippet is successfully generated and saved to %s\n", output)
logrus.Warn("Please review the generated code snippet before using it in your project!")

return nil
},
}
Expand All @@ -142,149 +65,15 @@ func init() {
generateCmd.Flags().StringVarP(&languageVersion, "language-version", "v", "", "the version of the programming or tooling language to generate code for (if applicable)")

generateCmd.Flags().StringVarP(&output, "output", "o", "", "the output file path for the generated code snippet")
generateCmd.Flags().BoolVar(&stdout, "stdout", false, "print the generated code snippet to stdout instead of saving to a file")
generateCmd.Flags().BoolVar(&isStdout, "stdout", false, "print the generated code snippet to isStdout instead of saving to a file")

generateCmd.Flags().StringVarP(&openaiKey, "openai-key", "k", "", "the OpenAI API key")
generateCmd.Flags().StringVarP(&openaiModel, "openai-model", "m", openai.GPT3Dot5Turbo, "the OpenAI model to use")
generateCmd.Flags().StringVarP(&openaiModel, "openai-model", "m", "", "the OpenAI model to use")
generateCmd.Flags().IntVar(&openaiMaxTokens, "openai-max-tokens", 0, "the maximum number of tokens to generate")
generateCmd.Flags().Float32Var(&openaiTemperature, "openai-temperature", 0.0, "the sampling temperature for the OpenAI model (between 0.0 and 2.0)")

generateCmd.Flags().BoolVarP(&isQuiet, "quiet", "q", false, "suppress all output except for the generated code snippet")
generateCmd.Flags().BoolVarP(&isDryRun, "dry-run", "d", false, "do not generate a code snippet, only print the generated description")

rootCmd.AddCommand(generateCmd)
}

// promptGoals interactively collects goals one line at a time and joins them
// into the package-level goal string, separated by "; ". An empty answer
// terminates the input loop.
func promptGoals() {
	logrus.Info("First, please enter your goals one by one. These goals will help SnipForge understand the functionality you want in your code snippet. " +
		"After entering a goal, press Enter to input the next one. When you're done, simply press Enter on an empty line to proceed to the next step.")
	logrus.Info("Enter your goals:")

	var collected []string

	for n := 1; ; n++ {
		p := promptui.Prompt{
			Label: fmt.Sprintf("Goal #%d", n),
		}

		answer, err := p.Run()
		if err != nil {
			// Prompt failures (e.g. interrupt) are fatal in interactive mode.
			logrus.Fatal(err)
		}

		if answer == "" {
			break
		}

		collected = append(collected, answer)
	}

	goal = strings.Join(collected, "; ")
}

// promptLanguage asks for the programming or tooling language and stores the
// (validated, non-empty) answer in the package-level language variable.
func promptLanguage() {
	validate := func(input string) error {
		if input != "" {
			return nil
		}
		return fmt.Errorf("Language cannot be empty. Please provide a programming or tooling language, e.g. PHP, Golang, Docker, etc...")
	}

	p := promptui.Prompt{
		Label:    "Language",
		Validate: validate,
	}

	answer, err := p.Run()
	if err != nil {
		logrus.Fatal(err)
	}

	language = answer
}

// promptVersion asks for an optional language version and stores whatever the
// user entered (possibly empty) in the package-level languageVersion variable.
func promptVersion() {
	p := promptui.Prompt{Label: "Language Version (optional)"}

	answer, err := p.Run()
	if err != nil {
		logrus.Fatal(err)
	}

	languageVersion = answer
}

// promptOutput asks where to save the snippet, pre-filling an editable
// default derived from the detected language, and stores the answer in the
// package-level output variable.
func promptOutput(detectedLanguage *data.Language) {
	p := promptui.Prompt{
		Label:     "Output file path",
		Default:   guessOutput(detectedLanguage),
		AllowEdit: true,
	}

	answer, err := p.Run()
	if err != nil {
		logrus.Fatal(err)
	}

	output = answer
}

// guessOutput returns a sensible default output file name: the detected
// language's preferred file name when detection succeeded, otherwise a
// generic "snippet.txt".
func guessOutput(detectedLanguage *data.Language) string {
	if detectedLanguage == nil {
		return "snippet.txt"
	}

	return detectedLanguage.PreferredFileName
}

// promptOpenAIKey asks for the OpenAI API key and stores the (validated,
// non-empty) answer in the package-level openaiKey variable.
func promptOpenAIKey() {
	validate := func(input string) error {
		if input != "" {
			return nil
		}
		return fmt.Errorf("OpenAI API Key cannot be empty. Please provide a valid OpenAI API Key. More info: https://platform.openai.com/account/api-keys")
	}

	p := promptui.Prompt{
		Label:    "OpenAI API Key",
		Validate: validate,
	}

	answer, err := p.Run()
	if err != nil {
		logrus.Fatal(err)
	}

	openaiKey = answer
}

// areMandatoryFlagsProvided reports whether every flag required for
// non-interactive operation (goal, language, output, openai-key) was supplied.
func areMandatoryFlagsProvided() bool {
	for _, v := range []string{goal, language, output, openaiKey} {
		if v == "" {
			return false
		}
	}
	return true
}

// detectLanguage fuzzy-matches the user-supplied language string against the
// known languages in data.Languages using normalized Levenshtein similarity.
// It returns the best match, or nil when the best similarity score is below
// the threshold (or when no candidate matched at all).
func detectLanguage() *data.Language {
	var detectedLanguage data.Language
	var maxSimilarity float64

	similarityScoreThreshold := 0.5

	for _, lang := range data.Languages {
		for _, name := range lang.Names {
			// Case-insensitive comparison; Similarity returns a score in [0, 1].
			similarity := strutil.Similarity(strings.ToLower(language), strings.ToLower(name), metrics.NewLevenshtein())

			if similarity > maxSimilarity {
				maxSimilarity = similarity
				detectedLanguage = lang
			}
		}
	}

	// Check the threshold before touching detectedLanguage.Names: when nothing
	// matched, detectedLanguage is the zero value and Names[0] would panic.
	if maxSimilarity < similarityScoreThreshold {
		logrus.Debugf("Similarity score is lower than %f, skipping language detection", similarityScoreThreshold)
		return nil
	}

	logrus.Debugf("Detected language is %s with similarity score of %f", detectedLanguage.Names[0], maxSimilarity)

	return &detectedLanguage
}
Loading

0 comments on commit ffd2144

Please sign in to comment.